Mirror of https://github.com/ansible/awx.git (synced 2026-02-09 05:24:42 -03:30)
Compare commits
610 Commits
.gitignore (vendored), 5 changes
@@ -1,3 +1,7 @@
# Ignore generated schema
swagger.json
schema.json
reference-schema.json

# Tags
.tags

@@ -58,6 +62,7 @@ __pycache__
# UI build flag files
awx/ui/.deps_built
awx/ui/.release_built
awx/ui/.release_deps_built

# Testing
.cache

@@ -34,10 +34,11 @@ Have questions about this document or anything not covered here? Come chat with
## Things to know prior to submitting code

- All code submissions are done through pull requests against the `devel` branch.
- You must use `git commit --signoff` for any commit to be merged, and agree that usage of --signoff constitutes agreement with the terms of [DCO 1.1](./DCO_1_1.md).
- Take care to make sure no merge commits are in the submission, and use `git rebase` vs `git merge` for this reason.
- If collaborating with someone else on the same branch, consider using `--force-with-lease` instead of `--force`. This will prevent you from accidentally overwriting commits pushed by someone else. For more information, see https://git-scm.com/docs/git-push#git-push---force-with-leaseltrefnamegt
- If submitting a large code change, it's a good idea to join the `#ansible-awx` channel on irc.freenode.net, and talk about what you would like to do or add first. This not only helps everyone know what's going on, it also helps save time and effort, if the community decides some changes are needed.
- We ask all of our community members and contributors to adhere to the [Ansible code of conduct](http://docs.ansible.com/ansible/latest/community/code_of_conduct.html). If you have questions, or need assistance, please reach out to our community team at [codeofconduct@ansible.com](mailto:codeofconduct@ansible.com)

## Setting up your development environment

@@ -49,7 +50,7 @@ The AWX development environment workflow and toolchain is based on Docker, and t
Prior to starting the development services, you'll need `docker` and `docker-compose`. On Linux, you can generally find these in your distro's packaging, but you may find that Docker themselves maintain a separate repo that tracks more closely to the latest releases.

For macOS and Windows, we recommend [Docker for Mac](https://www.docker.com/docker-mac) and [Docker for Windows](https://www.docker.com/docker-windows)
respectively.

For Linux platforms, refer to the following from Docker:

@@ -137,21 +138,21 @@ Run the following to build the AWX UI:
```
### Running the environment

#### Start the containers

Start the development containers by running the following:

```bash
(host)$ make docker-compose
```

The above utilizes the image built in the previous step, and will automatically start all required services and dependent containers. Once the containers launch, your session will be attached to the *awx* container, and you'll be able to watch log messages and events in real time. You will see messages from Django and the front end build process.

If you start a second terminal session, you can take a look at the running containers using the `docker ps` command. For example:

```bash
# List running containers
(host)$ docker ps

$ docker ps
CONTAINER ID        IMAGE               COMMAND             CREATED             STATUS              PORTS               NAMES

@@ -219,7 +220,7 @@ If you want to start and use the development environment, you'll first need to b
```

The above will do all the setup tasks, including running database migrations, so it may take a couple minutes.

Now you can start each service individually, or start all services in a pre-configured tmux session like so:

```bash

@@ -248,9 +249,9 @@ Before you can log into AWX, you need to create an admin user. With this user yo
(container)# awx-manage createsuperuser
```
You will be prompted for a username, an email address, and a password, and you will be asked to confirm the password. The email address is not important, so just enter something that looks like an email address. Remember the username and password, as you will use them to log into the web interface for the first time.

##### Load demo data

You can optionally load some demo data. This will create a demo project, inventory, and job template. From within the container shell, run the following to load the data:

```bash

@@ -276,7 +277,7 @@ in OpenAPI format. A variety of online tools are available for translating
this data into more consumable formats (such as HTML). http://editor.swagger.io
is an example of one such service.

### Accessing the AWX web interface

You can now log into the AWX web interface at [https://localhost:8043](https://localhost:8043), and access the API directly at [https://localhost:8043/api/](https://localhost:8043/api/).

@@ -289,7 +290,7 @@ When necessary, remove any AWX containers and images by running the following:
```bash
(host)$ make docker-clean
```

## What should I work on?

For feature work, take a look at the current [Enhancements](https://github.com/ansible/awx/issues?q=is%3Aissue+is%3Aopen+label%3Atype%3Aenhancement).

@@ -331,6 +332,23 @@ Sometimes it might take us a while to fully review your PR. We try to keep the `
All submitted PRs will have the linter and unit tests run against them via Zuul, and the status reported in the PR.

## PR Checks run by Zuul
Zuul jobs for awx are defined in the [zuul-jobs](https://github.com/ansible/zuul-jobs) repo.

Zuul runs the following checks that must pass:
1) `tox-awx-api-lint`
2) `tox-awx-ui-lint`
3) `tox-awx-api`
4) `tox-awx-ui`
5) `tox-awx-swagger`

Zuul runs the following checks that are non-voting (can not pass but serve to inform PR reviewers):
1) `tox-awx-detect-schema-change`
This check generates the schema and diffs it against a reference copy of the `devel` version of the schema.
Reviewers should inspect the `job-output.txt.gz` related to the check if there is a failure (grep for `diff -u -b` to find the beginning of the diff).
If the schema change is expected and makes sense in relation to the changes made by the PR, then you are good to go!
If not, the schema changes should be fixed, but this decision must be enforced by reviewers.

## Reporting Issues

We welcome your feedback, and encourage you to file an issue when you run into a problem. But before opening a new issue, we ask that you please view our [Issues guide](./ISSUES.md).

@@ -18,7 +18,7 @@ $ pip install --upgrade ansible-tower-cli

The AWX host URL, user, and password must be set for the AWX instance to be exported:
```
$ tower-cli config host <old-awx-host.example.com>
$ tower-cli config host http://<old-awx-host.example.com>
$ tower-cli config username <user>
$ tower-cli config password <pass>
```

@@ -62,7 +62,7 @@ For other install methods, refer to the [Install.md](https://github.com/ansible/
Configure tower-cli for your new AWX host as shown earlier. Import from a JSON file named assets.json

```
$ tower-cli config host <new-awx-host.example.com>
$ tower-cli config host http://<new-awx-host.example.com>
$ tower-cli config username <user>
$ tower-cli config password <pass>
$ tower-cli send assets.json

Makefile, 103 changes
@@ -1,4 +1,4 @@
PYTHON ?= python
PYTHON ?= python3
PYTHON_VERSION = $(shell $(PYTHON) -c "from distutils.sysconfig import get_python_version; print(get_python_version())")
SITELIB=$(shell $(PYTHON) -c "from distutils.sysconfig import get_python_lib; print(get_python_lib())")
OFFICIAL ?= no

@@ -11,7 +11,6 @@ GIT_BRANCH ?= $(shell git rev-parse --abbrev-ref HEAD)
MANAGEMENT_COMMAND ?= awx-manage
IMAGE_REPOSITORY_AUTH ?=
IMAGE_REPOSITORY_BASE ?= https://gcr.io

VERSION := $(shell cat VERSION)

# NOTE: This defaults the container image version to the branch that's active

@@ -54,6 +53,7 @@ WHEEL_FILE ?= $(WHEEL_NAME)-py2-none-any.whl

# UI flag files
UI_DEPS_FLAG_FILE = awx/ui/.deps_built
UI_RELEASE_DEPS_FLAG_FILE = awx/ui/.release_deps_built
UI_RELEASE_FLAG_FILE = awx/ui/.release_built

I18N_FLAG_FILE = .i18n_built

@@ -74,6 +74,7 @@ clean-ui:
rm -rf awx/ui/test/e2e/reports/
rm -rf awx/ui/client/languages/
rm -f $(UI_DEPS_FLAG_FILE)
rm -f $(UI_RELEASE_DEPS_FLAG_FILE)
rm -f $(UI_RELEASE_FLAG_FILE)

clean-tmp:

@@ -85,6 +86,11 @@ clean-venv:
clean-dist:
rm -rf dist

clean-schema:
rm -rf swagger.json
rm -rf schema.json
rm -rf reference-schema.json

# Remove temporary build files, compiled Python files.
clean: clean-ui clean-dist
rm -rf awx/public

@@ -116,23 +122,30 @@ virtualenv_ansible:
mkdir $(VENV_BASE); \
fi; \
if [ ! -d "$(VENV_BASE)/ansible" ]; then \
virtualenv --system-site-packages $(VENV_BASE)/ansible && \
virtualenv -p python --system-site-packages $(VENV_BASE)/ansible && \
$(VENV_BASE)/ansible/bin/pip install $(PIP_OPTIONS) --ignore-installed six packaging appdirs && \
$(VENV_BASE)/ansible/bin/pip install $(PIP_OPTIONS) --ignore-installed setuptools==36.0.1 && \
$(VENV_BASE)/ansible/bin/pip install $(PIP_OPTIONS) --ignore-installed pip==9.0.1; \
fi; \
fi

virtualenv_ansible_py3:
if [ "$(VENV_BASE)" ]; then \
if [ ! -d "$(VENV_BASE)" ]; then \
mkdir $(VENV_BASE); \
fi; \
if [ ! -d "$(VENV_BASE)/ansible3" ]; then \
python3 -m venv --system-site-packages $(VENV_BASE)/ansible3; \
fi; \
fi

virtualenv_awx:
if [ "$(VENV_BASE)" ]; then \
if [ ! -d "$(VENV_BASE)" ]; then \
mkdir $(VENV_BASE); \
fi; \
if [ ! -d "$(VENV_BASE)/awx" ]; then \
virtualenv --system-site-packages $(VENV_BASE)/awx && \
$(VENV_BASE)/awx/bin/pip install $(PIP_OPTIONS) --ignore-installed six packaging appdirs && \
$(VENV_BASE)/awx/bin/pip install $(PIP_OPTIONS) --ignore-installed setuptools==36.0.1 && \
$(VENV_BASE)/awx/bin/pip install $(PIP_OPTIONS) --ignore-installed pip==9.0.1; \
$(PYTHON) -m venv $(VENV_BASE)/awx; \
fi; \
fi

@@ -144,6 +157,11 @@ requirements_ansible: virtualenv_ansible
fi
$(VENV_BASE)/ansible/bin/pip uninstall --yes -r requirements/requirements_ansible_uninstall.txt

requirements_ansible_py3: virtualenv_ansible_py3
cat requirements/requirements_ansible.txt requirements/requirements_ansible_git.txt | $(VENV_BASE)/ansible3/bin/pip3 install $(PIP_OPTIONS) --no-binary $(SRC_ONLY_PKGS) --ignore-installed -r /dev/stdin
$(VENV_BASE)/ansible3/bin/pip3 install ansible # can't inherit from system ansible, it's py2
$(VENV_BASE)/ansible3/bin/pip3 uninstall --yes -r requirements/requirements_ansible_uninstall.txt

requirements_ansible_dev:
if [ "$(VENV_BASE)" ]; then \
$(VENV_BASE)/ansible/bin/pip install pytest mock; \

@@ -151,11 +169,9 @@ requirements_ansible_dev:

requirements_isolated:
if [ ! -d "$(VENV_BASE)/awx" ]; then \
virtualenv --system-site-packages $(VENV_BASE)/awx && \
$(VENV_BASE)/awx/bin/pip install $(PIP_OPTIONS) --ignore-installed six packaging appdirs && \
$(VENV_BASE)/awx/bin/pip install $(PIP_OPTIONS) --ignore-installed setuptools==35.0.2 && \
$(VENV_BASE)/awx/bin/pip install $(PIP_OPTIONS) --ignore-installed pip==9.0.1; \
$(PYTHON) -m venv $(VENV_BASE)/awx; \
fi;
echo "include-system-site-packages = true" >> $(VENV_BASE)/awx/lib/python$(PYTHON_VERSION)/pyvenv.cfg
$(VENV_BASE)/awx/bin/pip install -r requirements/requirements_isolated.txt

# Install third-party requirements needed for AWX's environment.

@@ -165,6 +181,7 @@ requirements_awx: virtualenv_awx
else \
cat requirements/requirements.txt requirements/requirements_git.txt | $(VENV_BASE)/awx/bin/pip install $(PIP_OPTIONS) --no-binary $(SRC_ONLY_PKGS) --ignore-installed -r /dev/stdin ; \
fi
echo "include-system-site-packages = true" >> $(VENV_BASE)/awx/lib/python$(PYTHON_VERSION)/pyvenv.cfg
#$(VENV_BASE)/awx/bin/pip uninstall --yes -r requirements/requirements_tower_uninstall.txt

requirements_awx_dev:

@@ -172,7 +189,7 @@ requirements_awx_dev:

requirements: requirements_ansible requirements_awx

requirements_dev: requirements requirements_awx_dev requirements_ansible_dev
requirements_dev: requirements requirements_ansible_py3 requirements_awx_dev requirements_ansible_dev

requirements_test: requirements

@@ -191,7 +208,7 @@ version_file:
if [ "$(VENV_BASE)" ]; then \
. $(VENV_BASE)/awx/bin/activate; \
fi; \
python -c "import awx as awx; print awx.__version__" > /var/lib/awx/.awx_version; \
python -c "import awx; print(awx.__version__)" > /var/lib/awx/.awx_version; \

# Do any one-time init tasks.
comma := ,

@@ -204,7 +221,7 @@ init:
if [ "$(AWX_GROUP_QUEUES)" == "tower,thepentagon" ]; then \
$(MANAGEMENT_COMMAND) provision_instance --hostname=isolated; \
$(MANAGEMENT_COMMAND) register_queue --queuename='thepentagon' --hostnames=isolated --controller=tower; \
$(MANAGEMENT_COMMAND) generate_isolated_key | ssh -o "StrictHostKeyChecking no" root@isolated 'cat >> /root/.ssh/authorized_keys'; \
$(MANAGEMENT_COMMAND) generate_isolated_key > /awx_devel/awx/main/expect/authorized_keys; \
fi;

# Refresh development environment after pulling new code.

@@ -255,7 +272,7 @@ supervisor:
@if [ "$(VENV_BASE)" ]; then \
. $(VENV_BASE)/awx/bin/activate; \
fi; \
supervisord --configuration /supervisor.conf --pidfile=/tmp/supervisor_pid
supervisord --pidfile=/tmp/supervisor_pid

# Alternate approach to tmux to run all development tasks specified in
# Procfile.

@@ -338,18 +355,21 @@ pyflakes: reports
pylint: reports
@(set -o pipefail && $@ | reports/$@.report)

genschema: reports
$(MAKE) swagger PYTEST_ARGS="--genschema"

swagger: reports
@if [ "$(VENV_BASE)" ]; then \
. $(VENV_BASE)/awx/bin/activate; \
fi; \
(set -o pipefail && py.test awx/conf/tests/functional awx/main/tests/functional/api awx/main/tests/docs --release=$(VERSION_TARGET) | tee reports/$@.report)
(set -o pipefail && py.test $(PYTEST_ARGS) awx/conf/tests/functional awx/main/tests/functional/api awx/main/tests/docs --release=$(VERSION_TARGET) | tee reports/$@.report)

check: flake8 pep8 # pyflakes pylint

awx-link:
cp -R /tmp/awx.egg-info /awx_devel/ || true
sed -i "s/placeholder/$(shell git describe --long | sed 's/\./\\./g')/" /awx_devel/awx.egg-info/PKG-INFO
cp -f /tmp/awx.egg-link /venv/awx/lib/python2.7/site-packages/awx.egg-link
cp -f /tmp/awx.egg-link /venv/awx/lib/python$(PYTHON_VERSION)/site-packages/awx.egg-link

TEST_DIRS ?= awx/main/tests/unit awx/main/tests/functional awx/conf/tests awx/sso/tests

@@ -446,7 +466,7 @@ messages:
# generate l10n .json .mo
languages: $(I18N_FLAG_FILE)

$(I18N_FLAG_FILE): $(UI_DEPS_FLAG_FILE)
$(I18N_FLAG_FILE): $(UI_RELEASE_DEPS_FLAG_FILE)
$(NPM_BIN) --prefix awx/ui run languages
$(PYTHON) tools/scripts/compilemessages.py
touch $(I18N_FLAG_FILE)

@@ -454,12 +474,30 @@ $(I18N_FLAG_FILE): $(UI_DEPS_FLAG_FILE)
# End l10n TASKS
# --------------------------------------

# UI TASKS
# UI RELEASE TASKS
# --------------------------------------
ui-release: $(UI_RELEASE_FLAG_FILE)

$(UI_RELEASE_FLAG_FILE): $(I18N_FLAG_FILE) $(UI_RELEASE_DEPS_FLAG_FILE)
$(NPM_BIN) --prefix awx/ui run build-release
touch $(UI_RELEASE_FLAG_FILE)

$(UI_RELEASE_DEPS_FLAG_FILE):
PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=1 $(NPM_BIN) --unsafe-perm --prefix awx/ui install --no-save awx/ui
touch $(UI_RELEASE_DEPS_FLAG_FILE)

# END UI RELEASE TASKS
# --------------------------------------

# UI TASKS
# --------------------------------------
ui-deps: $(UI_DEPS_FLAG_FILE)

$(UI_DEPS_FLAG_FILE):
@if [ -f ${UI_RELEASE_DEPS_FLAG_FILE} ]; then \
rm -rf awx/ui/node_modules; \
rm -f ${UI_RELEASE_DEPS_FLAG_FILE}; \
fi; \
$(NPM_BIN) --unsafe-perm --prefix awx/ui install --no-save awx/ui
touch $(UI_DEPS_FLAG_FILE)

@@ -474,12 +512,6 @@ ui-docker: $(UI_DEPS_FLAG_FILE)
ui-devel: $(UI_DEPS_FLAG_FILE)
$(NPM_BIN) --prefix awx/ui run build-devel -- $(MAKEFLAGS)

ui-release: $(UI_RELEASE_FLAG_FILE)

$(UI_RELEASE_FLAG_FILE): $(I18N_FLAG_FILE) $(UI_DEPS_FLAG_FILE)
$(NPM_BIN) --prefix awx/ui run build-release
touch $(UI_RELEASE_FLAG_FILE)

ui-test: $(UI_DEPS_FLAG_FILE)
$(NPM_BIN) --prefix awx/ui run test

@@ -490,9 +522,6 @@ ui-test-ci: $(UI_DEPS_FLAG_FILE)
$(NPM_BIN) --prefix awx/ui run test:ci
$(NPM_BIN) --prefix awx/ui run unit

testjs_ci:
echo "Update UI unittests later" #ui-test-ci

jshint: $(UI_DEPS_FLAG_FILE)
$(NPM_BIN) run --prefix awx/ui jshint
$(NPM_BIN) run --prefix awx/ui lint

@@ -540,12 +569,7 @@ docker-isolated:
TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose -f tools/docker-compose.yml -f tools/docker-isolated-override.yml create
docker start tools_awx_1
docker start tools_isolated_1
echo "__version__ = '`git describe --long | cut -d - -f 1-1`'" | docker exec -i tools_isolated_1 /bin/bash -c "cat > /venv/awx/lib/python2.7/site-packages/awx.py"
if [ "`docker exec -i -t tools_isolated_1 cat /root/.ssh/authorized_keys`" == "`docker exec -t tools_awx_1 cat /root/.ssh/id_rsa.pub`" ]; then \
echo "SSH keys already copied to isolated instance"; \
else \
docker exec "tools_isolated_1" bash -c "mkdir -p /root/.ssh && rm -f /root/.ssh/authorized_keys && echo $$(docker exec -t tools_awx_1 cat /root/.ssh/id_rsa.pub) >> /root/.ssh/authorized_keys"; \
fi
echo "__version__ = '`git describe --long | cut -d - -f 1-1`'" | docker exec -i tools_isolated_1 /bin/bash -c "cat > /venv/awx/lib/python$(PYTHON_VERSION)/site-packages/awx.py"
CURRENT_UID=$(shell id -u) TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose -f tools/docker-compose.yml -f tools/docker-isolated-override.yml up

# Docker Compose Development environment

@@ -564,6 +588,16 @@ docker-compose-runtest:
docker-compose-build-swagger:
cd tools && CURRENT_UID=$(shell id -u) TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose run --rm --service-ports awx /start_tests.sh swagger

docker-compose-genschema:
cd tools && CURRENT_UID=$(shell id -u) TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose run --rm --service-ports awx /start_tests.sh genschema
mv swagger.json schema.json

docker-compose-detect-schema-change:
$(MAKE) docker-compose-genschema
curl https://s3.amazonaws.com/awx-public-ci-files/schema.json -o reference-schema.json
# Ignore differences in whitespace with -b
diff -u -b reference-schema.json schema.json

docker-compose-clean:
cd tools && CURRENT_UID=$(shell id -u) TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose run --rm -w /awx_devel --service-ports awx make clean
cd tools && TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose rm -sf

@@ -600,7 +634,6 @@ docker-compose-cluster-elk: docker-auth
minishift-dev:
ansible-playbook -i localhost, -e devtree_directory=$(CURDIR) tools/clusterdevel/start_minishift_dev.yml

clean-elk:
docker stop tools_kibana_1
docker stop tools_logstash_1

@@ -21,6 +21,48 @@ except ImportError: # pragma: no cover
    MODE = 'production'


import hashlib

try:
    import django
    from django.utils.encoding import force_bytes
    from django.db.backends.base.schema import BaseDatabaseSchemaEditor
    from django.db.backends.base import schema
    HAS_DJANGO = True
except ImportError:
    HAS_DJANGO = False


if HAS_DJANGO is True:

    # This line exists to make sure we don't regress on FIPS support if we
    # upgrade Django; if you're upgrading Django and see this error,
    # update the version check below, and confirm that FIPS still works.
    if django.__version__ != '1.11.16':
        raise RuntimeError("Django version other than 1.11.16 detected {}. \
            Subclassing BaseDatabaseSchemaEditor is known to work for Django 1.11.16 \
            and may not work in newer Django versions.".format(django.__version__))

    class FipsBaseDatabaseSchemaEditor(BaseDatabaseSchemaEditor):

        @classmethod
        def _digest(cls, *args):
            """
            Generates a 32-bit digest of a set of arguments that can be used to
            shorten identifying names.
            """
            try:
                h = hashlib.md5()
            except ValueError:
                h = hashlib.md5(usedforsecurity=False)
            for arg in args:
                h.update(force_bytes(arg))
            return h.hexdigest()[:8]

    schema.BaseDatabaseSchemaEditor = FipsBaseDatabaseSchemaEditor


def find_commands(management_dir):
    # Modified version of function from django/core/management/__init__.py.
    command_dir = os.path.join(management_dir, 'commands')

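The hunk above keeps Django's index-name hashing working on FIPS-enabled systems, where a plain `hashlib.md5()` call is rejected. A minimal sketch of the same fallback pattern outside of Django; it assumes an interpreter whose `hashlib.md5` accepts the `usedforsecurity` keyword (FIPS-patched distro builds, and standard CPython 3.9+):

```python
import hashlib

def short_digest(*args):
    # On FIPS-enabled builds, md5() raises ValueError; retry with
    # usedforsecurity=False to mark the hash as a non-cryptographic
    # identifier used only to shorten names.
    try:
        h = hashlib.md5()
    except ValueError:
        h = hashlib.md5(usedforsecurity=False)
    for arg in args:
        h.update(str(arg).encode("utf-8"))
    # 8 hex characters = 32 bits, matching the docstring above.
    return h.hexdigest()[:8]

print(short_digest("index", "main_job", "created"))
```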
@@ -43,7 +43,7 @@ register(
help_text=_('Dictionary for customizing OAuth 2 timeouts, available items are '
'`ACCESS_TOKEN_EXPIRE_SECONDS`, the duration of access tokens in the number '
'of seconds, and `AUTHORIZATION_CODE_EXPIRE_SECONDS`, the duration of '
'authorization grants in the number of seconds.'),
'authorization codes in the number of seconds.'),
category=_('Authentication'),
category_slug='authentication',
)

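The help text above names the two keys this setting honors. A purely illustrative value for such a dictionary; the setting name `OAUTH2_PROVIDER` is an assumption (the `register()` call's first argument is not visible in this hunk), and the numbers are examples only:

```python
# Hypothetical example values: access tokens last 1 hour,
# authorization codes last 10 minutes.
OAUTH2_PROVIDER = {
    'ACCESS_TOKEN_EXPIRE_SECONDS': 3600,
    'AUTHORIZATION_CODE_EXPIRE_SECONDS': 600,
}
```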
@@ -25,7 +25,6 @@ from rest_framework.filters import BaseFilterBackend
from awx.main.utils import get_type_for_model, to_python_boolean
from awx.main.utils.db import get_all_field_names
from awx.main.models.credential import CredentialType
from awx.main.models.rbac import RoleAncestorEntry


class V1CredentialFilterBackend(BaseFilterBackend):

@@ -66,7 +65,7 @@ class TypeFilterBackend(BaseFilterBackend):
model = queryset.model
model_type = get_type_for_model(model)
if 'polymorphic_ctype' in get_all_field_names(model):
types_pks = set([v for k,v in types_map.items() if k in types])
types_pks = set([v for k, v in types_map.items() if k in types])
queryset = queryset.filter(polymorphic_ctype_id__in=types_pks)
elif model_type in types:
queryset = queryset

@@ -193,7 +192,7 @@ class FieldLookupBackend(BaseFilterBackend):

def value_to_python(self, model, lookup, value):
try:
lookup = lookup.encode("ascii")
lookup.encode("ascii")
except UnicodeEncodeError:
raise ValueError("%r is not an allowed field name. Must be ascii encodable." % lookup)

@@ -347,12 +346,12 @@ class FieldLookupBackend(BaseFilterBackend):
else:
args.append(Q(**{k:v}))
for role_name in role_filters:
if not hasattr(queryset.model, 'accessible_pk_qs'):
raise ParseError(_(
'Cannot apply role_level filter to this list because its model '
'does not use roles for access control.'))
args.append(
Q(pk__in=RoleAncestorEntry.objects.filter(
ancestor__in=request.user.roles.all(),
content_type_id=ContentType.objects.get_for_model(queryset.model).id,
role_field=role_name
).values_list('object_id').distinct())
Q(pk__in=queryset.model.accessible_pk_qs(request.user, role_name))
)
if or_filters:
q = Q()

@@ -364,12 +363,12 @@ class FieldLookupBackend(BaseFilterBackend):
args.append(q)
if search_filters and search_filter_relation == 'OR':
q = Q()
for term, constrains in search_filters.iteritems():
for term, constrains in search_filters.items():
for constrain in constrains:
q |= Q(**{constrain: term})
args.append(q)
elif search_filters and search_filter_relation == 'AND':
for term, constrains in search_filters.iteritems():
for term, constrains in search_filters.items():
q_chain = Q()
for constrain in constrains:
q_chain |= Q(**{constrain: term})

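Several of the filter changes above follow the same Python 3 migration pattern: `dict.iteritems()` no longer exists, and `dict.items()` returns a view that can be iterated directly. A small standalone illustration of the pattern (not AWX code):

```python
search_filters = {"web": ["name__icontains", "description__icontains"]}

# Python 2 style, removed in Python 3:
#   for term, constrains in search_filters.iteritems(): ...

# Python 3: items() returns an iterable view object.
for term, constrains in search_filters.items():
    for constrain in constrains:
        print(term, constrain)
```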
@@ -6,7 +6,7 @@ import inspect
import logging
import time
import six
import urllib
import urllib.parse

# Django
from django.conf import settings

@@ -56,7 +56,7 @@ __all__ = ['APIView', 'GenericAPIView', 'ListAPIView', 'SimpleListAPIView',
'ParentMixin',
'DeleteLastUnattachLabelMixin',
'SubListAttachDetachAPIView',
'CopyAPIView']
'CopyAPIView', 'BaseUsersList',]

logger = logging.getLogger('awx.api.generics')
analytics_logger = logging.getLogger('awx.analytics.performance')

@@ -90,9 +90,9 @@ class LoggedLoginView(auth_views.LoginView):
logger.info(smart_text(u"User {} logged in.".format(self.request.user.username)))
ret.set_cookie('userLoggedIn', 'true')
current_user = UserSerializer(self.request.user)
current_user = JSONRenderer().render(current_user.data)
current_user = urllib.quote('%s' % current_user, '')
ret.set_cookie('current_user', current_user)
current_user = smart_text(JSONRenderer().render(current_user.data))
current_user = urllib.parse.quote('%s' % current_user, '')
ret.set_cookie('current_user', current_user, secure=settings.SESSION_COOKIE_SECURE or None)

return ret
else:

@@ -305,7 +305,7 @@ class APIView(views.APIView):
# submitted data was rejected.
request_method = getattr(self, '_raw_data_request_method', None)
response_status = getattr(self, '_raw_data_response_status', 0)
if request_method in ('POST', 'PUT', 'PATCH') and response_status in xrange(400, 500):
if request_method in ('POST', 'PUT', 'PATCH') and response_status in range(400, 500):
return self.request.data.copy()

return data

@@ -348,7 +348,7 @@ class GenericAPIView(generics.GenericAPIView, APIView):
# form.
if hasattr(self, '_raw_data_form_marker'):
# Always remove read only fields from serializer.
for name, field in serializer.fields.items():
for name, field in list(serializer.fields.items()):
if getattr(field, 'read_only', None):
del serializer.fields[name]
serializer._data = self.update_raw_data(serializer.data)

@@ -552,9 +552,8 @@ class SubListDestroyAPIView(DestroyAPIView, SubListAPIView):

def perform_list_destroy(self, instance_list):
if self.check_sub_obj_permission:
# Check permissions for all before deleting, avoiding half-deleted lists
for instance in instance_list:
if self.has_delete_permission(instance):
if not self.has_delete_permission(instance):
raise PermissionDenied()
for instance in instance_list:
self.perform_destroy(instance, check_permission=False)

@@ -749,7 +748,7 @@ class SubListAttachDetachAPIView(SubListCreateAttachDetachAPIView):
def update_raw_data(self, data):
request_method = getattr(self, '_raw_data_request_method', None)
response_status = getattr(self, '_raw_data_response_status', 0)
if request_method == 'POST' and response_status in xrange(400, 500):
if request_method == 'POST' and response_status in range(400, 500):
return super(SubListAttachDetachAPIView, self).update_raw_data(data)
return {'id': None}

@@ -990,3 +989,22 @@ class CopyAPIView(GenericAPIView):
serializer = self._get_copy_return_serializer(new_obj)
headers = {'Location': new_obj.get_absolute_url(request=request)}
return Response(serializer.data, status=status.HTTP_201_CREATED, headers=headers)


class BaseUsersList(SubListCreateAttachDetachAPIView):
def post(self, request, *args, **kwargs):
ret = super(BaseUsersList, self).post( request, *args, **kwargs)
if ret.status_code != 201:
return ret
try:
if ret.data is not None and request.data.get('is_system_auditor', False):
# This is a faux-field that just maps to checking the system
# auditor role member list.. unfortunately this means we can't
# set it on creation, and thus needs to be set here.
user = User.objects.get(id=ret.data['id'])
user.is_system_auditor = request.data['is_system_auditor']
ret.data['is_system_auditor'] = request.data['is_system_auditor']
except AttributeError as exc:
print(exc)
pass
return ret

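The generics changes above are the same Python 2 to 3 mechanics seen elsewhere in this compare: `xrange` becomes `range`, and `urllib.quote` moves to `urllib.parse.quote`. A short standalone illustration (not AWX code):

```python
import urllib.parse

response_status = 422
# range() replaces xrange(); membership tests behave the same way.
if response_status in range(400, 500):
    print("client error")

# urllib.quote() became urllib.parse.quote() in Python 3.
cookie_value = urllib.parse.quote('{"username": "admin"}', safe='')
print(cookie_value)
```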
@@ -157,7 +157,7 @@ class Metadata(metadata.SimpleMetadata):
finally:
view.request = request

for field, meta in actions[method].items():
for field, meta in list(actions[method].items()):
if not isinstance(meta, dict):
continue

@@ -5,6 +5,7 @@ import json
# Django
from django.conf import settings
from django.utils import six
from django.utils.encoding import smart_str
from django.utils.translation import ugettext_lazy as _

# Django REST Framework

@@ -25,7 +26,7 @@ class JSONParser(parsers.JSONParser):
encoding = parser_context.get('encoding', settings.DEFAULT_CHARSET)

try:
data = stream.read().decode(encoding)
data = smart_str(stream.read(), encoding=encoding)
if not data:
return {}
obj = json.loads(data, object_pairs_hook=OrderedDict)

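The parser change above swaps a manual `bytes.decode(encoding)` for Django's `smart_str`, which accepts either `bytes` or `str` and always returns text. A small sketch of the difference, assuming Django is installed:

```python
from django.utils.encoding import smart_str

raw = b'{"name": "demo"}'
# .decode() only exists on bytes; smart_str handles bytes or str alike.
assert raw.decode("utf-8") == smart_str(raw, encoding="utf-8")
assert smart_str('{"name": "demo"}', encoding="utf-8") == '{"name": "demo"}'
```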
@@ -8,7 +8,7 @@ import logging
import operator
import re
import six
import urllib
import urllib.parse
from collections import OrderedDict
from datetime import timedelta

@@ -40,6 +40,7 @@ from rest_framework.utils.serializer_helpers import ReturnList
from polymorphic.models import PolymorphicModel

# AWX
from awx.main.access import get_user_capabilities
from awx.main.constants import (
SCHEDULEABLE_PROVIDERS,
ANSI_SGR_PATTERN,

@@ -49,7 +50,6 @@ from awx.main.constants import (
)
from awx.main.models import * # noqa
from awx.main.models.base import NEW_JOB_TYPE_CHOICES
from awx.main.access import get_user_capabilities
from awx.main.fields import ImplicitRoleField
from awx.main.utils import (
get_type_for_model, get_model_for_type, timestamp_apiformat,

@@ -61,7 +61,7 @@ from awx.main.redact import UriCleaner, REPLACE_STR

from awx.main.validators import vars_validate_or_raise

from awx.conf.license import feature_enabled
from awx.conf.license import feature_enabled, LicenseForbids
from awx.api.versioning import reverse, get_request_version
from awx.api.fields import (BooleanNullField, CharNullField, ChoiceNullField,
VerbatimField, DeprecatedCredentialField)

@@ -104,7 +104,7 @@ SUMMARIZABLE_FK_FIELDS = {
'project_update': DEFAULT_SUMMARY_FIELDS + ('status', 'failed',),
'credential': DEFAULT_SUMMARY_FIELDS + ('kind', 'cloud', 'credential_type_id'),
'vault_credential': DEFAULT_SUMMARY_FIELDS + ('kind', 'cloud', 'credential_type_id'),
'job': DEFAULT_SUMMARY_FIELDS + ('status', 'failed', 'elapsed'),
'job': DEFAULT_SUMMARY_FIELDS + ('status', 'failed', 'elapsed', 'type'),
'job_template': DEFAULT_SUMMARY_FIELDS,
'workflow_job_template': DEFAULT_SUMMARY_FIELDS,
'workflow_job': DEFAULT_SUMMARY_FIELDS,

@@ -203,11 +203,11 @@ class BaseSerializerMetaclass(serializers.SerializerMetaclass):

@staticmethod
def _is_list_of_strings(x):
return isinstance(x, (list, tuple)) and all([isinstance(y, basestring) for y in x])
return isinstance(x, (list, tuple)) and all([isinstance(y, str) for y in x])

@staticmethod
def _is_extra_kwargs(x):
return isinstance(x, dict) and all([isinstance(k, basestring) and isinstance(v, dict) for k,v in x.items()])
return isinstance(x, dict) and all([isinstance(k, str) and isinstance(v, dict) for k,v in x.items()])

@classmethod
def _update_meta(cls, base, meta, other=None):

@@ -259,9 +259,7 @@ class BaseSerializerMetaclass(serializers.SerializerMetaclass):
return super(BaseSerializerMetaclass, cls).__new__(cls, name, bases, attrs)


class BaseSerializer(serializers.ModelSerializer):

__metaclass__ = BaseSerializerMetaclass
class BaseSerializer(serializers.ModelSerializer, metaclass=BaseSerializerMetaclass):

class Meta:
fields = ('id', 'type', 'url', 'related', 'summary_fields', 'created',

@@ -284,7 +282,7 @@ class BaseSerializer(serializers.ModelSerializer):
# The following lines fix the problem of being able to pass JSON dict into PrimaryKeyRelatedField.
data = kwargs.get('data', False)
if data:
for field_name, field_instance in six.iteritems(self.fields):
for field_name, field_instance in self.fields.items():
if isinstance(field_instance, ManyRelatedField) and not field_instance.read_only:
if isinstance(data.get(field_name, False), dict):
raise serializers.ValidationError(_('Cannot use dictionary for %s' % field_name))

@@ -294,7 +292,7 @@ class BaseSerializer(serializers.ModelSerializer):
"""
The request version component of the URL as an integer i.e., 1 or 2
"""
return get_request_version(self.context.get('request'))
return get_request_version(self.context.get('request')) or 1

def get_type(self, obj):
return get_type_for_model(self.Meta.model)

@@ -612,7 +610,7 @@ class BaseSerializer(serializers.ModelSerializer):
v2.extend(e)
else:
v2.append(e)
d[k] = map(force_text, v2)
d[k] = list(map(force_text, v2))
raise ValidationError(d)
return attrs

@@ -632,9 +630,7 @@ class EmptySerializer(serializers.Serializer):
pass


class BaseFactSerializer(BaseSerializer):

__metaclass__ = BaseSerializerMetaclass
class BaseFactSerializer(BaseSerializer, metaclass=BaseSerializerMetaclass):

def get_fields(self):
ret = super(BaseFactSerializer, self).get_fields()

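The `BaseSerializer` and `BaseFactSerializer` hunks above replace the Python 2 `__metaclass__` attribute, which Python 3 silently ignores, with the `metaclass=` keyword in the class definition. A minimal illustration of the syntax change with a toy metaclass (not AWX code):

```python
class UpperAttrsMeta(type):
    """Toy metaclass that upper-cases non-dunder attribute names."""
    def __new__(mcls, name, bases, attrs):
        attrs = {k if k.startswith("__") else k.upper(): v for k, v in attrs.items()}
        return super().__new__(mcls, name, bases, attrs)

# Python 2 style (no longer honored in Python 3):
#   class Widget(object):
#       __metaclass__ = UpperAttrsMeta

# Python 3 style:
class Widget(metaclass=UpperAttrsMeta):
    size = 3

print(Widget.SIZE)  # 3 -- proves the metaclass actually ran
```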
@@ -1311,16 +1307,12 @@ class ProjectSerializer(UnifiedJobTemplateSerializer, ProjectOptionsSerializer):
|
||||
'admin', 'update',
|
||||
{'copy': 'organization.project_admin'}
|
||||
]
|
||||
scm_delete_on_next_update = serializers.BooleanField(
|
||||
read_only=True,
|
||||
help_text=_('This field has been deprecated and will be removed in a future release'))
|
||||
|
||||
class Meta:
|
||||
model = Project
|
||||
fields = ('*', 'organization', 'scm_delete_on_next_update', 'scm_update_on_launch',
|
||||
fields = ('*', 'organization', 'scm_update_on_launch',
|
||||
'scm_update_cache_timeout', 'scm_revision', 'custom_virtualenv',) + \
|
||||
('last_update_failed', 'last_updated') # Backwards compatibility
|
||||
read_only_fields = ('scm_delete_on_next_update',)
|
||||
|
||||
def get_related(self, obj):
|
||||
res = super(ProjectSerializer, self).get_related(obj)
|
||||
@@ -1503,6 +1495,12 @@ class InventorySerializer(BaseSerializerWithVariables):
|
||||
'admin', 'adhoc',
|
||||
{'copy': 'organization.inventory_admin'}
|
||||
]
|
||||
groups_with_active_failures = serializers.IntegerField(
|
||||
read_only=True,
|
||||
min_value=0,
|
||||
help_text=_('This field has been deprecated and will be removed in a future release')
|
||||
)
|
||||
|
||||
|
||||
class Meta:
|
||||
model = Inventory
|
||||
@@ -1724,6 +1722,11 @@ class AnsibleFactsSerializer(BaseSerializer):
|
||||
|
||||
class GroupSerializer(BaseSerializerWithVariables):
|
||||
capabilities_prefetch = ['inventory.admin', 'inventory.adhoc']
|
||||
groups_with_active_failures = serializers.IntegerField(
|
||||
read_only=True,
|
||||
min_value=0,
|
||||
help_text=_('This field has been deprecated and will be removed in a future release')
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = Group
|
||||
@@ -2132,10 +2135,10 @@ class InventorySourceSerializer(UnifiedJobTemplateSerializer, InventorySourceOpt
|
||||
return attrs.get(fd, self.instance and getattr(self.instance, fd) or None)
|
||||
|
||||
if get_field_from_model_or_attrs('source') != 'scm':
|
||||
redundant_scm_fields = filter(
|
||||
redundant_scm_fields = list(filter(
|
||||
lambda x: attrs.get(x, None),
|
||||
['source_project', 'source_path', 'update_on_project_update']
|
||||
)
|
||||
))
|
||||
if redundant_scm_fields:
|
||||
raise serializers.ValidationError(
|
||||
{"detail": _("Cannot set %s if not SCM type." % ' '.join(redundant_scm_fields))}
|
||||
@@ -2458,17 +2461,17 @@ class CredentialTypeSerializer(BaseSerializer):
|
||||
field['help_text'] = _(field['help_text'])
|
||||
if field['type'] == 'become_method':
|
||||
field.pop('type')
|
||||
field['choices'] = map(operator.itemgetter(0), CHOICES_PRIVILEGE_ESCALATION_METHODS)
|
||||
field['choices'] = list(map(operator.itemgetter(0), CHOICES_PRIVILEGE_ESCALATION_METHODS))
|
||||
return value
|
||||
|
||||
def filter_field_metadata(self, fields, method):
|
||||
# API-created/modified CredentialType kinds are limited to
|
||||
# `cloud` and `net`
|
||||
if method in ('PUT', 'POST'):
|
||||
fields['kind']['choices'] = filter(
|
||||
fields['kind']['choices'] = list(filter(
|
||||
lambda choice: choice[0] in ('cloud', 'net'),
|
||||
fields['kind']['choices']
|
||||
)
|
||||
))
|
||||
return fields
|
||||
|
||||
|
||||
@@ -2619,8 +2622,8 @@ class CredentialSerializer(BaseSerializer):
|
||||
raise serializers.ValidationError({"kind": _('"%s" is not a valid choice' % kind)})
|
||||
data['credential_type'] = credential_type.pk
|
||||
value = OrderedDict(
|
||||
{'credential_type': credential_type}.items() +
|
||||
super(CredentialSerializer, self).to_internal_value(data).items()
|
||||
list({'credential_type': credential_type}.items()) +
|
||||
list(super(CredentialSerializer, self).to_internal_value(data).items())
|
||||
)
|
||||
|
||||
# Make a set of the keys in the POST/PUT payload
|
||||
@@ -2976,12 +2979,16 @@ class JobTemplateMixin(object):
|
||||
'''
|
||||
|
||||
def _recent_jobs(self, obj):
|
||||
if hasattr(obj, 'workflow_jobs'):
|
||||
job_mgr = obj.workflow_jobs
|
||||
else:
|
||||
job_mgr = obj.jobs
|
||||
return [{'id': x.id, 'status': x.status, 'finished': x.finished}
|
||||
for x in job_mgr.all().order_by('-created')[:10]]
|
||||
# Exclude "joblets", jobs that ran as part of a sliced workflow job
|
||||
uj_qs = obj.unifiedjob_unified_jobs.exclude(job__job_slice_count__gt=1).order_by('-created')
|
||||
# Would like to apply an .only, but does not play well with non_polymorphic
|
||||
# .only('id', 'status', 'finished', 'polymorphic_ctype_id')
|
||||
optimized_qs = uj_qs.non_polymorphic()
|
||||
return [{
|
||||
'id': x.id, 'status': x.status, 'finished': x.finished,
|
||||
# Make type consistent with API top-level key, for instance workflow_job
|
||||
'type': x.get_real_instance_class()._meta.verbose_name.replace(' ', '_')
|
||||
} for x in optimized_qs[:10]]
|
||||
|
||||
def get_summary_fields(self, obj):
|
||||
d = super(JobTemplateMixin, self).get_summary_fields(obj)
|
||||
@@ -3050,7 +3057,7 @@ class JobTemplateSerializer(JobTemplateMixin, UnifiedJobTemplateSerializer, JobO
|
||||
|
||||
prompting_error_message = _("Must either set a default value or ask to prompt on launch.")
|
||||
if project is None:
|
||||
raise serializers.ValidationError({'project': _("Job types 'run' and 'check' must have assigned a project.")})
|
||||
raise serializers.ValidationError({'project': _("Job Templates must have a project assigned.")})
|
||||
elif inventory is None and not get_field_from_model_or_attrs('ask_inventory_on_launch'):
|
||||
raise serializers.ValidationError({'inventory': prompting_error_message})
|
||||
|
||||
@@ -3059,6 +3066,13 @@ class JobTemplateSerializer(JobTemplateMixin, UnifiedJobTemplateSerializer, JobO
|
||||
def validate_extra_vars(self, value):
|
||||
return vars_validate_or_raise(value)
|
||||
|
||||
def validate_job_slice_count(self, value):
|
||||
if value > 1 and not feature_enabled('workflows'):
|
||||
raise LicenseForbids({'job_slice_count': [_(
|
||||
"Job slicing is a workflows-based feature and your license does not allow use of workflows."
|
||||
)]})
|
||||
return value
|
||||
|
||||
def get_summary_fields(self, obj):
|
||||
summary_fields = super(JobTemplateSerializer, self).get_summary_fields(obj)
|
||||
all_creds = []
|
||||
@@ -3103,19 +3117,46 @@ class JobTemplateSerializer(JobTemplateMixin, UnifiedJobTemplateSerializer, JobO
|
||||
return summary_fields
|
||||
|
||||
|
||||
class JobTemplateWithSpecSerializer(JobTemplateSerializer):
|
||||
'''
|
||||
Used for activity stream entries.
|
||||
'''
|
||||
|
||||
class Meta:
|
||||
model = JobTemplate
|
||||
fields = ('*', 'survey_spec')
|
||||
|
||||
|
||||
class JobSerializer(UnifiedJobSerializer, JobOptionsSerializer):
|
||||
|
||||
passwords_needed_to_start = serializers.ReadOnlyField()
|
||||
ask_diff_mode_on_launch = serializers.ReadOnlyField()
|
||||
ask_variables_on_launch = serializers.ReadOnlyField()
|
||||
ask_limit_on_launch = serializers.ReadOnlyField()
|
||||
ask_skip_tags_on_launch = serializers.ReadOnlyField()
|
||||
ask_tags_on_launch = serializers.ReadOnlyField()
|
||||
ask_job_type_on_launch = serializers.ReadOnlyField()
|
||||
ask_verbosity_on_launch = serializers.ReadOnlyField()
|
||||
ask_inventory_on_launch = serializers.ReadOnlyField()
|
||||
ask_credential_on_launch = serializers.ReadOnlyField()
|
||||
ask_diff_mode_on_launch = serializers.BooleanField(
|
||||
read_only=True,
|
||||
help_text=_('This field has been deprecated and will be removed in a future release'))
|
||||
ask_variables_on_launch = serializers.BooleanField(
|
||||
read_only=True,
|
||||
help_text=_('This field has been deprecated and will be removed in a future release'))
|
||||
ask_limit_on_launch = serializers.BooleanField(
|
||||
read_only=True,
|
||||
help_text=_('This field has been deprecated and will be removed in a future release'))
|
||||
ask_skip_tags_on_launch = serializers.BooleanField(
|
||||
read_only=True,
|
||||
help_text=_('This field has been deprecated and will be removed in a future release'))
|
||||
ask_tags_on_launch = serializers.BooleanField(
|
||||
read_only=True,
|
||||
help_text=_('This field has been deprecated and will be removed in a future release'))
|
||||
ask_job_type_on_launch = serializers.BooleanField(
|
||||
read_only=True,
|
||||
help_text=_('This field has been deprecated and will be removed in a future release'))
|
||||
ask_verbosity_on_launch = serializers.BooleanField(
|
||||
read_only=True,
|
||||
help_text=_('This field has been deprecated and will be removed in a future release'))
|
||||
ask_inventory_on_launch = serializers.BooleanField(
|
||||
read_only=True,
|
||||
help_text=_('This field has been deprecated and will be removed in a future release'))
|
||||
ask_credential_on_launch = serializers.BooleanField(
|
||||
read_only=True,
|
||||
help_text=_('This field has been deprecated and will be removed in a future release'))
|
||||
artifacts = serializers.SerializerMethodField()
|
||||
|
||||
class Meta:
|
||||
@@ -3442,12 +3483,16 @@ class AdHocCommandSerializer(UnifiedJobSerializer):
|
||||
ret['name'] = obj.module_name
|
||||
return ret
|
||||
|
||||
def validate(self, attrs):
|
||||
ret = super(AdHocCommandSerializer, self).validate(attrs)
|
||||
return ret
|
||||
|
||||
def validate_extra_vars(self, value):
|
||||
redacted_extra_vars, removed_vars = extract_ansible_vars(value)
|
||||
if removed_vars:
|
||||
raise serializers.ValidationError(_(
|
||||
"{} are prohibited from use in ad hoc commands."
|
||||
).format(", ".join(removed_vars)))
|
||||
).format(", ".join(sorted(removed_vars, reverse=True))))
|
||||
return vars_validate_or_raise(value)
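The sorted(..., reverse=True) variant above exists because iterating a set of removed variable names yields an arbitrary order, so the error text could differ between runs; sorting makes it deterministic. A stand-alone sketch with made-up values:

removed_vars = {'ansible_user', 'ansible_connection'}          # hypothetical extracted names
", ".join(removed_vars)                                        # order can vary per process
", ".join(sorted(removed_vars, reverse=True))                  # always 'ansible_user, ansible_connection'
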
|
||||
|
||||
|
||||
@@ -3558,7 +3603,7 @@ class WorkflowJobTemplateSerializer(JobTemplateMixin, LabelsListMixin, UnifiedJo
|
||||
class Meta:
|
||||
model = WorkflowJobTemplate
|
||||
fields = ('*', 'extra_vars', 'organization', 'survey_enabled', 'allow_simultaneous',
|
||||
'ask_variables_on_launch',)
|
||||
'ask_variables_on_launch', 'inventory', 'ask_inventory_on_launch',)
|
||||
|
||||
def get_related(self, obj):
|
||||
res = super(WorkflowJobTemplateSerializer, self).get_related(obj)
|
||||
@@ -3586,13 +3631,24 @@ class WorkflowJobTemplateSerializer(JobTemplateMixin, LabelsListMixin, UnifiedJo
|
||||
return vars_validate_or_raise(value)
|
||||
|
||||
|
||||
class WorkflowJobTemplateWithSpecSerializer(WorkflowJobTemplateSerializer):
|
||||
'''
|
||||
Used for activity stream entries.
|
||||
'''
|
||||
|
||||
class Meta:
|
||||
model = WorkflowJobTemplate
|
||||
fields = ('*', 'survey_spec')
|
||||
|
||||
|
||||
class WorkflowJobSerializer(LabelsListMixin, UnifiedJobSerializer):
|
||||
|
||||
class Meta:
|
||||
model = WorkflowJob
|
||||
fields = ('*', 'workflow_job_template', 'extra_vars', 'allow_simultaneous',
|
||||
'job_template', 'is_sliced_job',
|
||||
'-execution_node', '-event_processing_finished', '-controller_node',)
|
||||
'-execution_node', '-event_processing_finished', '-controller_node',
|
||||
'inventory',)
|
||||
|
||||
def get_related(self, obj):
|
||||
res = super(WorkflowJobSerializer, self).get_related(obj)
|
||||
@@ -3664,7 +3720,7 @@ class LaunchConfigurationBaseSerializer(BaseSerializer):
|
||||
for field in self.instance._meta.fields:
|
||||
setattr(mock_obj, field.name, getattr(self.instance, field.name))
|
||||
field_names = set(field.name for field in self.Meta.model._meta.fields)
|
||||
for field_name, value in attrs.items():
|
||||
for field_name, value in list(attrs.items()):
|
||||
setattr(mock_obj, field_name, value)
|
||||
if field_name not in field_names:
|
||||
attrs.pop(field_name)
|
||||
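The switch to list(attrs.items()) in the hunk above is required in Python 3: a dict view cannot be iterated while keys are popped from the dict. A minimal reproduction of the problem and the fix:

attrs = {'credential': 1, 'unexpected_field': 2}
# for k, v in attrs.items(): attrs.pop('unexpected_field')     # RuntimeError: dict changed size during iteration
for field_name, value in list(attrs.items()):                  # snapshot the items first, then mutate safely
    if field_name == 'unexpected_field':
        attrs.pop(field_name)
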
@@ -3675,7 +3731,7 @@ class LaunchConfigurationBaseSerializer(BaseSerializer):
|
||||
if obj is None:
|
||||
return ret
|
||||
if 'extra_data' in ret and obj.survey_passwords:
|
||||
ret['extra_data'] = obj.display_extra_data()
|
||||
ret['extra_data'] = obj.display_extra_vars()
|
||||
return ret
|
||||
|
||||
def get_summary_fields(self, obj):
|
||||
@@ -3816,9 +3872,6 @@ class WorkflowJobTemplateNodeSerializer(LaunchConfigurationBaseSerializer):
|
||||
ujt_obj = attrs['unified_job_template']
|
||||
elif self.instance:
|
||||
ujt_obj = self.instance.unified_job_template
|
||||
if isinstance(ujt_obj, (WorkflowJobTemplate)):
|
||||
raise serializers.ValidationError({
|
||||
"unified_job_template": _("Cannot nest a %s inside a WorkflowJobTemplate") % ujt_obj.__class__.__name__})
|
||||
if 'credential' in deprecated_fields: # TODO: remove when v2 API is deprecated
|
||||
cred = deprecated_fields['credential']
|
||||
attrs['credential'] = cred
|
||||
@@ -3867,7 +3920,8 @@ class WorkflowJobNodeSerializer(LaunchConfigurationBaseSerializer):
|
||||
class Meta:
|
||||
model = WorkflowJobNode
|
||||
fields = ('*', 'credential', 'job', 'workflow_job', '-name', '-description', 'id', 'url', 'related',
|
||||
'unified_job_template', 'success_nodes', 'failure_nodes', 'always_nodes',)
|
||||
'unified_job_template', 'success_nodes', 'failure_nodes', 'always_nodes',
|
||||
'do_not_run',)
|
||||
|
||||
def get_related(self, obj):
|
||||
res = super(WorkflowJobNodeSerializer, self).get_related(obj)
|
||||
@@ -4280,7 +4334,7 @@ class JobLaunchSerializer(BaseSerializer):
|
||||
passwords_needed=cred.passwords_needed
|
||||
)
|
||||
if cred.credential_type.managed_by_tower and 'vault_id' in cred.credential_type.defined_fields:
|
||||
cred_dict['vault_id'] = cred.inputs.get('vault_id') or None
|
||||
cred_dict['vault_id'] = cred.get_input('vault_id', default=None)
|
||||
defaults_dict.setdefault(field_name, []).append(cred_dict)
|
||||
else:
|
||||
defaults_dict[field_name] = getattr(obj, field_name)
|
||||
@@ -4369,37 +4423,63 @@ class JobLaunchSerializer(BaseSerializer):
|
||||
class WorkflowJobLaunchSerializer(BaseSerializer):
|
||||
|
||||
can_start_without_user_input = serializers.BooleanField(read_only=True)
|
||||
defaults = serializers.SerializerMethodField()
|
||||
variables_needed_to_start = serializers.ReadOnlyField()
|
||||
survey_enabled = serializers.SerializerMethodField()
|
||||
extra_vars = VerbatimField(required=False, write_only=True)
|
||||
inventory = serializers.PrimaryKeyRelatedField(
|
||||
queryset=Inventory.objects.all(),
|
||||
required=False, write_only=True
|
||||
)
|
||||
workflow_job_template_data = serializers.SerializerMethodField()
|
||||
|
||||
class Meta:
|
||||
model = WorkflowJobTemplate
|
||||
fields = ('can_start_without_user_input', 'extra_vars',
|
||||
'survey_enabled', 'variables_needed_to_start',
|
||||
fields = ('ask_inventory_on_launch', 'can_start_without_user_input', 'defaults', 'extra_vars',
|
||||
'inventory', 'survey_enabled', 'variables_needed_to_start',
|
||||
'node_templates_missing', 'node_prompts_rejected',
|
||||
'workflow_job_template_data')
|
||||
'workflow_job_template_data', 'survey_enabled')
|
||||
read_only_fields = ('ask_inventory_on_launch',)
|
||||
|
||||
def get_survey_enabled(self, obj):
|
||||
if obj:
|
||||
return obj.survey_enabled and 'spec' in obj.survey_spec
|
||||
return False
|
||||
|
||||
def get_defaults(self, obj):
|
||||
defaults_dict = {}
|
||||
for field_name in WorkflowJobTemplate.get_ask_mapping().keys():
|
||||
if field_name == 'inventory':
|
||||
defaults_dict[field_name] = dict(
|
||||
name=getattrd(obj, '%s.name' % field_name, None),
|
||||
id=getattrd(obj, '%s.pk' % field_name, None))
|
||||
else:
|
||||
defaults_dict[field_name] = getattr(obj, field_name)
|
||||
return defaults_dict
|
||||
|
||||
def get_workflow_job_template_data(self, obj):
|
||||
return dict(name=obj.name, id=obj.id, description=obj.description)
|
||||
|
||||
def validate(self, attrs):
|
||||
obj = self.instance
|
||||
template = self.instance
|
||||
|
||||
accepted, rejected, errors = obj._accept_or_ignore_job_kwargs(
|
||||
_exclude_errors=['required'],
|
||||
**attrs)
|
||||
accepted, rejected, errors = template._accept_or_ignore_job_kwargs(**attrs)
|
||||
self._ignored_fields = rejected
|
||||
|
||||
WFJT_extra_vars = obj.extra_vars
|
||||
attrs = super(WorkflowJobLaunchSerializer, self).validate(attrs)
|
||||
obj.extra_vars = WFJT_extra_vars
|
||||
return attrs
|
||||
if template.inventory and template.inventory.pending_deletion is True:
|
||||
errors['inventory'] = _("The inventory associated with this Workflow is being deleted.")
|
||||
elif 'inventory' in accepted and accepted['inventory'].pending_deletion:
|
||||
errors['inventory'] = _("The provided inventory is being deleted.")
|
||||
|
||||
if errors:
|
||||
raise serializers.ValidationError(errors)
|
||||
|
||||
WFJT_extra_vars = template.extra_vars
|
||||
WFJT_inventory = template.inventory
|
||||
super(WorkflowJobLaunchSerializer, self).validate(attrs)
|
||||
template.extra_vars = WFJT_extra_vars
|
||||
template.inventory = WFJT_inventory
|
||||
return accepted
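The WFJT_extra_vars/WFJT_inventory pair above captures and restores the template's own values around the parent validate() call so prompted launch values never stick to the template object. Expressed as a reusable helper (names are hypothetical; this is a sketch of the idea rather than AWX's implementation):

from contextlib import contextmanager

@contextmanager
def preserve_attrs(obj, *names):
    # Remember selected attributes, run the wrapped block, then put the originals back.
    saved = {name: getattr(obj, name) for name in names}
    try:
        yield obj
    finally:
        for name, value in saved.items():
            setattr(obj, name, value)

# usage sketch:
# with preserve_attrs(template, 'extra_vars', 'inventory'):
#     run_validation_that_may_touch_the_instance(template)
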
|
||||
|
||||
|
||||
class NotificationTemplateSerializer(BaseSerializer):
|
||||
@@ -4410,11 +4490,11 @@ class NotificationTemplateSerializer(BaseSerializer):
|
||||
model = NotificationTemplate
|
||||
fields = ('*', 'organization', 'notification_type', 'notification_configuration')
|
||||
|
||||
type_map = {"string": (str, unicode),
|
||||
type_map = {"string": (str,),
|
||||
"int": (int,),
|
||||
"bool": (bool,),
|
||||
"list": (list,),
|
||||
"password": (str, unicode),
|
||||
"password": (str,),
|
||||
"object": (dict, OrderedDict)}
|
||||
|
||||
def to_representation(self, obj):
|
||||
@@ -4618,6 +4698,23 @@ class ScheduleSerializer(LaunchConfigurationBaseSerializer, SchedulePreviewSeria
|
||||
res['inventory'] = obj.unified_job_template.inventory.get_absolute_url(self.context.get('request'))
|
||||
return res
|
||||
|
||||
def get_summary_fields(self, obj):
|
||||
summary_fields = super(ScheduleSerializer, self).get_summary_fields(obj)
|
||||
if 'inventory' in summary_fields:
|
||||
return summary_fields
|
||||
|
||||
inventory = None
|
||||
if obj.unified_job_template and getattr(obj.unified_job_template, 'inventory', None):
|
||||
inventory = obj.unified_job_template.inventory
|
||||
else:
|
||||
return summary_fields
|
||||
|
||||
summary_fields['inventory'] = dict()
|
||||
for field in SUMMARIZABLE_FK_FIELDS['inventory']:
|
||||
summary_fields['inventory'][field] = getattr(inventory, field, None)
|
||||
|
||||
return summary_fields
|
||||
|
||||
def validate_unified_job_template(self, value):
|
||||
if type(value) == InventorySource and value.source not in SCHEDULEABLE_PROVIDERS:
|
||||
raise serializers.ValidationError(_('Inventory Source must be a cloud resource.'))
|
||||
@@ -4780,7 +4877,7 @@ class ActivityStreamSerializer(BaseSerializer):
|
||||
for key in summary_dict.keys():
|
||||
if 'id' not in summary_dict[key]:
|
||||
summary_dict[key] = summary_dict[key] + ('id',)
|
||||
field_list = summary_dict.items()
|
||||
field_list = list(summary_dict.items())
|
||||
# Needed related fields that are not in the default summary fields
|
||||
field_list += [
|
||||
('workflow_job_template_node', ('id', 'unified_job_template_id')),
|
||||
@@ -4800,7 +4897,7 @@ class ActivityStreamSerializer(BaseSerializer):
|
||||
|
||||
def get_fields(self):
|
||||
ret = super(ActivityStreamSerializer, self).get_fields()
|
||||
for key, field in ret.items():
|
||||
for key, field in list(ret.items()):
|
||||
if key == 'changes':
|
||||
field.help_text = _('A summary of the new and changed values when an object is created, updated, or deleted')
|
||||
if key == 'object1':
|
||||
@@ -4841,10 +4938,6 @@ class ActivityStreamSerializer(BaseSerializer):
|
||||
|
||||
def get_related(self, obj):
|
||||
rel = {}
|
||||
VIEW_NAME_EXCEPTIONS = {
|
||||
'custom_inventory_script': 'inventory_script_detail',
|
||||
'o_auth2_access_token': 'o_auth2_token_detail'
|
||||
}
|
||||
if obj.actor is not None:
|
||||
rel['actor'] = self.reverse('api:user_detail', kwargs={'pk': obj.actor.pk})
|
||||
for fk, __ in self._local_summarizable_fk_fields:
|
||||
@@ -4858,11 +4951,12 @@ class ActivityStreamSerializer(BaseSerializer):
|
||||
if getattr(thisItem, 'id', None) in id_list:
|
||||
continue
|
||||
id_list.append(getattr(thisItem, 'id', None))
|
||||
if fk in VIEW_NAME_EXCEPTIONS:
|
||||
view_name = VIEW_NAME_EXCEPTIONS[fk]
|
||||
if hasattr(thisItem, 'get_absolute_url'):
|
||||
rel_url = thisItem.get_absolute_url(self.context.get('request'))
|
||||
else:
|
||||
view_name = fk + '_detail'
|
||||
rel[fk].append(self.reverse('api:' + view_name, kwargs={'pk': thisItem.id}))
|
||||
rel_url = self.reverse('api:' + view_name, kwargs={'pk': thisItem.id})
|
||||
rel[fk].append(rel_url)
|
||||
|
||||
if fk == 'schedule':
|
||||
rel['unified_job_template'] = thisItem.unified_job_template.get_absolute_url(self.context.get('request'))
|
||||
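The change in this hunk appears to prefer the related object's own get_absolute_url() and only fall back to reversing a '<name>_detail' view; read generically, and only as a sketch with placeholder names, the shape is:

def related_url(item, fk, reverse_fn, request=None, exceptions=None):
    # Use the model's canonical URL when it has one; otherwise guess '<fk>_detail'.
    if hasattr(item, 'get_absolute_url'):
        return item.get_absolute_url(request)
    view_name = (exceptions or {}).get(fk, fk + '_detail')
    return reverse_fn('api:' + view_name, kwargs={'pk': item.id})
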
@@ -4945,7 +5039,7 @@ class FactVersionSerializer(BaseFactSerializer):
|
||||
}
|
||||
res['fact_view'] = '%s?%s' % (
|
||||
reverse('api:host_fact_compare_view', kwargs={'pk': obj.host.pk}, request=self.context.get('request')),
|
||||
urllib.urlencode(params)
|
||||
urllib.parse.urlencode(params)
|
||||
)
|
||||
return res
|
||||
|
||||
|
||||
@@ -8,8 +8,8 @@ import dateutil
|
||||
import time
|
||||
import socket
|
||||
import sys
|
||||
import logging
|
||||
import requests
|
||||
import functools
|
||||
from base64 import b64encode
|
||||
from collections import OrderedDict, Iterable
|
||||
import six
|
||||
@@ -18,14 +18,12 @@ import six
|
||||
# Django
|
||||
from django.conf import settings
|
||||
from django.core.exceptions import FieldError, ObjectDoesNotExist
|
||||
from django.db.models import Q, Count
|
||||
from django.db.models import Q, Sum
|
||||
from django.db import IntegrityError, transaction, connection
|
||||
from django.shortcuts import get_object_or_404
|
||||
from django.utils.encoding import smart_text
|
||||
from django.utils.safestring import mark_safe
|
||||
from django.utils.timezone import now
|
||||
from django.utils.decorators import method_decorator
|
||||
from django.views.decorators.csrf import csrf_exempt, ensure_csrf_cookie
|
||||
from django.views.decorators.csrf import csrf_exempt
|
||||
from django.template.loader import render_to_string
|
||||
from django.http import HttpResponse
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
@@ -35,7 +33,7 @@ from django.utils.translation import ugettext_lazy as _
|
||||
# Django REST Framework
|
||||
from rest_framework.exceptions import PermissionDenied, ParseError
|
||||
from rest_framework.parsers import FormParser
|
||||
from rest_framework.permissions import AllowAny, IsAuthenticated, SAFE_METHODS
|
||||
from rest_framework.permissions import AllowAny, IsAuthenticated
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.settings import api_settings
|
||||
from rest_framework.views import exception_handler
|
||||
@@ -63,17 +61,15 @@ from wsgiref.util import FileWrapper
|
||||
# AWX
|
||||
from awx.main.tasks import send_notifications
|
||||
from awx.main.access import get_user_queryset
|
||||
from awx.main.ha import is_ha_environment
|
||||
from awx.api.filters import V1CredentialFilterBackend
|
||||
from awx.api.generics import get_view_name
|
||||
from awx.api.generics import * # noqa
|
||||
from awx.api.versioning import reverse, get_request_version, drf_reverse
|
||||
from awx.conf.license import get_license, feature_enabled, feature_exists, LicenseForbids
|
||||
from awx.api.versioning import reverse, get_request_version
|
||||
from awx.conf.license import feature_enabled, feature_exists, LicenseForbids, get_license
|
||||
from awx.main.models import * # noqa
|
||||
from awx.main.utils import * # noqa
|
||||
from awx.main.utils import (
|
||||
extract_ansible_vars,
|
||||
decrypt_field,
|
||||
)
|
||||
from awx.main.utils.encryption import encrypt_value
|
||||
from awx.main.utils.filters import SmartFilter
|
||||
@@ -91,7 +87,7 @@ from awx.api.renderers import * # noqa
|
||||
from awx.api.serializers import * # noqa
|
||||
from awx.api.metadata import RoleMetadata, JobTypeMetadata
|
||||
from awx.main.constants import ACTIVE_STATES
|
||||
from awx.main.scheduler.tasks import run_job_complete
|
||||
from awx.main.scheduler.dag_workflow import WorkflowDAG
|
||||
from awx.api.views.mixin import (
|
||||
ActivityStreamEnforcementMixin,
|
||||
SystemTrackingEnforcementMixin,
|
||||
@@ -100,7 +96,53 @@ from awx.api.views.mixin import (
|
||||
InstanceGroupMembershipMixin,
|
||||
RelatedJobsPreventDeleteMixin,
|
||||
OrganizationCountsMixin,
|
||||
ControlledByScmMixin,
|
||||
)
|
||||
from awx.api.views.organization import ( # noqa
|
||||
OrganizationList,
|
||||
OrganizationDetail,
|
||||
OrganizationInventoriesList,
|
||||
OrganizationUsersList,
|
||||
OrganizationAdminsList,
|
||||
OrganizationProjectsList,
|
||||
OrganizationWorkflowJobTemplatesList,
|
||||
OrganizationTeamsList,
|
||||
OrganizationActivityStreamList,
|
||||
OrganizationNotificationTemplatesList,
|
||||
OrganizationNotificationTemplatesAnyList,
|
||||
OrganizationNotificationTemplatesErrorList,
|
||||
OrganizationNotificationTemplatesSuccessList,
|
||||
OrganizationInstanceGroupsList,
|
||||
OrganizationAccessList,
|
||||
OrganizationObjectRolesList,
|
||||
)
|
||||
from awx.api.views.inventory import ( # noqa
|
||||
InventoryList,
|
||||
InventoryDetail,
|
||||
InventoryUpdateEventsList,
|
||||
InventoryScriptList,
|
||||
InventoryScriptDetail,
|
||||
InventoryScriptObjectRolesList,
|
||||
InventoryScriptCopy,
|
||||
InventoryList,
|
||||
InventoryDetail,
|
||||
InventoryActivityStreamList,
|
||||
InventoryInstanceGroupsList,
|
||||
InventoryAccessList,
|
||||
InventoryObjectRolesList,
|
||||
InventoryJobTemplateList,
|
||||
InventoryCopy,
|
||||
)
|
||||
from awx.api.views.root import ( # noqa
|
||||
ApiRootView,
|
||||
ApiOAuthAuthorizationRootView,
|
||||
ApiVersionRootView,
|
||||
ApiV1RootView,
|
||||
ApiV2RootView,
|
||||
ApiV1PingView,
|
||||
ApiV1ConfigView,
|
||||
)
|
||||
|
||||
|
||||
logger = logging.getLogger('awx.api.views')
|
||||
|
||||
@@ -118,244 +160,6 @@ def api_exception_handler(exc, context):
|
||||
return exception_handler(exc, context)
|
||||
|
||||
|
||||
class ApiRootView(APIView):
|
||||
|
||||
permission_classes = (AllowAny,)
|
||||
view_name = _('REST API')
|
||||
versioning_class = None
|
||||
swagger_topic = 'Versioning'
|
||||
|
||||
@method_decorator(ensure_csrf_cookie)
|
||||
def get(self, request, format=None):
|
||||
''' List supported API versions '''
|
||||
|
||||
v1 = reverse('api:api_v1_root_view', kwargs={'version': 'v1'})
|
||||
v2 = reverse('api:api_v2_root_view', kwargs={'version': 'v2'})
|
||||
data = OrderedDict()
|
||||
data['description'] = _('AWX REST API')
|
||||
data['current_version'] = v2
|
||||
data['available_versions'] = dict(v1 = v1, v2 = v2)
|
||||
data['oauth2'] = drf_reverse('api:oauth_authorization_root_view')
|
||||
if feature_enabled('rebranding'):
|
||||
data['custom_logo'] = settings.CUSTOM_LOGO
|
||||
data['custom_login_info'] = settings.CUSTOM_LOGIN_INFO
|
||||
return Response(data)
|
||||
|
||||
|
||||
class ApiOAuthAuthorizationRootView(APIView):
|
||||
|
||||
permission_classes = (AllowAny,)
|
||||
view_name = _("API OAuth 2 Authorization Root")
|
||||
versioning_class = None
|
||||
swagger_topic = 'Authentication'
|
||||
|
||||
def get(self, request, format=None):
|
||||
data = OrderedDict()
|
||||
data['authorize'] = drf_reverse('api:authorize')
|
||||
data['token'] = drf_reverse('api:token')
|
||||
data['revoke_token'] = drf_reverse('api:revoke-token')
|
||||
return Response(data)
|
||||
|
||||
|
||||
class ApiVersionRootView(APIView):
|
||||
|
||||
permission_classes = (AllowAny,)
|
||||
swagger_topic = 'Versioning'
|
||||
|
||||
def get(self, request, format=None):
|
||||
''' List top level resources '''
|
||||
data = OrderedDict()
|
||||
data['ping'] = reverse('api:api_v1_ping_view', request=request)
|
||||
data['instances'] = reverse('api:instance_list', request=request)
|
||||
data['instance_groups'] = reverse('api:instance_group_list', request=request)
|
||||
data['config'] = reverse('api:api_v1_config_view', request=request)
|
||||
data['settings'] = reverse('api:setting_category_list', request=request)
|
||||
data['me'] = reverse('api:user_me_list', request=request)
|
||||
data['dashboard'] = reverse('api:dashboard_view', request=request)
|
||||
data['organizations'] = reverse('api:organization_list', request=request)
|
||||
data['users'] = reverse('api:user_list', request=request)
|
||||
data['projects'] = reverse('api:project_list', request=request)
|
||||
data['project_updates'] = reverse('api:project_update_list', request=request)
|
||||
data['teams'] = reverse('api:team_list', request=request)
|
||||
data['credentials'] = reverse('api:credential_list', request=request)
|
||||
if get_request_version(request) > 1:
|
||||
data['credential_types'] = reverse('api:credential_type_list', request=request)
|
||||
data['applications'] = reverse('api:o_auth2_application_list', request=request)
|
||||
data['tokens'] = reverse('api:o_auth2_token_list', request=request)
|
||||
data['inventory'] = reverse('api:inventory_list', request=request)
|
||||
data['inventory_scripts'] = reverse('api:inventory_script_list', request=request)
|
||||
data['inventory_sources'] = reverse('api:inventory_source_list', request=request)
|
||||
data['inventory_updates'] = reverse('api:inventory_update_list', request=request)
|
||||
data['groups'] = reverse('api:group_list', request=request)
|
||||
data['hosts'] = reverse('api:host_list', request=request)
|
||||
data['job_templates'] = reverse('api:job_template_list', request=request)
|
||||
data['jobs'] = reverse('api:job_list', request=request)
|
||||
data['job_events'] = reverse('api:job_event_list', request=request)
|
||||
data['ad_hoc_commands'] = reverse('api:ad_hoc_command_list', request=request)
|
||||
data['system_job_templates'] = reverse('api:system_job_template_list', request=request)
|
||||
data['system_jobs'] = reverse('api:system_job_list', request=request)
|
||||
data['schedules'] = reverse('api:schedule_list', request=request)
|
||||
data['roles'] = reverse('api:role_list', request=request)
|
||||
data['notification_templates'] = reverse('api:notification_template_list', request=request)
|
||||
data['notifications'] = reverse('api:notification_list', request=request)
|
||||
data['labels'] = reverse('api:label_list', request=request)
|
||||
data['unified_job_templates'] = reverse('api:unified_job_template_list', request=request)
|
||||
data['unified_jobs'] = reverse('api:unified_job_list', request=request)
|
||||
data['activity_stream'] = reverse('api:activity_stream_list', request=request)
|
||||
data['workflow_job_templates'] = reverse('api:workflow_job_template_list', request=request)
|
||||
data['workflow_jobs'] = reverse('api:workflow_job_list', request=request)
|
||||
data['workflow_job_template_nodes'] = reverse('api:workflow_job_template_node_list', request=request)
|
||||
data['workflow_job_nodes'] = reverse('api:workflow_job_node_list', request=request)
|
||||
return Response(data)
|
||||
|
||||
|
||||
class ApiV1RootView(ApiVersionRootView):
|
||||
view_name = _('Version 1')
|
||||
|
||||
|
||||
class ApiV2RootView(ApiVersionRootView):
|
||||
view_name = _('Version 2')
|
||||
|
||||
|
||||
class ApiV1PingView(APIView):
|
||||
"""A simple view that reports very basic information about this
|
||||
instance, which is acceptable to be public information.
|
||||
"""
|
||||
permission_classes = (AllowAny,)
|
||||
authentication_classes = ()
|
||||
view_name = _('Ping')
|
||||
swagger_topic = 'System Configuration'
|
||||
|
||||
def get(self, request, format=None):
|
||||
"""Return some basic information about this instance
|
||||
|
||||
Everything returned here should be considered public / insecure, as
|
||||
this requires no auth and is intended for use by the installer process.
|
||||
"""
|
||||
response = {
|
||||
'ha': is_ha_environment(),
|
||||
'version': get_awx_version(),
|
||||
'active_node': settings.CLUSTER_HOST_ID,
|
||||
}
|
||||
|
||||
response['instances'] = []
|
||||
for instance in Instance.objects.all():
|
||||
response['instances'].append(dict(node=instance.hostname, heartbeat=instance.modified,
|
||||
capacity=instance.capacity, version=instance.version))
|
||||
response['instances'].sort()
|
||||
response['instance_groups'] = []
|
||||
for instance_group in InstanceGroup.objects.all():
|
||||
response['instance_groups'].append(dict(name=instance_group.name,
|
||||
capacity=instance_group.capacity,
|
||||
instances=[x.hostname for x in instance_group.instances.all()]))
|
||||
return Response(response)
|
||||
|
||||
|
||||
class ApiV1ConfigView(APIView):
|
||||
|
||||
permission_classes = (IsAuthenticated,)
|
||||
view_name = _('Configuration')
|
||||
swagger_topic = 'System Configuration'
|
||||
|
||||
def check_permissions(self, request):
|
||||
super(ApiV1ConfigView, self).check_permissions(request)
|
||||
if not request.user.is_superuser and request.method.lower() not in {'options', 'head', 'get'}:
|
||||
self.permission_denied(request) # Raises PermissionDenied exception.
|
||||
|
||||
def get(self, request, format=None):
|
||||
'''Return various sitewide configuration settings'''
|
||||
|
||||
if request.user.is_superuser or request.user.is_system_auditor:
|
||||
license_data = get_license(show_key=True)
|
||||
else:
|
||||
license_data = get_license(show_key=False)
|
||||
if not license_data.get('valid_key', False):
|
||||
license_data = {}
|
||||
if license_data and 'features' in license_data and 'activity_streams' in license_data['features']:
|
||||
# FIXME: Make the final setting value dependent on the feature?
|
||||
license_data['features']['activity_streams'] &= settings.ACTIVITY_STREAM_ENABLED
|
||||
|
||||
pendo_state = settings.PENDO_TRACKING_STATE if settings.PENDO_TRACKING_STATE in ('off', 'anonymous', 'detailed') else 'off'
|
||||
|
||||
data = dict(
|
||||
time_zone=settings.TIME_ZONE,
|
||||
license_info=license_data,
|
||||
version=get_awx_version(),
|
||||
ansible_version=get_ansible_version(),
|
||||
eula=render_to_string("eula.md") if license_data.get('license_type', 'UNLICENSED') != 'open' else '',
|
||||
analytics_status=pendo_state
|
||||
)
|
||||
|
||||
# If LDAP is enabled, user_ldap_fields will return a list of field
|
||||
# names that are managed by LDAP and should be read-only for users with
|
||||
# a non-empty ldap_dn attribute.
|
||||
if getattr(settings, 'AUTH_LDAP_SERVER_URI', None) and feature_enabled('ldap'):
|
||||
user_ldap_fields = ['username', 'password']
|
||||
user_ldap_fields.extend(getattr(settings, 'AUTH_LDAP_USER_ATTR_MAP', {}).keys())
|
||||
user_ldap_fields.extend(getattr(settings, 'AUTH_LDAP_USER_FLAGS_BY_GROUP', {}).keys())
|
||||
data['user_ldap_fields'] = user_ldap_fields
|
||||
|
||||
if request.user.is_superuser \
|
||||
or request.user.is_system_auditor \
|
||||
or Organization.accessible_objects(request.user, 'admin_role').exists() \
|
||||
or Organization.accessible_objects(request.user, 'auditor_role').exists():
|
||||
data.update(dict(
|
||||
project_base_dir = settings.PROJECTS_ROOT,
|
||||
project_local_paths = Project.get_local_path_choices(),
|
||||
custom_virtualenvs = get_custom_venv_choices()
|
||||
))
|
||||
elif JobTemplate.accessible_objects(request.user, 'admin_role').exists():
|
||||
data['custom_virtualenvs'] = get_custom_venv_choices()
|
||||
|
||||
return Response(data)
|
||||
|
||||
def post(self, request):
|
||||
if not isinstance(request.data, dict):
|
||||
return Response({"error": _("Invalid license data")}, status=status.HTTP_400_BAD_REQUEST)
|
||||
if "eula_accepted" not in request.data:
|
||||
return Response({"error": _("Missing 'eula_accepted' property")}, status=status.HTTP_400_BAD_REQUEST)
|
||||
try:
|
||||
eula_accepted = to_python_boolean(request.data["eula_accepted"])
|
||||
except ValueError:
|
||||
return Response({"error": _("'eula_accepted' value is invalid")}, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
if not eula_accepted:
|
||||
return Response({"error": _("'eula_accepted' must be True")}, status=status.HTTP_400_BAD_REQUEST)
|
||||
request.data.pop("eula_accepted")
|
||||
try:
|
||||
data_actual = json.dumps(request.data)
|
||||
except Exception:
|
||||
logger.info(smart_text(u"Invalid JSON submitted for license."),
|
||||
extra=dict(actor=request.user.username))
|
||||
return Response({"error": _("Invalid JSON")}, status=status.HTTP_400_BAD_REQUEST)
|
||||
try:
|
||||
from awx.main.utils.common import get_licenser
|
||||
license_data = json.loads(data_actual)
|
||||
license_data_validated = get_licenser(**license_data).validate()
|
||||
except Exception:
|
||||
logger.warning(smart_text(u"Invalid license submitted."),
|
||||
extra=dict(actor=request.user.username))
|
||||
return Response({"error": _("Invalid License")}, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
# If the license is valid, write it to the database.
|
||||
if license_data_validated['valid_key']:
|
||||
settings.LICENSE = license_data
|
||||
settings.TOWER_URL_BASE = "{}://{}".format(request.scheme, request.get_host())
|
||||
return Response(license_data_validated)
|
||||
|
||||
logger.warning(smart_text(u"Invalid license submitted."),
|
||||
extra=dict(actor=request.user.username))
|
||||
return Response({"error": _("Invalid license")}, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
def delete(self, request):
|
||||
try:
|
||||
settings.LICENSE = {}
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
except Exception:
|
||||
# FIX: Log
|
||||
return Response({"error": _("Failed to remove license (%s)") % has_error}, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
|
||||
class DashboardView(APIView):
|
||||
|
||||
view_name = _("Dashboard")
|
||||
@@ -368,7 +172,7 @@ class DashboardView(APIView):
|
||||
user_inventory = get_user_queryset(request.user, Inventory)
|
||||
inventory_with_failed_hosts = user_inventory.filter(hosts_with_active_failures__gt=0)
|
||||
user_inventory_external = user_inventory.filter(has_inventory_sources=True)
|
||||
failed_inventory = sum(i.inventory_sources_with_failures for i in user_inventory)
|
||||
failed_inventory = user_inventory.aggregate(Sum('inventory_sources_with_failures'))['inventory_sources_with_failures__sum']
|
||||
data['inventories'] = {'url': reverse('api:inventory_list', request=request),
|
||||
'total': user_inventory.count(),
|
||||
'total_with_inventory_source': user_inventory_external.count(),
|
||||
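The failed_inventory change above moves a per-object Python sum into a single database aggregate. Both forms side by side, with the caveat that Sum over an empty queryset comes back as None:

from django.db.models import Sum

# Python side: loads every inventory row, then adds the field values in Python.
failed_inventory = sum(i.inventory_sources_with_failures for i in user_inventory)

# Database side: one SELECT SUM(...) query; may be None if the queryset is empty.
failed_inventory = user_inventory.aggregate(Sum('inventory_sources_with_failures'))['inventory_sources_with_failures__sum']
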
@@ -712,7 +516,7 @@ class AuthView(APIView):
|
||||
from rest_framework.reverse import reverse
|
||||
data = OrderedDict()
|
||||
err_backend, err_message = request.session.get('social_auth_error', (None, None))
|
||||
auth_backends = load_backends(settings.AUTHENTICATION_BACKENDS, force_load=True).items()
|
||||
auth_backends = list(load_backends(settings.AUTHENTICATION_BACKENDS, force_load=True).items())
|
||||
# Return auth backends in consistent order: Google, GitHub, SAML.
|
||||
auth_backends.sort(key=lambda x: 'g' if x[0] == 'google-oauth2' else x[0])
|
||||
for name, backend in auth_backends:
|
||||
@@ -744,213 +548,6 @@ class AuthView(APIView):
|
||||
return Response(data)
|
||||
|
||||
|
||||
class OrganizationList(OrganizationCountsMixin, ListCreateAPIView):
|
||||
|
||||
model = Organization
|
||||
serializer_class = OrganizationSerializer
|
||||
|
||||
def get_queryset(self):
|
||||
qs = Organization.accessible_objects(self.request.user, 'read_role')
|
||||
qs = qs.select_related('admin_role', 'auditor_role', 'member_role', 'read_role')
|
||||
qs = qs.prefetch_related('created_by', 'modified_by')
|
||||
return qs
|
||||
|
||||
def create(self, request, *args, **kwargs):
|
||||
"""Create a new organzation.
|
||||
|
||||
If there is already an organization and the license of this
|
||||
instance does not permit multiple organizations, then raise
|
||||
LicenseForbids.
|
||||
"""
|
||||
# Sanity check: If the multiple organizations feature is disallowed
|
||||
# by the license, then we are only willing to create this organization
|
||||
# if no organizations exist in the system.
|
||||
if (not feature_enabled('multiple_organizations') and
|
||||
self.model.objects.exists()):
|
||||
raise LicenseForbids(_('Your license only permits a single '
|
||||
'organization to exist.'))
|
||||
|
||||
# Okay, create the organization as usual.
|
||||
return super(OrganizationList, self).create(request, *args, **kwargs)
|
||||
|
||||
|
||||
class OrganizationDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView):
|
||||
|
||||
model = Organization
|
||||
serializer_class = OrganizationSerializer
|
||||
|
||||
def get_serializer_context(self, *args, **kwargs):
|
||||
full_context = super(OrganizationDetail, self).get_serializer_context(*args, **kwargs)
|
||||
|
||||
if not hasattr(self, 'kwargs') or 'pk' not in self.kwargs:
|
||||
return full_context
|
||||
org_id = int(self.kwargs['pk'])
|
||||
|
||||
org_counts = {}
|
||||
access_kwargs = {'accessor': self.request.user, 'role_field': 'read_role'}
|
||||
direct_counts = Organization.objects.filter(id=org_id).annotate(
|
||||
users=Count('member_role__members', distinct=True),
|
||||
admins=Count('admin_role__members', distinct=True)
|
||||
).values('users', 'admins')
|
||||
|
||||
if not direct_counts:
|
||||
return full_context
|
||||
|
||||
org_counts = direct_counts[0]
|
||||
org_counts['inventories'] = Inventory.accessible_objects(**access_kwargs).filter(
|
||||
organization__id=org_id).count()
|
||||
org_counts['teams'] = Team.accessible_objects(**access_kwargs).filter(
|
||||
organization__id=org_id).count()
|
||||
org_counts['projects'] = Project.accessible_objects(**access_kwargs).filter(
|
||||
organization__id=org_id).count()
|
||||
org_counts['job_templates'] = JobTemplate.accessible_objects(**access_kwargs).filter(
|
||||
project__organization__id=org_id).count()
|
||||
|
||||
full_context['related_field_counts'] = {}
|
||||
full_context['related_field_counts'][org_id] = org_counts
|
||||
|
||||
return full_context
|
||||
|
||||
|
||||
class OrganizationInventoriesList(SubListAPIView):
|
||||
|
||||
model = Inventory
|
||||
serializer_class = InventorySerializer
|
||||
parent_model = Organization
|
||||
relationship = 'inventories'
|
||||
|
||||
|
||||
class BaseUsersList(SubListCreateAttachDetachAPIView):
|
||||
def post(self, request, *args, **kwargs):
|
||||
ret = super(BaseUsersList, self).post( request, *args, **kwargs)
|
||||
if ret.status_code != 201:
|
||||
return ret
|
||||
try:
|
||||
if ret.data is not None and request.data.get('is_system_auditor', False):
|
||||
# This is a faux-field that just maps to checking the system
|
||||
# auditor role member list.. unfortunately this means we can't
|
||||
# set it on creation, and thus needs to be set here.
|
||||
user = User.objects.get(id=ret.data['id'])
|
||||
user.is_system_auditor = request.data['is_system_auditor']
|
||||
ret.data['is_system_auditor'] = request.data['is_system_auditor']
|
||||
except AttributeError as exc:
|
||||
print(exc)
|
||||
pass
|
||||
return ret
|
||||
|
||||
|
||||
class OrganizationUsersList(BaseUsersList):
|
||||
|
||||
model = User
|
||||
serializer_class = UserSerializer
|
||||
parent_model = Organization
|
||||
relationship = 'member_role.members'
|
||||
|
||||
|
||||
class OrganizationAdminsList(BaseUsersList):
|
||||
|
||||
model = User
|
||||
serializer_class = UserSerializer
|
||||
parent_model = Organization
|
||||
relationship = 'admin_role.members'
|
||||
|
||||
|
||||
class OrganizationProjectsList(SubListCreateAttachDetachAPIView):
|
||||
|
||||
model = Project
|
||||
serializer_class = ProjectSerializer
|
||||
parent_model = Organization
|
||||
relationship = 'projects'
|
||||
parent_key = 'organization'
|
||||
|
||||
|
||||
class OrganizationWorkflowJobTemplatesList(SubListCreateAttachDetachAPIView):
|
||||
|
||||
model = WorkflowJobTemplate
|
||||
serializer_class = WorkflowJobTemplateSerializer
|
||||
parent_model = Organization
|
||||
relationship = 'workflows'
|
||||
parent_key = 'organization'
|
||||
|
||||
|
||||
class OrganizationTeamsList(SubListCreateAttachDetachAPIView):
|
||||
|
||||
model = Team
|
||||
serializer_class = TeamSerializer
|
||||
parent_model = Organization
|
||||
relationship = 'teams'
|
||||
parent_key = 'organization'
|
||||
|
||||
|
||||
class OrganizationActivityStreamList(ActivityStreamEnforcementMixin, SubListAPIView):
|
||||
|
||||
model = ActivityStream
|
||||
serializer_class = ActivityStreamSerializer
|
||||
parent_model = Organization
|
||||
relationship = 'activitystream_set'
|
||||
search_fields = ('changes',)
|
||||
|
||||
|
||||
class OrganizationNotificationTemplatesList(SubListCreateAttachDetachAPIView):
|
||||
|
||||
model = NotificationTemplate
|
||||
serializer_class = NotificationTemplateSerializer
|
||||
parent_model = Organization
|
||||
relationship = 'notification_templates'
|
||||
parent_key = 'organization'
|
||||
|
||||
|
||||
class OrganizationNotificationTemplatesAnyList(SubListCreateAttachDetachAPIView):
|
||||
|
||||
model = NotificationTemplate
|
||||
serializer_class = NotificationTemplateSerializer
|
||||
parent_model = Organization
|
||||
relationship = 'notification_templates_any'
|
||||
|
||||
|
||||
class OrganizationNotificationTemplatesErrorList(SubListCreateAttachDetachAPIView):
|
||||
|
||||
model = NotificationTemplate
|
||||
serializer_class = NotificationTemplateSerializer
|
||||
parent_model = Organization
|
||||
relationship = 'notification_templates_error'
|
||||
|
||||
|
||||
class OrganizationNotificationTemplatesSuccessList(SubListCreateAttachDetachAPIView):
|
||||
|
||||
model = NotificationTemplate
|
||||
serializer_class = NotificationTemplateSerializer
|
||||
parent_model = Organization
|
||||
relationship = 'notification_templates_success'
|
||||
|
||||
|
||||
class OrganizationInstanceGroupsList(SubListAttachDetachAPIView):
|
||||
|
||||
model = InstanceGroup
|
||||
serializer_class = InstanceGroupSerializer
|
||||
parent_model = Organization
|
||||
relationship = 'instance_groups'
|
||||
|
||||
|
||||
class OrganizationAccessList(ResourceAccessList):
|
||||
|
||||
model = User # needs to be User for AccessList's
|
||||
parent_model = Organization
|
||||
|
||||
|
||||
class OrganizationObjectRolesList(SubListAPIView):
|
||||
|
||||
model = Role
|
||||
serializer_class = RoleSerializer
|
||||
parent_model = Organization
|
||||
search_fields = ('role_field', 'content_type__model',)
|
||||
|
||||
def get_queryset(self):
|
||||
po = self.get_parent_object()
|
||||
content_type = ContentType.objects.get_for_model(self.parent_model)
|
||||
return Role.objects.filter(content_type=content_type, object_id=po.pk)
|
||||
|
||||
|
||||
class TeamList(ListCreateAPIView):
|
||||
|
||||
model = Team
|
||||
@@ -1253,18 +850,6 @@ class SystemJobEventsList(SubListAPIView):
|
||||
return super(SystemJobEventsList, self).finalize_response(request, response, *args, **kwargs)
|
||||
|
||||
|
||||
class InventoryUpdateEventsList(SubListAPIView):
|
||||
|
||||
model = InventoryUpdateEvent
|
||||
serializer_class = InventoryUpdateEventSerializer
|
||||
parent_model = InventoryUpdate
|
||||
relationship = 'inventory_update_events'
|
||||
view_name = _('Inventory Update Events List')
|
||||
search_fields = ('stdout',)
|
||||
|
||||
def finalize_response(self, request, response, *args, **kwargs):
|
||||
response['X-UI-Max-Events'] = settings.MAX_UI_JOB_EVENTS
|
||||
return super(InventoryUpdateEventsList, self).finalize_response(request, response, *args, **kwargs)
|
||||
|
||||
|
||||
class ProjectUpdateCancel(RetrieveAPIView):
|
||||
@@ -1822,177 +1407,6 @@ class CredentialCopy(CopyAPIView):
|
||||
copy_return_serializer_class = CredentialSerializer
|
||||
|
||||
|
||||
class InventoryScriptList(ListCreateAPIView):
|
||||
|
||||
model = CustomInventoryScript
|
||||
serializer_class = CustomInventoryScriptSerializer
|
||||
|
||||
|
||||
class InventoryScriptDetail(RetrieveUpdateDestroyAPIView):
|
||||
|
||||
model = CustomInventoryScript
|
||||
serializer_class = CustomInventoryScriptSerializer
|
||||
|
||||
def destroy(self, request, *args, **kwargs):
|
||||
instance = self.get_object()
|
||||
can_delete = request.user.can_access(self.model, 'delete', instance)
|
||||
if not can_delete:
|
||||
raise PermissionDenied(_("Cannot delete inventory script."))
|
||||
for inv_src in InventorySource.objects.filter(source_script=instance):
|
||||
inv_src.source_script = None
|
||||
inv_src.save()
|
||||
return super(InventoryScriptDetail, self).destroy(request, *args, **kwargs)
|
||||
|
||||
|
||||
class InventoryScriptObjectRolesList(SubListAPIView):
|
||||
|
||||
model = Role
|
||||
serializer_class = RoleSerializer
|
||||
parent_model = CustomInventoryScript
|
||||
search_fields = ('role_field', 'content_type__model',)
|
||||
|
||||
def get_queryset(self):
|
||||
po = self.get_parent_object()
|
||||
content_type = ContentType.objects.get_for_model(self.parent_model)
|
||||
return Role.objects.filter(content_type=content_type, object_id=po.pk)
|
||||
|
||||
|
||||
class InventoryScriptCopy(CopyAPIView):
|
||||
|
||||
model = CustomInventoryScript
|
||||
copy_return_serializer_class = CustomInventoryScriptSerializer
|
||||
|
||||
|
||||
class InventoryList(ListCreateAPIView):
|
||||
|
||||
model = Inventory
|
||||
serializer_class = InventorySerializer
|
||||
|
||||
def get_queryset(self):
|
||||
qs = Inventory.accessible_objects(self.request.user, 'read_role')
|
||||
qs = qs.select_related('admin_role', 'read_role', 'update_role', 'use_role', 'adhoc_role')
|
||||
qs = qs.prefetch_related('created_by', 'modified_by', 'organization')
|
||||
return qs
|
||||
|
||||
|
||||
class ControlledByScmMixin(object):
|
||||
'''
Mixin that resets the SCM inventory commit hash
if anything that it manages changes.
'''
|
||||
|
||||
def _reset_inv_src_rev(self, obj):
|
||||
if self.request.method in SAFE_METHODS or not obj:
|
||||
return
|
||||
project_following_sources = obj.inventory_sources.filter(
|
||||
update_on_project_update=True, source='scm')
|
||||
if project_following_sources:
|
||||
# Allow inventory changes unrelated to variables
|
||||
if self.model == Inventory and (
|
||||
not self.request or not self.request.data or
|
||||
parse_yaml_or_json(self.request.data.get('variables', '')) == parse_yaml_or_json(obj.variables)):
|
||||
return
|
||||
project_following_sources.update(scm_last_revision='')
|
||||
|
||||
def get_object(self):
|
||||
obj = super(ControlledByScmMixin, self).get_object()
|
||||
self._reset_inv_src_rev(obj)
|
||||
return obj
|
||||
|
||||
def get_parent_object(self):
|
||||
obj = super(ControlledByScmMixin, self).get_parent_object()
|
||||
self._reset_inv_src_rev(obj)
|
||||
return obj
|
||||
|
||||
|
||||
class InventoryDetail(RelatedJobsPreventDeleteMixin, ControlledByScmMixin, RetrieveUpdateDestroyAPIView):
|
||||
|
||||
model = Inventory
|
||||
serializer_class = InventoryDetailSerializer
|
||||
|
||||
def update(self, request, *args, **kwargs):
|
||||
obj = self.get_object()
|
||||
kind = self.request.data.get('kind') or kwargs.get('kind')
|
||||
|
||||
# Do not allow changes to an Inventory kind.
|
||||
if kind is not None and obj.kind != kind:
|
||||
return self.http_method_not_allowed(request, *args, **kwargs)
|
||||
return super(InventoryDetail, self).update(request, *args, **kwargs)
|
||||
|
||||
def destroy(self, request, *args, **kwargs):
|
||||
obj = self.get_object()
|
||||
if not request.user.can_access(self.model, 'delete', obj):
|
||||
raise PermissionDenied()
|
||||
self.check_related_active_jobs(obj) # related jobs mixin
|
||||
try:
|
||||
obj.schedule_deletion(getattr(request.user, 'id', None))
|
||||
return Response(status=status.HTTP_202_ACCEPTED)
|
||||
except RuntimeError as e:
|
||||
return Response(dict(error=_("{0}".format(e))), status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
|
||||
class InventoryActivityStreamList(ActivityStreamEnforcementMixin, SubListAPIView):
|
||||
|
||||
model = ActivityStream
|
||||
serializer_class = ActivityStreamSerializer
|
||||
parent_model = Inventory
|
||||
relationship = 'activitystream_set'
|
||||
search_fields = ('changes',)
|
||||
|
||||
def get_queryset(self):
|
||||
parent = self.get_parent_object()
|
||||
self.check_parent_access(parent)
|
||||
qs = self.request.user.get_queryset(self.model)
|
||||
return qs.filter(Q(inventory=parent) | Q(host__in=parent.hosts.all()) | Q(group__in=parent.groups.all()))
|
||||
|
||||
|
||||
class InventoryInstanceGroupsList(SubListAttachDetachAPIView):
|
||||
|
||||
model = InstanceGroup
|
||||
serializer_class = InstanceGroupSerializer
|
||||
parent_model = Inventory
|
||||
relationship = 'instance_groups'
|
||||
|
||||
|
||||
class InventoryAccessList(ResourceAccessList):
|
||||
|
||||
model = User # needs to be User for AccessList's
|
||||
parent_model = Inventory
|
||||
|
||||
|
||||
class InventoryObjectRolesList(SubListAPIView):
|
||||
|
||||
model = Role
|
||||
serializer_class = RoleSerializer
|
||||
parent_model = Inventory
|
||||
search_fields = ('role_field', 'content_type__model',)
|
||||
|
||||
def get_queryset(self):
|
||||
po = self.get_parent_object()
|
||||
content_type = ContentType.objects.get_for_model(self.parent_model)
|
||||
return Role.objects.filter(content_type=content_type, object_id=po.pk)
|
||||
|
||||
|
||||
class InventoryJobTemplateList(SubListAPIView):
|
||||
|
||||
model = JobTemplate
|
||||
serializer_class = JobTemplateSerializer
|
||||
parent_model = Inventory
|
||||
relationship = 'jobtemplates'
|
||||
|
||||
def get_queryset(self):
|
||||
parent = self.get_parent_object()
|
||||
self.check_parent_access(parent)
|
||||
qs = self.request.user.get_queryset(self.model)
|
||||
return qs.filter(inventory=parent)
|
||||
|
||||
|
||||
class InventoryCopy(CopyAPIView):
|
||||
|
||||
model = Inventory
|
||||
copy_return_serializer_class = InventorySerializer
|
||||
|
||||
|
||||
class HostRelatedSearchMixin(object):
|
||||
|
||||
@property
|
||||
@@ -2130,6 +1544,7 @@ class HostFactVersionsList(SystemTrackingEnforcementMixin, ParentMixin, ListAPIV
|
||||
serializer_class = FactVersionSerializer
|
||||
parent_model = Host
|
||||
search_fields = ('facts',)
|
||||
deprecated = True
|
||||
|
||||
def get_queryset(self):
|
||||
from_spec = self.request.query_params.get('from', None)
|
||||
@@ -2155,6 +1570,7 @@ class HostFactCompareView(SystemTrackingEnforcementMixin, SubDetailAPIView):
|
||||
model = Fact
|
||||
parent_model = Host
|
||||
serializer_class = FactSerializer
|
||||
deprecated = True
|
||||
|
||||
def retrieve(self, request, *args, **kwargs):
|
||||
datetime_spec = request.query_params.get('datetime', None)
|
||||
@@ -2175,12 +1591,20 @@ class HostInsights(GenericAPIView):
|
||||
serializer_class = EmptySerializer
|
||||
|
||||
def _extract_insights_creds(self, credential):
|
||||
return (credential.inputs['username'], decrypt_field(credential, 'password'))
|
||||
return (credential.get_input('username', default=''), credential.get_input('password', default=''))
|
||||
|
||||
def _get_insights(self, url, username, password):
|
||||
session = requests.Session()
|
||||
session.auth = requests.auth.HTTPBasicAuth(username, password)
|
||||
headers = {'Content-Type': 'application/json'}
|
||||
license = get_license(show_key=False).get('license_type', 'UNLICENSED')
|
||||
headers = {
|
||||
'Content-Type': 'application/json',
|
||||
'User-Agent': '{} {} ({})'.format(
|
||||
'AWX' if license == 'open' else 'Red Hat Ansible Tower',
|
||||
get_awx_version(),
|
||||
license
|
||||
)
|
||||
}
|
||||
return session.get(url, headers=headers, timeout=120)
|
||||
|
||||
def get_insights(self, url, username, password):
|
||||
@@ -2643,6 +2067,14 @@ class InventorySourceHostsList(HostRelatedSearchMixin, SubListDestroyAPIView):
|
||||
relationship = 'hosts'
|
||||
check_sub_obj_permission = False
|
||||
|
||||
def perform_list_destroy(self, instance_list):
|
||||
# Activity stream doesn't record disassociation here anyway
|
||||
# no signals-related reason to not bulk-delete
|
||||
Host.groups.through.objects.filter(
|
||||
host__inventory_sources=self.get_parent_object()
|
||||
).delete()
|
||||
return super(InventorySourceHostsList, self).perform_list_destroy(instance_list)
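perform_list_destroy above bulk-deletes rows of the auto-generated many-to-many through table instead of detaching hosts one at a time; per the comment, nothing here relies on per-object association signals. The underlying Django idiom in isolation, with inv_src standing in for the parent inventory source:

# Given `groups = models.ManyToManyField(Group)` on Host, the join table is Host.groups.through.
Host.groups.through.objects.filter(host__inventory_sources=inv_src).delete()
# One bulk DELETE instead of host.groups.remove(...) per object, at the cost of skipping
# m2m_changed handlers, which only fire for the manager's add/remove/clear/set methods.
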
|
||||
|
||||
|
||||
class InventorySourceGroupsList(SubListDestroyAPIView):
|
||||
|
||||
@@ -2652,6 +2084,13 @@ class InventorySourceGroupsList(SubListDestroyAPIView):
|
||||
relationship = 'groups'
|
||||
check_sub_obj_permission = False
|
||||
|
||||
def perform_list_destroy(self, instance_list):
|
||||
# Same arguments for bulk delete as with host list
|
||||
Group.hosts.through.objects.filter(
|
||||
group__inventory_sources=self.get_parent_object()
|
||||
).delete()
|
||||
return super(InventorySourceGroupsList, self).perform_list_destroy(instance_list)
|
||||
|
||||
|
||||
class InventorySourceUpdatesList(SubListAPIView):
|
||||
|
||||
@@ -2868,7 +2307,7 @@ class JobTemplateLaunch(RetrieveAPIView):
|
||||
raise ParseError({key: [msg], 'credentials': [msg]})
|
||||
|
||||
# add the deprecated credential specified in the request
|
||||
if not isinstance(prompted_value, Iterable) or isinstance(prompted_value, basestring):
|
||||
if not isinstance(prompted_value, Iterable) or isinstance(prompted_value, str):
|
||||
prompted_value = [prompted_value]
|
||||
|
||||
# If user gave extra_credentials, special case to use exactly
|
||||
@@ -3515,35 +2954,29 @@ class WorkflowJobTemplateNodeChildrenBaseList(WorkflowsEnforcementMixin, Enforce
|
||||
if created:
|
||||
return None
|
||||
|
||||
workflow_nodes = parent.workflow_job_template.workflow_job_template_nodes.all().\
|
||||
prefetch_related('success_nodes', 'failure_nodes', 'always_nodes')
|
||||
graph = {}
|
||||
for workflow_node in workflow_nodes:
|
||||
graph[workflow_node.pk] = dict(node_object=workflow_node, metadata={'parent': None, 'traversed': False})
|
||||
if parent.id == sub.id:
|
||||
return {"Error": _("Cycle detected.")}
|
||||
|
||||
find = False
|
||||
for node_type in ['success_nodes', 'failure_nodes', 'always_nodes']:
|
||||
for workflow_node in workflow_nodes:
|
||||
parent_node = graph[workflow_node.pk]
|
||||
related_nodes = getattr(parent_node['node_object'], node_type).all()
|
||||
for related_node in related_nodes:
|
||||
sub_node = graph[related_node.pk]
|
||||
sub_node['metadata']['parent'] = parent_node
|
||||
if not find and parent == workflow_node and sub == related_node and self.relationship == node_type:
|
||||
find = True
|
||||
if not find:
|
||||
sub_node = graph[sub.pk]
|
||||
parent_node = graph[parent.pk]
|
||||
if sub_node['metadata']['parent'] is not None:
|
||||
return {"Error": _("Multiple parent relationship not allowed.")}
|
||||
sub_node['metadata']['parent'] = parent_node
|
||||
iter_node = sub_node
|
||||
while iter_node is not None:
|
||||
if iter_node['metadata']['traversed']:
|
||||
return {"Error": _("Cycle detected.")}
|
||||
iter_node['metadata']['traversed'] = True
|
||||
iter_node = iter_node['metadata']['parent']
|
||||
'''
Look for a parent->child connection in all relationships except the one
being added, since it is valid to re-add an existing relationship.
'''
|
||||
relationships = ['success_nodes', 'failure_nodes', 'always_nodes']
|
||||
relationships.remove(self.relationship)
|
||||
qs = functools.reduce(lambda x, y: (x | y),
|
||||
(Q(**{'{}__in'.format(rel): [sub.id]}) for rel in relationships))
|
||||
|
||||
if WorkflowJobTemplateNode.objects.filter(Q(pk=parent.id) & qs).exists():
|
||||
return {"Error": _("Relationship not allowed.")}
|
||||
|
||||
parent_node_type_relationship = getattr(parent, self.relationship)
|
||||
parent_node_type_relationship.add(sub)
|
||||
|
||||
graph = WorkflowDAG(parent.workflow_job_template)
|
||||
if graph.has_cycle():
|
||||
parent_node_type_relationship.remove(sub)
|
||||
return {"Error": _("Cycle detected.")}
|
||||
parent_node_type_relationship.remove(sub)
|
||||
return None
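The functools.reduce call earlier in this hunk ORs one Q object per remaining relationship so the existing-link check is a single query; the same idiom written out on its own:

import functools
from django.db.models import Q

other_relationships = ['success_nodes', 'failure_nodes']   # e.g. everything except the one being added
link_filter = functools.reduce(lambda x, y: x | y,
                               (Q(**{'{}__in'.format(rel): [sub.id]}) for rel in other_relationships))
# link_filter == Q(success_nodes__in=[sub.id]) | Q(failure_nodes__in=[sub.id])
already_linked = WorkflowJobTemplateNode.objects.filter(Q(pk=parent.id) & link_filter).exists()
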
|
||||
|
||||
|
||||
@@ -3671,23 +3104,31 @@ class WorkflowJobTemplateLaunch(WorkflowsEnforcementMixin, RetrieveAPIView):
|
||||
extra_vars.setdefault(v, u'')
|
||||
if extra_vars:
|
||||
data['extra_vars'] = extra_vars
|
||||
if obj.ask_inventory_on_launch:
|
||||
data['inventory'] = obj.inventory_id
|
||||
else:
|
||||
data.pop('inventory', None)
|
||||
return data
|
||||
|
||||
def post(self, request, *args, **kwargs):
|
||||
obj = self.get_object()
|
||||
|
||||
if 'inventory_id' in request.data:
|
||||
request.data['inventory'] = request.data['inventory_id']
|
||||
|
||||
serializer = self.serializer_class(instance=obj, data=request.data)
|
||||
if not serializer.is_valid():
|
||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
prompted_fields, ignored_fields, errors = obj._accept_or_ignore_job_kwargs(**request.data)
|
||||
if not request.user.can_access(JobLaunchConfig, 'add', serializer.validated_data, template=obj):
|
||||
raise PermissionDenied()
|
||||
|
||||
new_job = obj.create_unified_job(**prompted_fields)
|
||||
new_job = obj.create_unified_job(**serializer.validated_data)
|
||||
new_job.signal_start()
|
||||
|
||||
data = OrderedDict()
|
||||
data['workflow_job'] = new_job.id
|
||||
data['ignored_fields'] = ignored_fields
|
||||
data['ignored_fields'] = serializer._ignored_fields
|
||||
data.update(WorkflowJobSerializer(new_job, context=self.get_serializer_context()).to_representation(new_job))
|
||||
headers = {'Location': new_job.get_absolute_url(request)}
|
||||
return Response(data, status=status.HTTP_201_CREATED, headers=headers)
|
||||
@@ -3711,8 +3152,12 @@ class WorkflowJobRelaunch(WorkflowsEnforcementMixin, GenericAPIView):
|
||||
|
||||
def post(self, request, *args, **kwargs):
|
||||
obj = self.get_object()
|
||||
if obj.is_sliced_job and not obj.job_template_id:
|
||||
raise ParseError(_('Cannot relaunch slice workflow job orphaned from job template.'))
|
||||
if obj.is_sliced_job:
|
||||
jt = obj.job_template
|
||||
if not jt:
|
||||
raise ParseError(_('Cannot relaunch slice workflow job orphaned from job template.'))
|
||||
elif not jt.inventory or min(jt.inventory.hosts.count(), jt.job_slice_count) != obj.workflow_nodes.count():
|
||||
raise ParseError(_('Cannot relaunch sliced workflow job after slice count has changed.'))
|
||||
new_workflow_job = obj.create_relaunch_workflow_job()
|
||||
new_workflow_job.signal_start()
|
||||
|
||||
@@ -3849,8 +3294,7 @@ class WorkflowJobCancel(WorkflowsEnforcementMixin, RetrieveAPIView):
|
||||
obj = self.get_object()
|
||||
if obj.can_cancel:
|
||||
obj.cancel()
|
||||
#TODO: Figure out whether an immediate schedule is needed.
|
||||
run_job_complete.delay(obj.id)
|
||||
schedule_task_manager()
|
||||
return Response(status=status.HTTP_202_ACCEPTED)
|
||||
else:
|
||||
return self.http_method_not_allowed(request, *args, **kwargs)
|
||||
@@ -4175,6 +3619,11 @@ class JobRelaunch(RetrieveAPIView):
|
||||
'Cannot relaunch because previous job had 0 {status_value} hosts.'
|
||||
).format(status_value=retry_hosts)}, status=status.HTTP_400_BAD_REQUEST)
|
||||
copy_kwargs['limit'] = ','.join(retry_host_list)
|
||||
limit_length = len(copy_kwargs['limit'])
|
||||
if limit_length > 1024:
|
||||
return Response({'limit': _(
|
||||
'Cannot relaunch because the limit length {limit_length} exceeds the max of {limit_max}.'
|
||||
).format(limit_length=limit_length, limit_max=1024)}, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
new_job = obj.copy_unified_job(**copy_kwargs)
|
||||
result = new_job.signal_start(**serializer.validated_data['credential_passwords'])
|
||||
@@ -5009,7 +4458,7 @@ class RoleChildrenList(SubListAPIView):
|
||||
# in URL patterns and reverse URL lookups, converting CamelCase names to
|
||||
# lowercase_with_underscore (e.g. MyView.as_view() becomes my_view).
|
||||
this_module = sys.modules[__name__]
|
||||
for attr, value in locals().items():
|
||||
for attr, value in list(locals().items()):
|
||||
if isinstance(value, type) and issubclass(value, APIView):
|
||||
name = camelcase_to_underscore(attr)
|
||||
view = value.as_view()
|
||||
|
||||
awx/api/views/inventory.py (new file, 211 lines)
@@ -0,0 +1,211 @@
|
||||
# Copyright (c) 2018 Red Hat, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
# Python
|
||||
import logging
|
||||
|
||||
# Django
|
||||
from django.conf import settings
|
||||
from django.db.models import Q
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
# Django REST Framework
|
||||
from rest_framework.exceptions import PermissionDenied
|
||||
from rest_framework.response import Response
|
||||
from rest_framework import status
|
||||
|
||||
# AWX
|
||||
from awx.main.models import (
|
||||
ActivityStream,
|
||||
Inventory,
|
||||
JobTemplate,
|
||||
Role,
|
||||
User,
|
||||
InstanceGroup,
|
||||
InventoryUpdateEvent,
|
||||
InventoryUpdate,
|
||||
InventorySource,
|
||||
CustomInventoryScript,
|
||||
)
|
||||
from awx.api.generics import (
|
||||
ListCreateAPIView,
|
||||
RetrieveUpdateDestroyAPIView,
|
||||
SubListAPIView,
|
||||
SubListAttachDetachAPIView,
|
||||
ResourceAccessList,
|
||||
CopyAPIView,
|
||||
)
|
||||
|
||||
from awx.api.serializers import (
|
||||
InventorySerializer,
|
||||
ActivityStreamSerializer,
|
||||
RoleSerializer,
|
||||
InstanceGroupSerializer,
|
||||
InventoryUpdateEventSerializer,
|
||||
CustomInventoryScriptSerializer,
|
||||
InventoryDetailSerializer,
|
||||
JobTemplateSerializer,
|
||||
)
|
||||
from awx.api.views.mixin import (
|
||||
ActivityStreamEnforcementMixin,
|
||||
RelatedJobsPreventDeleteMixin,
|
||||
ControlledByScmMixin,
|
||||
)
|
||||
|
||||
logger = logging.getLogger('awx.api.views.inventory')
|
||||
|
||||
|
||||
class InventoryUpdateEventsList(SubListAPIView):
|
||||
|
||||
model = InventoryUpdateEvent
|
||||
serializer_class = InventoryUpdateEventSerializer
|
||||
parent_model = InventoryUpdate
|
||||
relationship = 'inventory_update_events'
|
||||
view_name = _('Inventory Update Events List')
|
||||
search_fields = ('stdout',)
|
||||
|
||||
def finalize_response(self, request, response, *args, **kwargs):
|
||||
response['X-UI-Max-Events'] = settings.MAX_UI_JOB_EVENTS
|
||||
return super(InventoryUpdateEventsList, self).finalize_response(request, response, *args, **kwargs)
|
||||
|
||||
|
||||
class InventoryScriptList(ListCreateAPIView):
|
||||
|
||||
model = CustomInventoryScript
|
||||
serializer_class = CustomInventoryScriptSerializer
|
||||
|
||||
|
||||
class InventoryScriptDetail(RetrieveUpdateDestroyAPIView):
|
||||
|
||||
model = CustomInventoryScript
|
||||
serializer_class = CustomInventoryScriptSerializer
|
||||
|
||||
def destroy(self, request, *args, **kwargs):
|
||||
instance = self.get_object()
|
||||
can_delete = request.user.can_access(self.model, 'delete', instance)
|
||||
if not can_delete:
|
||||
raise PermissionDenied(_("Cannot delete inventory script."))
|
||||
for inv_src in InventorySource.objects.filter(source_script=instance):
|
||||
inv_src.source_script = None
|
||||
inv_src.save()
|
||||
return super(InventoryScriptDetail, self).destroy(request, *args, **kwargs)
|
||||
|
||||
|
||||
class InventoryScriptObjectRolesList(SubListAPIView):
|
||||
|
||||
model = Role
|
||||
serializer_class = RoleSerializer
|
||||
parent_model = CustomInventoryScript
|
||||
search_fields = ('role_field', 'content_type__model',)
|
||||
|
||||
def get_queryset(self):
|
||||
po = self.get_parent_object()
|
||||
content_type = ContentType.objects.get_for_model(self.parent_model)
|
||||
return Role.objects.filter(content_type=content_type, object_id=po.pk)
|
||||
|
||||
|
||||
class InventoryScriptCopy(CopyAPIView):
|
||||
|
||||
model = CustomInventoryScript
|
||||
copy_return_serializer_class = CustomInventoryScriptSerializer
|
||||
|
||||
|
||||
class InventoryList(ListCreateAPIView):
|
||||
|
||||
model = Inventory
|
||||
serializer_class = InventorySerializer
|
||||
|
||||
def get_queryset(self):
|
||||
qs = Inventory.accessible_objects(self.request.user, 'read_role')
|
||||
qs = qs.select_related('admin_role', 'read_role', 'update_role', 'use_role', 'adhoc_role')
|
||||
qs = qs.prefetch_related('created_by', 'modified_by', 'organization')
|
||||
return qs
|
||||
|
||||
|
||||
class InventoryDetail(RelatedJobsPreventDeleteMixin, ControlledByScmMixin, RetrieveUpdateDestroyAPIView):
|
||||
|
||||
model = Inventory
|
||||
serializer_class = InventoryDetailSerializer
|
||||
|
||||
def update(self, request, *args, **kwargs):
|
||||
obj = self.get_object()
|
||||
kind = self.request.data.get('kind') or kwargs.get('kind')
|
||||
|
||||
# Do not allow changes to an Inventory kind.
|
||||
if kind is not None and obj.kind != kind:
|
||||
return self.http_method_not_allowed(request, *args, **kwargs)
|
||||
return super(InventoryDetail, self).update(request, *args, **kwargs)
|
||||
|
||||
def destroy(self, request, *args, **kwargs):
|
||||
obj = self.get_object()
|
||||
if not request.user.can_access(self.model, 'delete', obj):
|
||||
raise PermissionDenied()
|
||||
self.check_related_active_jobs(obj) # related jobs mixin
|
||||
try:
|
||||
obj.schedule_deletion(getattr(request.user, 'id', None))
|
||||
return Response(status=status.HTTP_202_ACCEPTED)
|
||||
except RuntimeError as e:
|
||||
return Response(dict(error=_("{0}".format(e))), status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
|
||||
class InventoryActivityStreamList(ActivityStreamEnforcementMixin, SubListAPIView):
|
||||
|
||||
model = ActivityStream
|
||||
serializer_class = ActivityStreamSerializer
|
||||
parent_model = Inventory
|
||||
relationship = 'activitystream_set'
|
||||
search_fields = ('changes',)
|
||||
|
||||
def get_queryset(self):
|
||||
parent = self.get_parent_object()
|
||||
self.check_parent_access(parent)
|
||||
qs = self.request.user.get_queryset(self.model)
|
||||
return qs.filter(Q(inventory=parent) | Q(host__in=parent.hosts.all()) | Q(group__in=parent.groups.all()))
|
||||
|
||||
|
||||
class InventoryInstanceGroupsList(SubListAttachDetachAPIView):
|
||||
|
||||
model = InstanceGroup
|
||||
serializer_class = InstanceGroupSerializer
|
||||
parent_model = Inventory
|
||||
relationship = 'instance_groups'
|
||||
|
||||
|
||||
class InventoryAccessList(ResourceAccessList):
|
||||
|
||||
model = User # needs to be User for AccessLists
|
||||
parent_model = Inventory
|
||||
|
||||
|
||||
class InventoryObjectRolesList(SubListAPIView):
|
||||
|
||||
model = Role
|
||||
serializer_class = RoleSerializer
|
||||
parent_model = Inventory
|
||||
search_fields = ('role_field', 'content_type__model',)
|
||||
|
||||
def get_queryset(self):
|
||||
po = self.get_parent_object()
|
||||
content_type = ContentType.objects.get_for_model(self.parent_model)
|
||||
return Role.objects.filter(content_type=content_type, object_id=po.pk)
|
||||
|
||||
|
||||
class InventoryJobTemplateList(SubListAPIView):
|
||||
|
||||
model = JobTemplate
|
||||
serializer_class = JobTemplateSerializer
|
||||
parent_model = Inventory
|
||||
relationship = 'jobtemplates'
|
||||
|
||||
def get_queryset(self):
|
||||
parent = self.get_parent_object()
|
||||
self.check_parent_access(parent)
|
||||
qs = self.request.user.get_queryset(self.model)
|
||||
return qs.filter(inventory=parent)
|
||||
|
||||
|
||||
class InventoryCopy(CopyAPIView):
|
||||
|
||||
model = Inventory
|
||||
copy_return_serializer_class = InventorySerializer
|
||||
@@ -13,12 +13,16 @@ from django.shortcuts import get_object_or_404
|
||||
from django.utils.timezone import now
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
from rest_framework.permissions import SAFE_METHODS
|
||||
from rest_framework.exceptions import PermissionDenied
|
||||
from rest_framework.response import Response
|
||||
from rest_framework import status
|
||||
|
||||
from awx.main.constants import ACTIVE_STATES
|
||||
from awx.main.utils import get_object_or_400
|
||||
from awx.main.utils import (
|
||||
get_object_or_400,
|
||||
parse_yaml_or_json,
|
||||
)
|
||||
from awx.main.models.ha import (
|
||||
Instance,
|
||||
InstanceGroup,
|
||||
@@ -273,3 +277,33 @@ class OrganizationCountsMixin(object):
|
||||
full_context['related_field_counts'] = count_context
|
||||
|
||||
return full_context
|
||||
|
||||
|
||||
class ControlledByScmMixin(object):
|
||||
'''
|
||||
Special method to reset SCM inventory commit hash
|
||||
if anything that it manages changes.
|
||||
'''
|
||||
|
||||
def _reset_inv_src_rev(self, obj):
|
||||
if self.request.method in SAFE_METHODS or not obj:
|
||||
return
|
||||
project_following_sources = obj.inventory_sources.filter(
|
||||
update_on_project_update=True, source='scm')
|
||||
if project_following_sources:
|
||||
# Allow inventory changes unrelated to variables
|
||||
if self.model == Inventory and (
|
||||
not self.request or not self.request.data or
|
||||
parse_yaml_or_json(self.request.data.get('variables', '')) == parse_yaml_or_json(obj.variables)):
|
||||
return
|
||||
project_following_sources.update(scm_last_revision='')
|
||||
|
||||
def get_object(self):
|
||||
obj = super(ControlledByScmMixin, self).get_object()
|
||||
self._reset_inv_src_rev(obj)
|
||||
return obj
|
||||
|
||||
def get_parent_object(self):
|
||||
obj = super(ControlledByScmMixin, self).get_parent_object()
|
||||
self._reset_inv_src_rev(obj)
|
||||
return obj
|
||||
|
||||
awx/api/views/organization.py (new file, 247 lines)
@@ -0,0 +1,247 @@
|
||||
# Copyright (c) 2018 Red Hat, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
# Python
|
||||
import logging
|
||||
|
||||
# Django
|
||||
from django.db.models import Count
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
# AWX
|
||||
from awx.conf.license import (
|
||||
feature_enabled,
|
||||
LicenseForbids,
|
||||
)
|
||||
from awx.main.models import (
|
||||
ActivityStream,
|
||||
Inventory,
|
||||
Project,
|
||||
JobTemplate,
|
||||
WorkflowJobTemplate,
|
||||
Organization,
|
||||
NotificationTemplate,
|
||||
Role,
|
||||
User,
|
||||
Team,
|
||||
InstanceGroup,
|
||||
)
|
||||
from awx.api.generics import (
|
||||
ListCreateAPIView,
|
||||
RetrieveUpdateDestroyAPIView,
|
||||
SubListAPIView,
|
||||
SubListCreateAttachDetachAPIView,
|
||||
SubListAttachDetachAPIView,
|
||||
ResourceAccessList,
|
||||
BaseUsersList,
|
||||
)
|
||||
|
||||
from awx.api.serializers import (
|
||||
OrganizationSerializer,
|
||||
InventorySerializer,
|
||||
ProjectSerializer,
|
||||
UserSerializer,
|
||||
TeamSerializer,
|
||||
ActivityStreamSerializer,
|
||||
RoleSerializer,
|
||||
NotificationTemplateSerializer,
|
||||
WorkflowJobTemplateSerializer,
|
||||
InstanceGroupSerializer,
|
||||
)
|
||||
from awx.api.views.mixin import (
|
||||
ActivityStreamEnforcementMixin,
|
||||
RelatedJobsPreventDeleteMixin,
|
||||
OrganizationCountsMixin,
|
||||
)
|
||||
|
||||
logger = logging.getLogger('awx.api.views.organization')
|
||||
|
||||
|
||||
class OrganizationList(OrganizationCountsMixin, ListCreateAPIView):
|
||||
|
||||
model = Organization
|
||||
serializer_class = OrganizationSerializer
|
||||
|
||||
def get_queryset(self):
|
||||
qs = Organization.accessible_objects(self.request.user, 'read_role')
|
||||
qs = qs.select_related('admin_role', 'auditor_role', 'member_role', 'read_role')
|
||||
qs = qs.prefetch_related('created_by', 'modified_by')
|
||||
return qs
|
||||
|
||||
def create(self, request, *args, **kwargs):
|
||||
"""Create a new organzation.
|
||||
|
||||
If there is already an organization and the license of this
|
||||
instance does not permit multiple organizations, then raise
|
||||
LicenseForbids.
|
||||
"""
|
||||
# Sanity check: If the multiple organizations feature is disallowed
|
||||
# by the license, then we are only willing to create this organization
|
||||
# if no organizations exist in the system.
|
||||
if (not feature_enabled('multiple_organizations') and
|
||||
self.model.objects.exists()):
|
||||
raise LicenseForbids(_('Your license only permits a single '
|
||||
'organization to exist.'))
|
||||
|
||||
# Okay, create the organization as usual.
|
||||
return super(OrganizationList, self).create(request, *args, **kwargs)
|
||||
|
||||
|
||||
class OrganizationDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView):
|
||||
|
||||
model = Organization
|
||||
serializer_class = OrganizationSerializer
|
||||
|
||||
def get_serializer_context(self, *args, **kwargs):
|
||||
full_context = super(OrganizationDetail, self).get_serializer_context(*args, **kwargs)
|
||||
|
||||
if not hasattr(self, 'kwargs') or 'pk' not in self.kwargs:
|
||||
return full_context
|
||||
org_id = int(self.kwargs['pk'])
|
||||
|
||||
org_counts = {}
|
||||
access_kwargs = {'accessor': self.request.user, 'role_field': 'read_role'}
|
||||
direct_counts = Organization.objects.filter(id=org_id).annotate(
|
||||
users=Count('member_role__members', distinct=True),
|
||||
admins=Count('admin_role__members', distinct=True)
|
||||
).values('users', 'admins')
|
||||
|
||||
if not direct_counts:
|
||||
return full_context
|
||||
|
||||
org_counts = direct_counts[0]
|
||||
org_counts['inventories'] = Inventory.accessible_objects(**access_kwargs).filter(
|
||||
organization__id=org_id).count()
|
||||
org_counts['teams'] = Team.accessible_objects(**access_kwargs).filter(
|
||||
organization__id=org_id).count()
|
||||
org_counts['projects'] = Project.accessible_objects(**access_kwargs).filter(
|
||||
organization__id=org_id).count()
|
||||
org_counts['job_templates'] = JobTemplate.accessible_objects(**access_kwargs).filter(
|
||||
project__organization__id=org_id).count()
|
||||
|
||||
full_context['related_field_counts'] = {}
|
||||
full_context['related_field_counts'][org_id] = org_counts
|
||||
|
||||
return full_context
|
||||
|
||||
|
||||
class OrganizationInventoriesList(SubListAPIView):
|
||||
|
||||
model = Inventory
|
||||
serializer_class = InventorySerializer
|
||||
parent_model = Organization
|
||||
relationship = 'inventories'
|
||||
|
||||
|
||||
class OrganizationUsersList(BaseUsersList):
|
||||
|
||||
model = User
|
||||
serializer_class = UserSerializer
|
||||
parent_model = Organization
|
||||
relationship = 'member_role.members'
|
||||
|
||||
|
||||
class OrganizationAdminsList(BaseUsersList):
|
||||
|
||||
model = User
|
||||
serializer_class = UserSerializer
|
||||
parent_model = Organization
|
||||
relationship = 'admin_role.members'
|
||||
|
||||
|
||||
class OrganizationProjectsList(SubListCreateAttachDetachAPIView):
|
||||
|
||||
model = Project
|
||||
serializer_class = ProjectSerializer
|
||||
parent_model = Organization
|
||||
relationship = 'projects'
|
||||
parent_key = 'organization'
|
||||
|
||||
|
||||
class OrganizationWorkflowJobTemplatesList(SubListCreateAttachDetachAPIView):
|
||||
|
||||
model = WorkflowJobTemplate
|
||||
serializer_class = WorkflowJobTemplateSerializer
|
||||
parent_model = Organization
|
||||
relationship = 'workflows'
|
||||
parent_key = 'organization'
|
||||
|
||||
|
||||
class OrganizationTeamsList(SubListCreateAttachDetachAPIView):
|
||||
|
||||
model = Team
|
||||
serializer_class = TeamSerializer
|
||||
parent_model = Organization
|
||||
relationship = 'teams'
|
||||
parent_key = 'organization'
|
||||
|
||||
|
||||
class OrganizationActivityStreamList(ActivityStreamEnforcementMixin, SubListAPIView):
|
||||
|
||||
model = ActivityStream
|
||||
serializer_class = ActivityStreamSerializer
|
||||
parent_model = Organization
|
||||
relationship = 'activitystream_set'
|
||||
search_fields = ('changes',)
|
||||
|
||||
|
||||
class OrganizationNotificationTemplatesList(SubListCreateAttachDetachAPIView):
|
||||
|
||||
model = NotificationTemplate
|
||||
serializer_class = NotificationTemplateSerializer
|
||||
parent_model = Organization
|
||||
relationship = 'notification_templates'
|
||||
parent_key = 'organization'
|
||||
|
||||
|
||||
class OrganizationNotificationTemplatesAnyList(SubListCreateAttachDetachAPIView):
|
||||
|
||||
model = NotificationTemplate
|
||||
serializer_class = NotificationTemplateSerializer
|
||||
parent_model = Organization
|
||||
relationship = 'notification_templates_any'
|
||||
|
||||
|
||||
class OrganizationNotificationTemplatesErrorList(SubListCreateAttachDetachAPIView):
|
||||
|
||||
model = NotificationTemplate
|
||||
serializer_class = NotificationTemplateSerializer
|
||||
parent_model = Organization
|
||||
relationship = 'notification_templates_error'
|
||||
|
||||
|
||||
class OrganizationNotificationTemplatesSuccessList(SubListCreateAttachDetachAPIView):
|
||||
|
||||
model = NotificationTemplate
|
||||
serializer_class = NotificationTemplateSerializer
|
||||
parent_model = Organization
|
||||
relationship = 'notification_templates_success'
|
||||
|
||||
|
||||
class OrganizationInstanceGroupsList(SubListAttachDetachAPIView):
|
||||
|
||||
model = InstanceGroup
|
||||
serializer_class = InstanceGroupSerializer
|
||||
parent_model = Organization
|
||||
relationship = 'instance_groups'
|
||||
|
||||
|
||||
class OrganizationAccessList(ResourceAccessList):
|
||||
|
||||
model = User # needs to be User for AccessLists
|
||||
parent_model = Organization
|
||||
|
||||
|
||||
class OrganizationObjectRolesList(SubListAPIView):
|
||||
|
||||
model = Role
|
||||
serializer_class = RoleSerializer
|
||||
parent_model = Organization
|
||||
search_fields = ('role_field', 'content_type__model',)
|
||||
|
||||
def get_queryset(self):
|
||||
po = self.get_parent_object()
|
||||
content_type = ContentType.objects.get_for_model(self.parent_model)
|
||||
return Role.objects.filter(content_type=content_type, object_id=po.pk)
|
||||
|
||||
awx/api/views/root.py (new file, 279 lines)
@@ -0,0 +1,279 @@
|
||||
# Copyright (c) 2018 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
import logging
|
||||
import operator
|
||||
import json
|
||||
from collections import OrderedDict
|
||||
|
||||
from django.conf import settings
|
||||
from django.utils.encoding import smart_text
|
||||
from django.utils.decorators import method_decorator
|
||||
from django.views.decorators.csrf import ensure_csrf_cookie
|
||||
from django.template.loader import render_to_string
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
from rest_framework.permissions import AllowAny, IsAuthenticated
|
||||
from rest_framework.response import Response
|
||||
from rest_framework import status
|
||||
|
||||
from awx.api.generics import APIView
|
||||
from awx.main.ha import is_ha_environment
|
||||
from awx.main.utils import (
|
||||
get_awx_version,
|
||||
get_ansible_version,
|
||||
get_custom_venv_choices,
|
||||
to_python_boolean,
|
||||
)
|
||||
from awx.api.versioning import reverse, get_request_version, drf_reverse
|
||||
from awx.conf.license import get_license, feature_enabled
|
||||
from awx.main.models import (
|
||||
Project,
|
||||
Organization,
|
||||
Instance,
|
||||
InstanceGroup,
|
||||
JobTemplate,
|
||||
)
|
||||
|
||||
logger = logging.getLogger('awx.api.views.root')
|
||||
|
||||
|
||||
class ApiRootView(APIView):
|
||||
|
||||
permission_classes = (AllowAny,)
|
||||
view_name = _('REST API')
|
||||
versioning_class = None
|
||||
swagger_topic = 'Versioning'
|
||||
|
||||
@method_decorator(ensure_csrf_cookie)
|
||||
def get(self, request, format=None):
|
||||
''' List supported API versions '''
|
||||
|
||||
v1 = reverse('api:api_v1_root_view', kwargs={'version': 'v1'})
|
||||
v2 = reverse('api:api_v2_root_view', kwargs={'version': 'v2'})
|
||||
data = OrderedDict()
|
||||
data['description'] = _('AWX REST API')
|
||||
data['current_version'] = v2
|
||||
data['available_versions'] = dict(v1 = v1, v2 = v2)
|
||||
data['oauth2'] = drf_reverse('api:oauth_authorization_root_view')
|
||||
if feature_enabled('rebranding'):
|
||||
data['custom_logo'] = settings.CUSTOM_LOGO
|
||||
data['custom_login_info'] = settings.CUSTOM_LOGIN_INFO
|
||||
return Response(data)
|
||||
|
||||
|
||||
class ApiOAuthAuthorizationRootView(APIView):
|
||||
|
||||
permission_classes = (AllowAny,)
|
||||
view_name = _("API OAuth 2 Authorization Root")
|
||||
versioning_class = None
|
||||
swagger_topic = 'Authentication'
|
||||
|
||||
def get(self, request, format=None):
|
||||
data = OrderedDict()
|
||||
data['authorize'] = drf_reverse('api:authorize')
|
||||
data['token'] = drf_reverse('api:token')
|
||||
data['revoke_token'] = drf_reverse('api:revoke-token')
|
||||
return Response(data)
|
||||
|
||||
|
||||
class ApiVersionRootView(APIView):
|
||||
|
||||
permission_classes = (AllowAny,)
|
||||
swagger_topic = 'Versioning'
|
||||
|
||||
def get(self, request, format=None):
|
||||
''' List top level resources '''
|
||||
data = OrderedDict()
|
||||
data['ping'] = reverse('api:api_v1_ping_view', request=request)
|
||||
data['instances'] = reverse('api:instance_list', request=request)
|
||||
data['instance_groups'] = reverse('api:instance_group_list', request=request)
|
||||
data['config'] = reverse('api:api_v1_config_view', request=request)
|
||||
data['settings'] = reverse('api:setting_category_list', request=request)
|
||||
data['me'] = reverse('api:user_me_list', request=request)
|
||||
data['dashboard'] = reverse('api:dashboard_view', request=request)
|
||||
data['organizations'] = reverse('api:organization_list', request=request)
|
||||
data['users'] = reverse('api:user_list', request=request)
|
||||
data['projects'] = reverse('api:project_list', request=request)
|
||||
data['project_updates'] = reverse('api:project_update_list', request=request)
|
||||
data['teams'] = reverse('api:team_list', request=request)
|
||||
data['credentials'] = reverse('api:credential_list', request=request)
|
||||
if get_request_version(request) > 1:
|
||||
data['credential_types'] = reverse('api:credential_type_list', request=request)
|
||||
data['applications'] = reverse('api:o_auth2_application_list', request=request)
|
||||
data['tokens'] = reverse('api:o_auth2_token_list', request=request)
|
||||
data['inventory'] = reverse('api:inventory_list', request=request)
|
||||
data['inventory_scripts'] = reverse('api:inventory_script_list', request=request)
|
||||
data['inventory_sources'] = reverse('api:inventory_source_list', request=request)
|
||||
data['inventory_updates'] = reverse('api:inventory_update_list', request=request)
|
||||
data['groups'] = reverse('api:group_list', request=request)
|
||||
data['hosts'] = reverse('api:host_list', request=request)
|
||||
data['job_templates'] = reverse('api:job_template_list', request=request)
|
||||
data['jobs'] = reverse('api:job_list', request=request)
|
||||
data['job_events'] = reverse('api:job_event_list', request=request)
|
||||
data['ad_hoc_commands'] = reverse('api:ad_hoc_command_list', request=request)
|
||||
data['system_job_templates'] = reverse('api:system_job_template_list', request=request)
|
||||
data['system_jobs'] = reverse('api:system_job_list', request=request)
|
||||
data['schedules'] = reverse('api:schedule_list', request=request)
|
||||
data['roles'] = reverse('api:role_list', request=request)
|
||||
data['notification_templates'] = reverse('api:notification_template_list', request=request)
|
||||
data['notifications'] = reverse('api:notification_list', request=request)
|
||||
data['labels'] = reverse('api:label_list', request=request)
|
||||
data['unified_job_templates'] = reverse('api:unified_job_template_list', request=request)
|
||||
data['unified_jobs'] = reverse('api:unified_job_list', request=request)
|
||||
data['activity_stream'] = reverse('api:activity_stream_list', request=request)
|
||||
data['workflow_job_templates'] = reverse('api:workflow_job_template_list', request=request)
|
||||
data['workflow_jobs'] = reverse('api:workflow_job_list', request=request)
|
||||
data['workflow_job_template_nodes'] = reverse('api:workflow_job_template_node_list', request=request)
|
||||
data['workflow_job_nodes'] = reverse('api:workflow_job_node_list', request=request)
|
||||
return Response(data)
|
||||
|
||||
|
||||
class ApiV1RootView(ApiVersionRootView):
|
||||
view_name = _('Version 1')
|
||||
|
||||
|
||||
class ApiV2RootView(ApiVersionRootView):
|
||||
view_name = _('Version 2')
|
||||
|
||||
|
||||
class ApiV1PingView(APIView):
|
||||
"""A simple view that reports very basic information about this
|
||||
instance, which is acceptable to be public information.
|
||||
"""
|
||||
permission_classes = (AllowAny,)
|
||||
authentication_classes = ()
|
||||
view_name = _('Ping')
|
||||
swagger_topic = 'System Configuration'
|
||||
|
||||
def get(self, request, format=None):
|
||||
"""Return some basic information about this instance
|
||||
|
||||
Everything returned here should be considered public / insecure, as
|
||||
this requires no auth and is intended for use by the installer process.
|
||||
"""
|
||||
response = {
|
||||
'ha': is_ha_environment(),
|
||||
'version': get_awx_version(),
|
||||
'active_node': settings.CLUSTER_HOST_ID,
|
||||
}
|
||||
|
||||
response['instances'] = []
|
||||
for instance in Instance.objects.all():
|
||||
response['instances'].append(dict(node=instance.hostname, heartbeat=instance.modified,
|
||||
capacity=instance.capacity, version=instance.version))
|
||||
response['instances'] = sorted(response['instances'], key=operator.itemgetter('node'))
|
||||
response['instance_groups'] = []
|
||||
for instance_group in InstanceGroup.objects.all():
|
||||
response['instance_groups'].append(dict(name=instance_group.name,
|
||||
capacity=instance_group.capacity,
|
||||
instances=[x.hostname for x in instance_group.instances.all()]))
|
||||
return Response(response)
|
||||
|
||||
|
||||
class ApiV1ConfigView(APIView):
|
||||
|
||||
permission_classes = (IsAuthenticated,)
|
||||
view_name = _('Configuration')
|
||||
swagger_topic = 'System Configuration'
|
||||
|
||||
def check_permissions(self, request):
|
||||
super(ApiV1ConfigView, self).check_permissions(request)
|
||||
if not request.user.is_superuser and request.method.lower() not in {'options', 'head', 'get'}:
|
||||
self.permission_denied(request) # Raises PermissionDenied exception.
|
||||
|
||||
def get(self, request, format=None):
|
||||
'''Return various sitewide configuration settings'''
|
||||
|
||||
if request.user.is_superuser or request.user.is_system_auditor:
|
||||
license_data = get_license(show_key=True)
|
||||
else:
|
||||
license_data = get_license(show_key=False)
|
||||
if not license_data.get('valid_key', False):
|
||||
license_data = {}
|
||||
if license_data and 'features' in license_data and 'activity_streams' in license_data['features']:
|
||||
# FIXME: Make the final setting value dependent on the feature?
|
||||
license_data['features']['activity_streams'] &= settings.ACTIVITY_STREAM_ENABLED
|
||||
|
||||
pendo_state = settings.PENDO_TRACKING_STATE if settings.PENDO_TRACKING_STATE in ('off', 'anonymous', 'detailed') else 'off'
|
||||
|
||||
data = dict(
|
||||
time_zone=settings.TIME_ZONE,
|
||||
license_info=license_data,
|
||||
version=get_awx_version(),
|
||||
ansible_version=get_ansible_version(),
|
||||
eula=render_to_string("eula.md") if license_data.get('license_type', 'UNLICENSED') != 'open' else '',
|
||||
analytics_status=pendo_state
|
||||
)
|
||||
|
||||
# If LDAP is enabled, user_ldap_fields will return a list of field
|
||||
# names that are managed by LDAP and should be read-only for users with
|
||||
# a non-empty ldap_dn attribute.
|
||||
if getattr(settings, 'AUTH_LDAP_SERVER_URI', None) and feature_enabled('ldap'):
|
||||
user_ldap_fields = ['username', 'password']
|
||||
user_ldap_fields.extend(getattr(settings, 'AUTH_LDAP_USER_ATTR_MAP', {}).keys())
|
||||
user_ldap_fields.extend(getattr(settings, 'AUTH_LDAP_USER_FLAGS_BY_GROUP', {}).keys())
|
||||
data['user_ldap_fields'] = user_ldap_fields
|
||||
|
||||
if request.user.is_superuser \
|
||||
or request.user.is_system_auditor \
|
||||
or Organization.accessible_objects(request.user, 'admin_role').exists() \
|
||||
or Organization.accessible_objects(request.user, 'auditor_role').exists():
|
||||
data.update(dict(
|
||||
project_base_dir = settings.PROJECTS_ROOT,
|
||||
project_local_paths = Project.get_local_path_choices(),
|
||||
custom_virtualenvs = get_custom_venv_choices()
|
||||
))
|
||||
elif JobTemplate.accessible_objects(request.user, 'admin_role').exists():
|
||||
data['custom_virtualenvs'] = get_custom_venv_choices()
|
||||
|
||||
return Response(data)
|
||||
|
||||
def post(self, request):
|
||||
if not isinstance(request.data, dict):
|
||||
return Response({"error": _("Invalid license data")}, status=status.HTTP_400_BAD_REQUEST)
|
||||
if "eula_accepted" not in request.data:
|
||||
return Response({"error": _("Missing 'eula_accepted' property")}, status=status.HTTP_400_BAD_REQUEST)
|
||||
try:
|
||||
eula_accepted = to_python_boolean(request.data["eula_accepted"])
|
||||
except ValueError:
|
||||
return Response({"error": _("'eula_accepted' value is invalid")}, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
if not eula_accepted:
|
||||
return Response({"error": _("'eula_accepted' must be True")}, status=status.HTTP_400_BAD_REQUEST)
|
||||
request.data.pop("eula_accepted")
|
||||
try:
|
||||
data_actual = json.dumps(request.data)
|
||||
except Exception:
|
||||
logger.info(smart_text(u"Invalid JSON submitted for license."),
|
||||
extra=dict(actor=request.user.username))
|
||||
return Response({"error": _("Invalid JSON")}, status=status.HTTP_400_BAD_REQUEST)
|
||||
try:
|
||||
from awx.main.utils.common import get_licenser
|
||||
license_data = json.loads(data_actual)
|
||||
license_data_validated = get_licenser(**license_data).validate()
|
||||
except Exception:
|
||||
logger.warning(smart_text(u"Invalid license submitted."),
|
||||
extra=dict(actor=request.user.username))
|
||||
return Response({"error": _("Invalid License")}, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
# If the license is valid, write it to the database.
|
||||
if license_data_validated['valid_key']:
|
||||
settings.LICENSE = license_data
|
||||
settings.TOWER_URL_BASE = "{}://{}".format(request.scheme, request.get_host())
|
||||
return Response(license_data_validated)
|
||||
|
||||
logger.warning(smart_text(u"Invalid license submitted."),
|
||||
extra=dict(actor=request.user.username))
|
||||
return Response({"error": _("Invalid license")}, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
def delete(self, request):
|
||||
try:
|
||||
settings.LICENSE = {}
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
except Exception:
|
||||
# FIX: Log
|
||||
return Response({"error": _("Failed to remove license.")}, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
# Python
|
||||
import logging
|
||||
import urlparse
|
||||
import urllib.parse as urlparse
|
||||
from collections import OrderedDict
|
||||
|
||||
# Django
|
||||
@@ -71,7 +71,7 @@ class StringListBooleanField(ListField):
|
||||
return False
|
||||
elif value in NullBooleanField.NULL_VALUES:
|
||||
return None
|
||||
elif isinstance(value, basestring):
|
||||
elif isinstance(value, str):
|
||||
return self.child.to_representation(value)
|
||||
except TypeError:
|
||||
pass
|
||||
@@ -88,7 +88,7 @@ class StringListBooleanField(ListField):
|
||||
return False
|
||||
elif data in NullBooleanField.NULL_VALUES:
|
||||
return None
|
||||
elif isinstance(data, basestring):
|
||||
elif isinstance(data, str):
|
||||
return self.child.run_validation(data)
|
||||
except TypeError:
|
||||
pass
|
||||
|
||||
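The basestring checks dropped in the two hunks above have no direct Python 3 equivalent: str is the only text type there, and bytes has to be handled explicitly if it can ever appear. A small, hedged sketch of the Python 3 idiom:

value = b'yes'                      # e.g. data that arrived as raw bytes
if isinstance(value, bytes):
    value = value.decode('utf-8')   # normalize to text before further checks
assert isinstance(value, str)       # the single text type on Python 3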
@@ -460,10 +460,10 @@ class Command(BaseCommand):
|
||||
elif file_to_comment not in to_comment_patterns:
|
||||
to_comment_patterns.append(file_to_comment)
|
||||
# Run once in dry-run mode to catch any errors from updating the files.
|
||||
diffs = comment_assignments(to_comment_patterns, to_comment.keys(), dry_run=True, backup_suffix=self.backup_suffix)
|
||||
diffs = comment_assignments(to_comment_patterns, list(to_comment.keys()), dry_run=True, backup_suffix=self.backup_suffix)
|
||||
# Then, if really updating, run again.
|
||||
if not self.dry_run and not self.no_comment:
|
||||
diffs = comment_assignments(to_comment_patterns, to_comment.keys(), dry_run=False, backup_suffix=self.backup_suffix)
|
||||
diffs = comment_assignments(to_comment_patterns, list(to_comment.keys()), dry_run=False, backup_suffix=self.backup_suffix)
|
||||
if license_file_to_comment:
|
||||
diffs.extend(self._comment_license_file(dry_run=False))
|
||||
if local_settings_file_to_comment:
|
||||
|
||||
awx/conf/migrations/0006_v331_ldap_group_type.py (new file, 18 lines)
@@ -0,0 +1,18 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from __future__ import unicode_literals
|
||||
|
||||
# AWX
|
||||
from awx.conf.migrations._ldap_group_type import fill_ldap_group_type_params
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('conf', '0005_v330_rename_two_session_settings'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(fill_ldap_group_type_params),
|
||||
]
|
||||
awx/conf/migrations/_ldap_group_type.py (new file, 30 lines)
@@ -0,0 +1,30 @@
|
||||
|
||||
import inspect
|
||||
|
||||
from django.conf import settings
|
||||
from django.utils.timezone import now
|
||||
|
||||
|
||||
def fill_ldap_group_type_params(apps, schema_editor):
|
||||
group_type = settings.AUTH_LDAP_GROUP_TYPE
|
||||
Setting = apps.get_model('conf', 'Setting')
|
||||
|
||||
group_type_params = {'name_attr': 'cn', 'member_attr': 'member'}
|
||||
qs = Setting.objects.filter(key='AUTH_LDAP_GROUP_TYPE_PARAMS')
|
||||
entry = None
|
||||
if qs.exists():
|
||||
entry = qs[0]
|
||||
group_type_params = entry.value
|
||||
else:
|
||||
entry = Setting(key='AUTH_LDAP_GROUP_TYPE_PARAMS',
|
||||
value=group_type_params,
|
||||
created=now(),
|
||||
modified=now())
|
||||
|
||||
init_attrs = set(inspect.getargspec(group_type.__init__).args[1:])
|
||||
for k in list(group_type_params.keys()):
|
||||
if k not in init_attrs:
|
||||
del group_type_params[k]
|
||||
|
||||
entry.value = group_type_params
|
||||
entry.save()
|
||||
@@ -33,7 +33,7 @@ class Setting(CreatedModifiedModel):
|
||||
on_delete=models.CASCADE,
|
||||
))
|
||||
|
||||
def __unicode__(self):
|
||||
def __str__(self):
|
||||
try:
|
||||
json_value = json.dumps(self.value)
|
||||
except ValueError:
|
||||
|
||||
@@ -6,18 +6,17 @@ import re
|
||||
import sys
|
||||
import threading
|
||||
import time
|
||||
import StringIO
|
||||
import traceback
|
||||
import urllib
|
||||
|
||||
import six
|
||||
import urllib.parse
|
||||
from io import StringIO
|
||||
|
||||
# Django
|
||||
from django.conf import LazySettings
|
||||
from django.conf import settings, UserSettingsHolder
|
||||
from django.core.cache import cache as django_cache
|
||||
from django.core.exceptions import ImproperlyConfigured
|
||||
from django.db import ProgrammingError, OperationalError, transaction, connection
|
||||
from django.db import transaction, connection
|
||||
from django.db.utils import Error as DBError
|
||||
from django.utils.functional import cached_property
|
||||
|
||||
# Django REST Framework
|
||||
@@ -67,7 +66,7 @@ def normalize_broker_url(value):
|
||||
match = re.search('(amqp://[^:]+:)(.*)', parts[0])
|
||||
if match:
|
||||
prefix, password = match.group(1), match.group(2)
|
||||
parts[0] = prefix + urllib.quote(password)
|
||||
parts[0] = prefix + urllib.parse.quote(password)
|
||||
return '@'.join(parts)
|
||||
|
||||
|
||||
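The switch to urllib.parse.quote keeps the old urllib.quote behaviour: percent-encode the password portion of the broker URL so characters such as '@' or ':' cannot be mistaken for URL structure. A standalone illustration with a made-up credential:

import urllib.parse

password = 'p@ss:word'                                  # hypothetical password
print(urllib.parse.quote(password))                     # p%40ss%3Aword
print('amqp://guest:' + urllib.parse.quote(password) + '@rabbitmq:5672//')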
@@ -90,21 +89,21 @@ def _ctit_db_wrapper(trans_safe=False):
|
||||
logger.debug('Obtaining database settings in spite of broken transaction.')
|
||||
transaction.set_rollback(False)
|
||||
yield
|
||||
except (ProgrammingError, OperationalError):
|
||||
except DBError:
|
||||
if 'migrate' in sys.argv and get_tower_migration_version() < '310':
|
||||
logger.info('Using default settings until version 3.1 migration.')
|
||||
else:
|
||||
# We want the _full_ traceback with the context
|
||||
# First we get the current call stack, which constitutes the "top",
|
||||
# it has the context up to the point where the context manager is used
|
||||
top_stack = StringIO.StringIO()
|
||||
top_stack = StringIO()
|
||||
traceback.print_stack(file=top_stack)
|
||||
top_lines = top_stack.getvalue().strip('\n').split('\n')
|
||||
top_stack.close()
|
||||
# Get "bottom" stack from the local error that happened
|
||||
# inside of the "with" block this wraps
|
||||
exc_type, exc_value, exc_traceback = sys.exc_info()
|
||||
bottom_stack = StringIO.StringIO()
|
||||
bottom_stack = StringIO()
|
||||
traceback.print_tb(exc_traceback, file=bottom_stack)
|
||||
bottom_lines = bottom_stack.getvalue().strip('\n').split('\n')
|
||||
# Glue together top and bottom where overlap is found
|
||||
@@ -168,15 +167,6 @@ class EncryptedCacheProxy(object):
|
||||
def get(self, key, **kwargs):
|
||||
value = self.cache.get(key, **kwargs)
|
||||
value = self._handle_encryption(self.decrypter, key, value)
|
||||
|
||||
# python-memcached auto-encodes unicode on cache set in python2
|
||||
# https://github.com/linsomniac/python-memcached/issues/79
|
||||
# https://github.com/linsomniac/python-memcached/blob/288c159720eebcdf667727a859ef341f1e908308/memcache.py#L961
|
||||
if six.PY2 and isinstance(value, six.binary_type):
|
||||
try:
|
||||
six.text_type(value)
|
||||
except UnicodeDecodeError:
|
||||
value = value.decode('utf-8')
|
||||
logger.debug('cache get(%r, %r) -> %r', key, empty, filter_sensitive(self.registry, key, value))
|
||||
return value
|
||||
|
||||
|
||||
@@ -9,15 +9,11 @@ from django.core.cache import cache
|
||||
from django.dispatch import receiver
|
||||
|
||||
# Tower
|
||||
import awx.main.signals
|
||||
from awx.conf import settings_registry
|
||||
from awx.conf.models import Setting
|
||||
from awx.conf.serializers import SettingSerializer
|
||||
|
||||
logger = logging.getLogger('awx.conf.signals')
|
||||
|
||||
awx.main.signals.model_serializer_mapping[Setting] = SettingSerializer
|
||||
|
||||
__all__ = []
|
||||
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import pytest
|
||||
import mock
|
||||
from unittest import mock
|
||||
|
||||
from rest_framework import serializers
|
||||
|
||||
|
||||
@@ -1,38 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright (c) 2017 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
import pytest
|
||||
import mock
|
||||
|
||||
from django.apps import apps
|
||||
from awx.conf.migrations._reencrypt import (
|
||||
replace_aesecb_fernet,
|
||||
encrypt_field,
|
||||
decrypt_field,
|
||||
)
|
||||
from awx.conf.settings import Setting
|
||||
from awx.main.utils import decrypt_field as new_decrypt_field
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.parametrize("old_enc, new_enc, value", [
|
||||
('$encrypted$UTF8$AES', '$encrypted$UTF8$AESCBC$', u'Iñtërnâtiônàlizætiøn'),
|
||||
('$encrypted$AES$', '$encrypted$AESCBC$', 'test'),
|
||||
])
|
||||
def test_settings(old_enc, new_enc, value):
|
||||
with mock.patch('awx.conf.models.encrypt_field', encrypt_field):
|
||||
with mock.patch('awx.conf.settings.decrypt_field', decrypt_field):
|
||||
setting = Setting.objects.create(key='SOCIAL_AUTH_GITHUB_SECRET', value=value)
|
||||
assert setting.value.startswith(old_enc)
|
||||
|
||||
replace_aesecb_fernet(apps, None)
|
||||
setting.refresh_from_db()
|
||||
|
||||
assert setting.value.startswith(new_enc)
|
||||
assert new_decrypt_field(setting, 'value') == value
|
||||
|
||||
# This is here for a side-effect.
|
||||
# Exception if the encryption type of AESCBC is not properly skipped, ensures
|
||||
# our `startswith` calls don't have typos
|
||||
replace_aesecb_fernet(apps, None)
|
||||
@@ -4,6 +4,7 @@
|
||||
# All Rights Reserved.
|
||||
|
||||
from contextlib import contextmanager
|
||||
import codecs
|
||||
from uuid import uuid4
|
||||
import time
|
||||
|
||||
@@ -67,7 +68,7 @@ def test_cached_settings_unicode_is_auto_decoded(settings):
|
||||
# https://github.com/linsomniac/python-memcached/issues/79
|
||||
# https://github.com/linsomniac/python-memcached/blob/288c159720eebcdf667727a859ef341f1e908308/memcache.py#L961
|
||||
|
||||
value = six.u('Iñtërnâtiônàlizætiøn').encode('utf-8') # this simulates what python-memcached does on cache.set()
|
||||
value = 'Iñtërnâtiônàlizætiøn' # this simulates what python-memcached does on cache.set()
|
||||
settings.cache.set('DEBUG', value)
|
||||
assert settings.cache.get('DEBUG') == six.u('Iñtërnâtiônàlizætiøn')
|
||||
|
||||
@@ -262,7 +263,7 @@ def test_setting_from_db_with_unicode(settings, mocker, encrypted):
|
||||
encrypted=encrypted
|
||||
)
|
||||
# this simulates a bug in python-memcached; see https://github.com/linsomniac/python-memcached/issues/79
|
||||
value = six.u('Iñtërnâtiônàlizætiøn').encode('utf-8')
|
||||
value = 'Iñtërnâtiônàlizætiøn'
|
||||
|
||||
setting_from_db = mocker.Mock(id=1, key='AWX_SOME_SETTING', value=value)
|
||||
mocks = mocker.Mock(**{
|
||||
@@ -272,8 +273,8 @@ def test_setting_from_db_with_unicode(settings, mocker, encrypted):
|
||||
}),
|
||||
})
|
||||
with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks):
|
||||
assert settings.AWX_SOME_SETTING == six.u('Iñtërnâtiônàlizætiøn')
|
||||
assert settings.cache.get('AWX_SOME_SETTING') == six.u('Iñtërnâtiônàlizætiøn')
|
||||
assert settings.AWX_SOME_SETTING == 'Iñtërnâtiônàlizætiøn'
|
||||
assert settings.cache.get('AWX_SOME_SETTING') == 'Iñtërnâtiônàlizætiøn'
|
||||
|
||||
|
||||
@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
|
||||
@@ -434,7 +435,7 @@ def test_sensitive_cache_data_is_encrypted(settings, mocker):
|
||||
|
||||
def rot13(obj, attribute):
|
||||
assert obj.pk == 123
|
||||
return getattr(obj, attribute).encode('rot13')
|
||||
return codecs.encode(getattr(obj, attribute), 'rot_13')
|
||||
|
||||
native_cache = LocMemCache(str(uuid4()), {})
|
||||
cache = EncryptedCacheProxy(
|
||||
@@ -471,7 +472,7 @@ def test_readonly_sensitive_cache_data_is_encrypted(settings):
|
||||
|
||||
def rot13(obj, attribute):
|
||||
assert obj.pk is None
|
||||
return getattr(obj, attribute).encode('rot13')
|
||||
return codecs.encode(getattr(obj, attribute), 'rot_13')
|
||||
|
||||
native_cache = LocMemCache(str(uuid4()), {})
|
||||
cache = EncryptedCacheProxy(
|
||||
|
||||
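The tests' toy encrypter changes because Python 3 removed the text-to-text str.encode('rot13') path; str.encode must now produce bytes. The codecs module still exposes the transform directly:

import codecs

plain = 'secret-value'
scrambled = codecs.encode(plain, 'rot_13')
assert scrambled == 'frperg-inyhr'
# rot13 is its own inverse, which is what makes it convenient for round-trip tests
assert codecs.decode(scrambled, 'rot_13') == plain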
@@ -102,7 +102,7 @@ def comment_assignments_in_file(filename, assignment_names, dry_run=True, backup
|
||||
if not dry_run:
|
||||
if backup_filename:
|
||||
shutil.copy2(filename, backup_filename)
|
||||
with open(filename, 'wb') as fileobj:
|
||||
with open(filename, 'w') as fileobj:
|
||||
fileobj.write(new_file_data)
|
||||
return '\n'.join(diff_lines)
|
||||
|
||||
|
||||
@@ -72,7 +72,7 @@ class SettingSingletonDetail(RetrieveUpdateDestroyAPIView):
|
||||
|
||||
def get_queryset(self):
|
||||
self.category_slug = self.kwargs.get('category_slug', 'all')
|
||||
all_category_slugs = settings_registry.get_registered_categories(features_enabled=get_licensed_features()).keys()
|
||||
all_category_slugs = list(settings_registry.get_registered_categories(features_enabled=get_licensed_features()).keys())
|
||||
for slug_to_delete in VERSION_SPECIFIC_CATEGORIES_TO_EXCLUDE[get_request_version(self.request)]:
|
||||
all_category_slugs.remove(slug_to_delete)
|
||||
if self.request.user.is_superuser or getattr(self.request.user, 'is_system_auditor', False):
|
||||
@@ -123,7 +123,7 @@ class SettingSingletonDetail(RetrieveUpdateDestroyAPIView):
|
||||
if key == 'LICENSE' or settings_registry.is_setting_read_only(key):
|
||||
continue
|
||||
if settings_registry.is_setting_encrypted(key) and \
|
||||
isinstance(value, basestring) and \
|
||||
isinstance(value, str) and \
|
||||
value.startswith('$encrypted$'):
|
||||
continue
|
||||
setattr(serializer.instance, key, value)
|
||||
@@ -210,7 +210,7 @@ class SettingLoggingTest(GenericAPIView):
|
||||
# in URL patterns and reverse URL lookups, converting CamelCase names to
|
||||
# lowercase_with_underscore (e.g. MyView.as_view() becomes my_view).
|
||||
this_module = sys.modules[__name__]
|
||||
for attr, value in locals().items():
|
||||
for attr, value in list(locals().items()):
|
||||
if isinstance(value, type) and issubclass(value, APIView):
|
||||
name = camelcase_to_underscore(attr)
|
||||
view = value.as_view()
|
||||
|
||||
@@ -35,8 +35,6 @@ except ImportError:
|
||||
os.environ['VIRTUAL_ENV']
|
||||
))
|
||||
|
||||
from six.moves import xrange
|
||||
|
||||
__all__ = ['event_context']
|
||||
|
||||
|
||||
@@ -56,9 +54,8 @@ class IsolatedFileWrite:
|
||||
filename = '{}-partial.json'.format(event_uuid)
|
||||
dropoff_location = os.path.join(self.private_data_dir, 'artifacts', 'job_events', filename)
|
||||
write_location = '.'.join([dropoff_location, 'tmp'])
|
||||
partial_data = json.dumps(value)
|
||||
with os.fdopen(os.open(write_location, os.O_WRONLY | os.O_CREAT, stat.S_IRUSR | stat.S_IWUSR), 'w') as f:
|
||||
f.write(partial_data)
|
||||
f.write(value)
|
||||
os.rename(write_location, dropoff_location)
|
||||
|
||||
|
||||
@@ -154,7 +151,7 @@ class EventContext(object):
|
||||
if event not in ('playbook_on_stats',) and "res" in event_data and len(str(event_data['res'])) > max_res:
|
||||
event_data['res'] = {}
|
||||
event_dict = dict(event=event, event_data=event_data)
|
||||
for key in event_data.keys():
|
||||
for key in list(event_data.keys()):
|
||||
if key in ('job_id', 'ad_hoc_command_id', 'project_update_id', 'uuid', 'parent_uuid', 'created',):
|
||||
event_dict[key] = event_data.pop(key)
|
||||
elif key in ('verbosity', 'pid'):
|
||||
@@ -165,11 +162,11 @@ class EventContext(object):
|
||||
return {}
|
||||
|
||||
def dump(self, fileobj, data, max_width=78, flush=False):
|
||||
b64data = base64.b64encode(json.dumps(data))
|
||||
b64data = base64.b64encode(json.dumps(data).encode('utf-8')).decode()
|
||||
with self.display_lock:
|
||||
# pattern corresponding to OutputEventFilter expectation
|
||||
fileobj.write(u'\x1b[K')
|
||||
for offset in xrange(0, len(b64data), max_width):
|
||||
for offset in range(0, len(b64data), max_width):
|
||||
chunk = b64data[offset:offset + max_width]
|
||||
escaped_chunk = u'{}\x1b[{}D'.format(chunk, len(chunk))
|
||||
fileobj.write(escaped_chunk)
|
||||
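The added encode/decode pair in dump() reflects that base64.b64encode only accepts bytes on Python 3 and also returns bytes; the JSON payload is encoded to UTF-8 for base64 and the result decoded back to str so it can be written to a text stream. A standalone round trip:

import base64
import json

payload = {'uuid': 'abc-123'}                                        # illustrative event stub
b64data = base64.b64encode(json.dumps(payload).encode('utf-8')).decode()
assert isinstance(b64data, str)
# what a consumer of the stream would do to recover the payload:
assert json.loads(base64.b64decode(b64data).decode('utf-8')) == payload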
@@ -179,7 +176,7 @@ class EventContext(object):
|
||||
|
||||
def dump_begin(self, fileobj):
|
||||
begin_dict = self.get_begin_dict()
|
||||
self.cache.set(":1:ev-{}".format(begin_dict['uuid']), begin_dict)
|
||||
self.cache.set(":1:ev-{}".format(begin_dict['uuid']), json.dumps(begin_dict))
|
||||
self.dump(fileobj, {'uuid': begin_dict['uuid']})
|
||||
|
||||
def dump_end(self, fileobj):
|
||||
|
||||
@@ -475,6 +475,15 @@ class BaseCallbackModule(CallbackBase):
|
||||
with self.capture_event_data('runner_retry', **event_data):
|
||||
super(BaseCallbackModule, self).v2_runner_retry(result)
|
||||
|
||||
def v2_runner_on_start(self, host, task):
|
||||
event_data = dict(
|
||||
host=host.get_name(),
|
||||
task=task
|
||||
)
|
||||
with self.capture_event_data('runner_on_start', **event_data):
|
||||
super(BaseCallbackModule, self).v2_runner_on_start(host, task)
|
||||
|
||||
|
||||
|
||||
class AWXDefaultCallbackModule(BaseCallbackModule, DefaultCallbackModule):
|
||||
|
||||
|
||||
@@ -5,11 +5,11 @@ from __future__ import absolute_import
|
||||
|
||||
from collections import OrderedDict
|
||||
import json
|
||||
import mock
|
||||
import os
|
||||
import shutil
|
||||
import sys
|
||||
import tempfile
|
||||
from unittest import mock
|
||||
|
||||
import pytest
|
||||
|
||||
|
||||
File diff suppressed because it is too large
@@ -3086,7 +3086,7 @@ msgstr "URL CloudForms"
|
||||
|
||||
#: awx/main/models/credential/__init__.py:982
|
||||
msgid ""
|
||||
"Enter the URL for the virtual machine that corresponds to your CloudForm "
|
||||
"Enter the URL for the virtual machine that corresponds to your CloudForms "
|
||||
"instance. For example, https://cloudforms.example.org"
|
||||
msgstr ""
|
||||
"Introduzca la URL para la máquina virtual que corresponda a su instancia "
|
||||
|
||||
@@ -3099,7 +3099,7 @@ msgstr "URL CloudForms"
|
||||
|
||||
#: awx/main/models/credential/__init__.py:982
|
||||
msgid ""
|
||||
"Enter the URL for the virtual machine that corresponds to your CloudForm "
|
||||
"Enter the URL for the virtual machine that corresponds to your CloudForms "
|
||||
"instance. For example, https://cloudforms.example.org"
|
||||
msgstr ""
|
||||
"Veuillez saisir l’URL de la machine virtuelle qui correspond à votre "
|
||||
|
||||
@@ -2858,7 +2858,7 @@ msgstr "CloudForms URL"
|
||||
|
||||
#: awx/main/models/credential/__init__.py:982
|
||||
msgid ""
|
||||
"Enter the URL for the virtual machine that corresponds to your CloudForm "
|
||||
"Enter the URL for the virtual machine that corresponds to your CloudForms "
|
||||
"instance. For example, https://cloudforms.example.org"
|
||||
msgstr ""
|
||||
"CloudForms インスタンスに対応する仮想マシンの URL を入力します (例: https://cloudforms.example.org)。"
|
||||
|
||||
@@ -3072,7 +3072,7 @@ msgstr "CloudForms-URL"
|
||||
|
||||
#: awx/main/models/credential/__init__.py:982
|
||||
msgid ""
|
||||
"Enter the URL for the virtual machine that corresponds to your CloudForm "
|
||||
"Enter the URL for the virtual machine that corresponds to your CloudForms "
|
||||
"instance. For example, https://cloudforms.example.org"
|
||||
msgstr ""
|
||||
"Voer de URL in voor de virtuele machine die overeenkomt met uw CloudForm-"
|
||||
|
||||
@@ -524,7 +524,7 @@ class UserAccess(BaseAccess):
|
||||
# A user can be changed if they are themselves, or by org admins or
|
||||
# superusers. Change permission implies changing only certain fields
|
||||
# that a user should be able to edit for themselves.
|
||||
if not settings.MANAGE_ORGANIZATION_AUTH:
|
||||
if not settings.MANAGE_ORGANIZATION_AUTH and not self.user.is_superuser:
|
||||
return False
|
||||
return bool(self.user == obj or self.can_admin(obj, data))
|
||||
|
||||
@@ -577,7 +577,7 @@ class UserAccess(BaseAccess):
|
||||
return False
|
||||
|
||||
def can_attach(self, obj, sub_obj, relationship, *args, **kwargs):
|
||||
if not settings.MANAGE_ORGANIZATION_AUTH:
|
||||
if not settings.MANAGE_ORGANIZATION_AUTH and not self.user.is_superuser:
|
||||
return False
|
||||
|
||||
# Reverse obj and sub_obj, defer to RoleAccess if this is a role assignment.
|
||||
@@ -587,7 +587,7 @@ class UserAccess(BaseAccess):
|
||||
return super(UserAccess, self).can_attach(obj, sub_obj, relationship, *args, **kwargs)
|
||||
|
||||
def can_unattach(self, obj, sub_obj, relationship, *args, **kwargs):
|
||||
if not settings.MANAGE_ORGANIZATION_AUTH:
|
||||
if not settings.MANAGE_ORGANIZATION_AUTH and not self.user.is_superuser:
|
||||
return False
|
||||
|
||||
if relationship == 'roles':
|
||||
@@ -1157,13 +1157,10 @@ class TeamAccess(BaseAccess):
|
||||
def can_attach(self, obj, sub_obj, relationship, *args, **kwargs):
|
||||
"""Reverse obj and sub_obj, defer to RoleAccess if this is an assignment
|
||||
of a resource role to the team."""
|
||||
if not settings.MANAGE_ORGANIZATION_AUTH:
|
||||
return False
|
||||
# MANAGE_ORGANIZATION_AUTH setting checked in RoleAccess
|
||||
if isinstance(sub_obj, Role):
|
||||
if sub_obj.content_object is None:
|
||||
raise PermissionDenied(_("The {} role cannot be assigned to a team").format(sub_obj.name))
|
||||
elif isinstance(sub_obj.content_object, User):
|
||||
raise PermissionDenied(_("The admin_role for a User cannot be assigned to a team"))
|
||||
|
||||
if isinstance(sub_obj.content_object, ResourceMixin):
|
||||
role_access = RoleAccess(self.user)
|
||||
@@ -1175,9 +1172,7 @@ class TeamAccess(BaseAccess):
|
||||
*args, **kwargs)
|
||||
|
||||
def can_unattach(self, obj, sub_obj, relationship, *args, **kwargs):
|
||||
if not settings.MANAGE_ORGANIZATION_AUTH:
|
||||
return False
|
||||
|
||||
# MANAGE_ORGANIZATION_AUTH setting checked in RoleAccess
|
||||
if isinstance(sub_obj, Role):
|
||||
if isinstance(sub_obj.content_object, ResourceMixin):
|
||||
role_access = RoleAccess(self.user)
|
||||
@@ -1213,7 +1208,7 @@ class ProjectAccess(BaseAccess):
|
||||
@check_superuser
|
||||
def can_add(self, data):
|
||||
if not data: # So the browseable API will work
|
||||
return Organization.accessible_objects(self.user, 'admin_role').exists()
|
||||
return Organization.accessible_objects(self.user, 'project_admin_role').exists()
|
||||
return (self.check_related('organization', Organization, data, role_field='project_admin_role', mandatory=True) and
|
||||
self.check_related('credential', Credential, data, role_field='use_role'))
|
||||
|
||||
@@ -1281,6 +1276,7 @@ class JobTemplateAccess(BaseAccess):
|
||||
'instance_groups',
|
||||
'credentials__credential_type',
|
||||
Prefetch('labels', queryset=Label.objects.all().order_by('name')),
|
||||
Prefetch('last_job', queryset=UnifiedJob.objects.non_polymorphic()),
|
||||
)
|
||||
|
||||
def filtered_queryset(self):
|
||||
@@ -1394,13 +1390,15 @@ class JobTemplateAccess(BaseAccess):
|
||||
'job_tags', 'force_handlers', 'skip_tags', 'ask_variables_on_launch',
|
||||
'ask_tags_on_launch', 'ask_job_type_on_launch', 'ask_skip_tags_on_launch',
|
||||
'ask_inventory_on_launch', 'ask_credential_on_launch', 'survey_enabled',
|
||||
'custom_virtualenv', 'diff_mode',
|
||||
'custom_virtualenv', 'diff_mode', 'timeout', 'job_slice_count',
|
||||
|
||||
# These fields are ignored, but it is convenient for QA to allow clients to post them
|
||||
'last_job_run', 'created', 'modified',
|
||||
]
|
||||
|
||||
for k, v in data.items():
|
||||
if k not in [x.name for x in obj._meta.concrete_fields]:
|
||||
continue
|
||||
if hasattr(obj, k) and getattr(obj, k) != v:
|
||||
if k not in field_whitelist and v != getattr(obj, '%s_id' % k, None) \
|
||||
and not (hasattr(obj, '%s_id' % k) and getattr(obj, '%s_id' % k) is None and v == ''): # Equate '' to None in the case of foreign keys
|
||||
@@ -1840,8 +1838,10 @@ class WorkflowJobTemplateAccess(BaseAccess):
|
||||
if 'survey_enabled' in data and data['survey_enabled']:
|
||||
self.check_license(feature='surveys')
|
||||
|
||||
return self.check_related('organization', Organization, data, role_field='workflow_admin_role',
|
||||
mandatory=True)
|
||||
return (
|
||||
self.check_related('organization', Organization, data, role_field='workflow_admin_role', mandatory=True) and
|
||||
self.check_related('inventory', Inventory, data, role_field='use_role')
|
||||
)
|
||||
|
||||
def can_copy(self, obj):
|
||||
if self.save_messages:
|
||||
@@ -1895,8 +1895,11 @@ class WorkflowJobTemplateAccess(BaseAccess):
|
||||
if self.user.is_superuser:
|
||||
return True
|
||||
|
||||
return (self.check_related('organization', Organization, data, role_field='workflow_admin_role', obj=obj) and
|
||||
self.user in obj.admin_role)
|
||||
return (
|
||||
self.check_related('organization', Organization, data, role_field='workflow_admin_role', obj=obj) and
|
||||
self.check_related('inventory', Inventory, data, role_field='use_role', obj=obj) and
|
||||
self.user in obj.admin_role
|
||||
)
|
||||
|
||||
def can_delete(self, obj):
|
||||
return self.user.is_superuser or self.user in obj.admin_role
|
||||
@@ -1954,19 +1957,29 @@ class WorkflowJobAccess(BaseAccess):
|
||||
if not template:
|
||||
return False
|
||||
|
||||
# If job was launched by another user, it could have survey passwords
|
||||
if obj.created_by_id != self.user.pk:
|
||||
# Obtain prompts used to start original job
|
||||
JobLaunchConfig = obj._meta.get_field('launch_config').related_model
|
||||
try:
|
||||
config = JobLaunchConfig.objects.get(job=obj)
|
||||
except JobLaunchConfig.DoesNotExist:
|
||||
config = None
|
||||
# Obtain prompts used to start original job
|
||||
JobLaunchConfig = obj._meta.get_field('launch_config').related_model
|
||||
try:
|
||||
config = JobLaunchConfig.objects.get(job=obj)
|
||||
except JobLaunchConfig.DoesNotExist:
|
||||
if self.save_messages:
|
||||
self.messages['detail'] = _('Workflow Job was launched with unknown prompts.')
|
||||
return False
|
||||
|
||||
if config is None or config.prompts_dict():
|
||||
# Check if access to prompts to prevent relaunch
|
||||
if config.prompts_dict():
|
||||
if obj.created_by_id != self.user.pk:
|
||||
if self.save_messages:
|
||||
self.messages['detail'] = _('Job was launched with prompts provided by another user.')
|
||||
return False
|
||||
if not JobLaunchConfigAccess(self.user).can_add({'reference_obj': config}):
|
||||
if self.save_messages:
|
||||
self.messages['detail'] = _('Job was launched with prompts you lack access to.')
|
||||
return False
|
||||
if config.has_unprompted(template):
|
||||
if self.save_messages:
|
||||
self.messages['detail'] = _('Job was launched with prompts no longer accepted.')
|
||||
return False
|
||||
|
||||
# execute permission to WFJT is mandatory for any relaunch
|
||||
return (self.user in template.execute_role)
|
||||
@@ -2552,14 +2565,13 @@ class RoleAccess(BaseAccess):
# Unsupported for now
return False

def can_attach(self, obj, sub_obj, relationship, data,
skip_sub_obj_read_check=False):
return self.can_unattach(obj, sub_obj, relationship, data, skip_sub_obj_read_check)
def can_attach(self, obj, sub_obj, relationship, *args, **kwargs):
return self.can_unattach(obj, sub_obj, relationship, *args, **kwargs)

@check_superuser
def can_unattach(self, obj, sub_obj, relationship, data=None, skip_sub_obj_read_check=False):
if isinstance(obj.content_object, Team):
if not settings.MANAGE_ORGANIZATION_AUTH:
if not settings.MANAGE_ORGANIZATION_AUTH and not self.user.is_superuser:
return False

if not skip_sub_obj_read_check and relationship in ['members', 'member_role.parents', 'parents']:

@@ -197,6 +197,18 @@ register(
category_slug='jobs',
)

register(
'AWX_ISOLATED_VERBOSITY',
field_class=fields.IntegerField,
min_value=0,
max_value=5,
label=_('Verbosity level for isolated node management tasks'),
help_text=_('This can be raised to aid in debugging connection issues for isolated task execution'),
category=_('Jobs'),
category_slug='jobs',
default=0
)

register(
'AWX_ISOLATED_CHECK_INTERVAL',
field_class=fields.IntegerField,

@@ -4,6 +4,7 @@ import logging
from channels import Group
from channels.auth import channel_session_user_from_http, channel_session_user

from django.utils.encoding import smart_str
from django.http.cookie import parse_cookie
from django.core.serializers.json import DjangoJSONEncoder

@@ -30,7 +31,7 @@ def ws_connect(message):
# store the valid CSRF token from the cookie so we can compare it later
# on ws_receive
cookie_token = parse_cookie(
headers.get('cookie')
smart_str(headers.get(b'cookie'))
).get('csrftoken')
if cookie_token:
message.channel_session[XRF_KEY] = cookie_token

@@ -2,4 +2,4 @@ from django.conf import settings


def get_local_queuename():
return settings.CLUSTER_HOST_ID.encode('utf-8')
return settings.CLUSTER_HOST_ID

@@ -8,7 +8,7 @@ from uuid import uuid4
import collections
from multiprocessing import Process
from multiprocessing import Queue as MPQueue
from Queue import Full as QueueFull, Empty as QueueEmpty
from queue import Full as QueueFull, Empty as QueueEmpty

from django.conf import settings
from django.db import connection as django_connection, connections
@@ -129,7 +129,7 @@ class PoolWorker(object):
# the task at [0] is the one that's running right now (or is about to
# be running)
if len(self.managed_tasks):
return self.managed_tasks[self.managed_tasks.keys()[0]]
return self.managed_tasks[list(self.managed_tasks.keys())[0]]

return None

@@ -180,7 +180,7 @@ class WorkerPool(object):
|
||||
class MessagePrinter(awx.main.dispatch.worker.BaseWorker):
|
||||
|
||||
def perform_work(self, body):
|
||||
print body
|
||||
print(body)
|
||||
|
||||
pool = WorkerPool(min_workers=4) # spawn four worker processes
|
||||
pool.init_workers(MessagePrint().work_loop)
|
||||
@@ -253,7 +253,7 @@ class WorkerPool(object):
|
||||
return tmpl.render(pool=self, workers=self.workers, meta=self.debug_meta)
|
||||
|
||||
def write(self, preferred_queue, body):
|
||||
queue_order = sorted(range(len(self.workers)), cmp=lambda x, y: -1 if x==preferred_queue else 0)
|
||||
queue_order = sorted(range(len(self.workers)), key=lambda x: -1 if x==preferred_queue else x)
|
||||
write_attempt_order = []
|
||||
for queue_actual in queue_order:
|
||||
try:
|
||||
@@ -296,6 +296,9 @@ class AutoscalePool(WorkerPool):
|
||||
# 5 workers per GB of total memory
|
||||
self.max_workers = (total_memory_gb * 5)
|
||||
|
||||
# max workers can't be less than min_workers
|
||||
self.max_workers = max(self.min_workers, self.max_workers)
|
||||
|
||||
@property
|
||||
def should_grow(self):
|
||||
if len(self.workers) < self.min_workers:
|
||||
@@ -362,7 +365,7 @@ class AutoscalePool(WorkerPool):
|
||||
running_uuids = []
|
||||
for worker in self.workers:
|
||||
worker.calculate_managed_tasks()
|
||||
running_uuids.extend(worker.managed_tasks.keys())
|
||||
running_uuids.extend(list(worker.managed_tasks.keys()))
|
||||
try:
|
||||
reaper.reap(excluded_uuids=running_uuids)
|
||||
except Exception:
|
||||
@@ -370,6 +373,10 @@ class AutoscalePool(WorkerPool):
|
||||
# don't use our logger (it accesses the database for configuration)
|
||||
_, _, tb = sys.exc_info()
|
||||
traceback.print_tb(tb)
|
||||
for conn in connections.all():
|
||||
# If the database connection has a hiccup, re-establish a new
|
||||
# connection
|
||||
conn.close_if_unusable_or_obsolete()
|
||||
|
||||
def up(self):
|
||||
if self.full:
|
||||
|
||||
@@ -45,7 +45,7 @@ class task:
|
||||
|
||||
@task(queue='tower_broadcast', exchange_type='fanout')
|
||||
def announce():
|
||||
print "Run this everywhere!"
|
||||
print("Run this everywhere!")
|
||||
"""
|
||||
|
||||
def __init__(self, queue=None, exchange_type=None):
|
||||
|
||||
@@ -11,6 +11,9 @@ logger = logging.getLogger('awx.main.dispatch')
|
||||
|
||||
|
||||
def reap_job(j, status):
|
||||
if UnifiedJob.objects.get(id=j.id).status not in ('running', 'waiting'):
|
||||
# just in case, don't reap jobs that aren't running
|
||||
return
|
||||
j.status = status
|
||||
j.start_args = '' # blank field to remove encrypted passwords
|
||||
j.job_explanation += ' '.join((
|
||||
|
||||
@@ -5,8 +5,9 @@ import os
|
||||
import logging
|
||||
import signal
|
||||
from uuid import UUID
|
||||
from Queue import Empty as QueueEmpty
|
||||
from queue import Empty as QueueEmpty
|
||||
|
||||
from django import db
|
||||
from kombu import Producer
|
||||
from kombu.mixins import ConsumerMixin
|
||||
|
||||
@@ -128,6 +129,10 @@ class BaseWorker(object):
|
||||
logger.error("Exception on worker {}, restarting: ".format(idx) + str(e))
|
||||
continue
|
||||
try:
|
||||
for conn in db.connections.all():
|
||||
# If the database connection has a hiccup during the prior message, close it
|
||||
# so we can establish a new connection
|
||||
conn.close_if_unusable_or_obsolete()
|
||||
self.perform_work(body, *args)
|
||||
finally:
|
||||
if 'uuid' in body:
|
||||
|
||||
@@ -1,7 +1,5 @@
|
||||
import logging
|
||||
import time
|
||||
import os
|
||||
import signal
|
||||
import traceback
|
||||
|
||||
from django.conf import settings
|
||||
@@ -110,8 +108,7 @@ class CallbackBrokerWorker(BaseWorker):
|
||||
break
|
||||
except (OperationalError, InterfaceError, InternalError):
|
||||
if retries >= self.MAX_RETRIES:
|
||||
logger.exception('Worker could not re-establish database connectivity, shutting down gracefully: Job {}'.format(job_identifier))
|
||||
os.kill(os.getppid(), signal.SIGINT)
|
||||
logger.exception('Worker could not re-establish database connectivity, giving up on event for Job {}'.format(job_identifier))
|
||||
return
|
||||
delay = 60 * retries
|
||||
logger.exception('Database Error Saving Job Event, retry #{i} in {delay} seconds:'.format(
|
||||
|
||||
@@ -5,7 +5,6 @@ import sys
|
||||
import traceback
|
||||
|
||||
import six
|
||||
from django import db
|
||||
|
||||
from awx.main.tasks import dispatch_startup, inform_cluster_of_shutdown
|
||||
|
||||
@@ -31,11 +30,18 @@ class TaskWorker(BaseWorker):
|
||||
awx.main.tasks.delete_inventory
|
||||
awx.main.tasks.RunProjectUpdate
|
||||
'''
|
||||
if not task.startswith('awx.'):
|
||||
raise ValueError('{} is not a valid awx task'.format(task))
|
||||
module, target = task.rsplit('.', 1)
|
||||
module = importlib.import_module(module)
|
||||
_call = None
|
||||
if hasattr(module, target):
|
||||
_call = getattr(module, target, None)
|
||||
if not (
|
||||
hasattr(_call, 'apply_async') and hasattr(_call, 'delay')
|
||||
):
|
||||
raise ValueError('{} is not decorated with @task()'.format(task))
|
||||
|
||||
return _call
|
||||
|
||||
def run_callable(self, body):
|
||||
@@ -75,14 +81,11 @@ class TaskWorker(BaseWorker):
|
||||
'task': u'awx.main.tasks.RunProjectUpdate'
|
||||
}
|
||||
'''
|
||||
for conn in db.connections.all():
|
||||
# If the database connection has a hiccup during at task, close it
|
||||
# so we can establish a new connection
|
||||
conn.close_if_unusable_or_obsolete()
|
||||
result = None
|
||||
try:
|
||||
result = self.run_callable(body)
|
||||
except Exception as exc:
|
||||
result = exc
|
||||
|
||||
try:
|
||||
if getattr(exc, 'is_awx_task_error', False):
|
||||
|
||||
1 awx/main/expect/.gitignore vendored Normal file
@@ -0,0 +1 @@
authorized_keys
0 awx/main/expect/authorized_keys Normal file
@@ -1,6 +1,5 @@
|
||||
import base64
|
||||
import codecs
|
||||
import StringIO
|
||||
import json
|
||||
import os
|
||||
import shutil
|
||||
@@ -9,8 +8,10 @@ import tempfile
|
||||
import time
|
||||
import logging
|
||||
from distutils.version import LooseVersion as Version
|
||||
from io import StringIO
|
||||
|
||||
from django.conf import settings
|
||||
from django.utils.encoding import smart_bytes, smart_str
|
||||
|
||||
import awx
|
||||
from awx.main.expect import run
|
||||
@@ -101,6 +102,8 @@ class IsolatedManager(object):
|
||||
]
|
||||
if extra_vars:
|
||||
args.extend(['-e', json.dumps(extra_vars)])
|
||||
if settings.AWX_ISOLATED_VERBOSITY:
|
||||
args.append('-%s' % ('v' * min(5, settings.AWX_ISOLATED_VERBOSITY)))
|
||||
return args
|
||||
|
||||
@staticmethod
|
||||
@@ -142,7 +145,7 @@ class IsolatedManager(object):
|
||||
|
||||
# if an ssh private key fifo exists, read its contents and delete it
|
||||
if self.ssh_key_path:
|
||||
buff = StringIO.StringIO()
|
||||
buff = StringIO()
|
||||
with open(self.ssh_key_path, 'r') as fifo:
|
||||
for line in fifo:
|
||||
buff.write(line)
|
||||
@@ -154,7 +157,10 @@ class IsolatedManager(object):
|
||||
# into a variable, and will replicate the data into a named pipe on the
|
||||
# isolated instance
|
||||
secrets_path = os.path.join(self.private_data_dir, 'env')
|
||||
run.open_fifo_write(secrets_path, base64.b64encode(json.dumps(secrets)))
|
||||
run.open_fifo_write(
|
||||
secrets_path,
|
||||
smart_str(base64.b64encode(smart_bytes(json.dumps(secrets))))
|
||||
)
|
||||
|
||||
self.build_isolated_job_data()
|
||||
|
||||
@@ -174,7 +180,7 @@ class IsolatedManager(object):
|
||||
args = self._build_args('run_isolated.yml', '%s,' % self.host, extra_vars)
|
||||
if self.instance.verbosity:
|
||||
args.append('-%s' % ('v' * min(5, self.instance.verbosity)))
|
||||
buff = StringIO.StringIO()
|
||||
buff = StringIO()
|
||||
logger.debug('Starting job {} on isolated host with `run_isolated.yml` playbook.'.format(self.instance.id))
|
||||
status, rc = IsolatedManager.run_pexpect(
|
||||
args, self.awx_playbook_path(), self.management_env, buff,
|
||||
@@ -244,7 +250,7 @@ class IsolatedManager(object):
|
||||
os.makedirs(self.path_to('artifacts', 'job_events'), mode=stat.S_IXUSR + stat.S_IWUSR + stat.S_IRUSR)
|
||||
|
||||
def _missing_artifacts(self, path_list, output):
|
||||
missing_artifacts = filter(lambda path: not os.path.exists(path), path_list)
|
||||
missing_artifacts = list(filter(lambda path: not os.path.exists(path), path_list))
|
||||
for path in missing_artifacts:
|
||||
self.stdout_handle.write('ansible did not exit cleanly, missing `{}`.\n'.format(path))
|
||||
if missing_artifacts:
|
||||
@@ -282,7 +288,7 @@ class IsolatedManager(object):
|
||||
status = 'failed'
|
||||
output = ''
|
||||
rc = None
|
||||
buff = StringIO.StringIO()
|
||||
buff = StringIO()
|
||||
last_check = time.time()
|
||||
seek = 0
|
||||
job_timeout = remaining = self.job_timeout
|
||||
@@ -303,7 +309,7 @@ class IsolatedManager(object):
|
||||
time.sleep(1)
|
||||
continue
|
||||
|
||||
buff = StringIO.StringIO()
|
||||
buff = StringIO()
|
||||
logger.debug('Checking on isolated job {} with `check_isolated.yml`.'.format(self.instance.id))
|
||||
status, rc = IsolatedManager.run_pexpect(
|
||||
args, self.awx_playbook_path(), self.management_env, buff,
|
||||
@@ -318,7 +324,7 @@ class IsolatedManager(object):
|
||||
|
||||
path = self.path_to('artifacts', 'stdout')
|
||||
if os.path.exists(path):
|
||||
with open(path, 'r') as f:
|
||||
with codecs.open(path, 'r', encoding='utf-8') as f:
|
||||
f.seek(seek)
|
||||
for line in f:
|
||||
self.stdout_handle.write(line)
|
||||
@@ -340,7 +346,7 @@ class IsolatedManager(object):
|
||||
elif status == 'failed':
|
||||
# if we were unable to retrieve job reults from the isolated host,
|
||||
# print stdout of the `check_isolated.yml` playbook for clues
|
||||
self.stdout_handle.write(output)
|
||||
self.stdout_handle.write(smart_str(output))
|
||||
|
||||
return status, rc
|
||||
|
||||
@@ -355,7 +361,7 @@ class IsolatedManager(object):
|
||||
}
|
||||
args = self._build_args('clean_isolated.yml', '%s,' % self.host, extra_vars)
|
||||
logger.debug('Cleaning up job {} on isolated host with `clean_isolated.yml` playbook.'.format(self.instance.id))
|
||||
buff = StringIO.StringIO()
|
||||
buff = StringIO()
|
||||
timeout = max(60, 2 * settings.AWX_ISOLATED_CONNECTION_TIMEOUT)
|
||||
status, rc = IsolatedManager.run_pexpect(
|
||||
args, self.awx_playbook_path(), self.management_env, buff,
|
||||
@@ -407,46 +413,52 @@ class IsolatedManager(object):
|
||||
args = cls._build_args('heartbeat_isolated.yml', hostname_string)
|
||||
args.extend(['--forks', str(len(instance_qs))])
|
||||
env = cls._base_management_env()
|
||||
env['ANSIBLE_STDOUT_CALLBACK'] = 'json'
|
||||
|
||||
buff = StringIO.StringIO()
|
||||
timeout = max(60, 2 * settings.AWX_ISOLATED_CONNECTION_TIMEOUT)
|
||||
status, rc = IsolatedManager.run_pexpect(
|
||||
args, cls.awx_playbook_path(), env, buff,
|
||||
idle_timeout=timeout, job_timeout=timeout,
|
||||
pexpect_timeout=5
|
||||
)
|
||||
output = buff.getvalue().encode('utf-8')
|
||||
buff.close()
|
||||
|
||||
try:
|
||||
result = json.loads(output)
|
||||
if not isinstance(result, dict):
|
||||
raise TypeError('Expected a dict but received {}.'.format(str(type(result))))
|
||||
except (ValueError, AssertionError, TypeError):
|
||||
logger.exception('Failed to read status from isolated instances, output:\n {}'.format(output))
|
||||
return
|
||||
facts_path = tempfile.mkdtemp()
|
||||
env['ANSIBLE_CACHE_PLUGIN'] = 'jsonfile'
|
||||
env['ANSIBLE_CACHE_PLUGIN_CONNECTION'] = facts_path
|
||||
|
||||
for instance in instance_qs:
|
||||
try:
|
||||
task_result = result['plays'][0]['tasks'][0]['hosts'][instance.hostname]
|
||||
except (KeyError, IndexError):
|
||||
buff = StringIO()
|
||||
timeout = max(60, 2 * settings.AWX_ISOLATED_CONNECTION_TIMEOUT)
|
||||
status, rc = IsolatedManager.run_pexpect(
|
||||
args, cls.awx_playbook_path(), env, buff,
|
||||
idle_timeout=timeout, job_timeout=timeout,
|
||||
pexpect_timeout=5
|
||||
)
|
||||
heartbeat_stdout = buff.getvalue().encode('utf-8')
|
||||
buff.close()
|
||||
|
||||
for instance in instance_qs:
|
||||
output = heartbeat_stdout
|
||||
task_result = {}
|
||||
if 'capacity_cpu' in task_result and 'capacity_mem' in task_result:
|
||||
cls.update_capacity(instance, task_result, awx_application_version)
|
||||
logger.debug('Isolated instance {} successful heartbeat'.format(instance.hostname))
|
||||
elif instance.capacity == 0:
|
||||
logger.debug('Isolated instance {} previously marked as lost, could not re-join.'.format(
|
||||
instance.hostname))
|
||||
else:
|
||||
logger.warning('Could not update status of isolated instance {}, msg={}'.format(
|
||||
instance.hostname, task_result.get('msg', 'unknown failure')
|
||||
))
|
||||
if instance.is_lost(isolated=True):
|
||||
instance.capacity = 0
|
||||
instance.save(update_fields=['capacity'])
|
||||
logger.error('Isolated instance {} last checked in at {}, marked as lost.'.format(
|
||||
instance.hostname, instance.modified))
|
||||
try:
|
||||
with open(os.path.join(facts_path, instance.hostname), 'r') as facts_data:
|
||||
output = facts_data.read()
|
||||
task_result = json.loads(output)
|
||||
except Exception:
|
||||
logger.exception('Failed to read status from isolated instances, output:\n {}'.format(output))
|
||||
if 'awx_capacity_cpu' in task_result and 'awx_capacity_mem' in task_result:
|
||||
task_result = {
|
||||
'capacity_cpu': task_result['awx_capacity_cpu'],
|
||||
'capacity_mem': task_result['awx_capacity_mem'],
|
||||
'version': task_result['awx_capacity_version']
|
||||
}
|
||||
cls.update_capacity(instance, task_result, awx_application_version)
|
||||
logger.debug('Isolated instance {} successful heartbeat'.format(instance.hostname))
|
||||
elif instance.capacity == 0:
|
||||
logger.debug('Isolated instance {} previously marked as lost, could not re-join.'.format(
|
||||
instance.hostname))
|
||||
else:
|
||||
logger.warning('Could not update status of isolated instance {}'.format(instance.hostname))
|
||||
if instance.is_lost(isolated=True):
|
||||
instance.capacity = 0
|
||||
instance.save(update_fields=['capacity'])
|
||||
logger.error('Isolated instance {} last checked in at {}, marked as lost.'.format(
|
||||
instance.hostname, instance.modified))
|
||||
finally:
|
||||
if os.path.exists(facts_path):
|
||||
shutil.rmtree(facts_path)
|
||||
|
||||
@staticmethod
|
||||
def get_stdout_handle(instance, private_data_dir, event_data_key='job_id'):
|
||||
|
||||
@@ -4,7 +4,6 @@ import argparse
import base64
import codecs
import collections
import StringIO
import logging
import json
import os
@@ -13,12 +12,15 @@ import pipes
import re
import signal
import sys
import thread
import threading
import time
try:
from io import StringIO
except ImportError:
from StringIO import StringIO

import pexpect
import psutil
import six


logger = logging.getLogger('awx.main.utils.expect')
@@ -49,7 +51,10 @@ def open_fifo_write(path, data):
|
||||
reads data from the pipe.
|
||||
'''
|
||||
os.mkfifo(path, 0o600)
|
||||
thread.start_new_thread(lambda p, d: open(p, 'w').write(d), (path, data))
|
||||
threading.Thread(
|
||||
target=lambda p, d: open(p, 'w').write(d),
|
||||
args=(path, data)
|
||||
).start()
|
||||
|
||||
|
||||
def run_pexpect(args, cwd, env, logfile,
|
||||
@@ -97,14 +102,8 @@ def run_pexpect(args, cwd, env, logfile,
|
||||
# enforce usage of an OrderedDict so that the ordering of elements in
|
||||
# `keys()` matches `values()`.
|
||||
expect_passwords = collections.OrderedDict(expect_passwords)
|
||||
password_patterns = expect_passwords.keys()
|
||||
password_values = expect_passwords.values()
|
||||
|
||||
# pexpect needs all env vars to be utf-8 encoded strings
|
||||
# https://github.com/pexpect/pexpect/issues/512
|
||||
for k, v in env.items():
|
||||
if isinstance(v, six.text_type):
|
||||
env[k] = v.encode('utf-8')
|
||||
password_patterns = list(expect_passwords.keys())
|
||||
password_values = list(expect_passwords.values())
|
||||
|
||||
child = pexpect.spawn(
|
||||
args[0], args[1:], cwd=cwd, env=env, ignore_sighup=True,
|
||||
@@ -232,7 +231,11 @@ def handle_termination(pid, args, proot_cmd, is_cancel=True):
|
||||
instance's cancel_flag.
|
||||
'''
|
||||
try:
|
||||
if proot_cmd in ' '.join(args):
|
||||
if sys.version_info > (3, 0):
|
||||
used_proot = proot_cmd.encode('utf-8') in args
|
||||
else:
|
||||
used_proot = proot_cmd in ' '.join(args)
|
||||
if used_proot:
|
||||
if not psutil:
|
||||
os.kill(pid, signal.SIGKILL)
|
||||
else:
|
||||
@@ -253,8 +256,8 @@ def handle_termination(pid, args, proot_cmd, is_cancel=True):
|
||||
|
||||
|
||||
def __run__(private_data_dir):
|
||||
buff = StringIO.StringIO()
|
||||
with open(os.path.join(private_data_dir, 'env'), 'r') as f:
|
||||
buff = StringIO()
|
||||
with codecs.open(os.path.join(private_data_dir, 'env'), 'r', encoding='utf-8') as f:
|
||||
for line in f:
|
||||
buff.write(line)
|
||||
|
||||
|
||||
@@ -7,7 +7,7 @@ import json
|
||||
import operator
|
||||
import re
|
||||
import six
|
||||
import urllib
|
||||
import urllib.parse
|
||||
|
||||
from jinja2 import Environment, StrictUndefined
|
||||
from jinja2.exceptions import UndefinedError, TemplateSyntaxError
|
||||
@@ -251,6 +251,9 @@ class ImplicitRoleField(models.ForeignKey):
|
||||
if type(field_name) == tuple:
|
||||
continue
|
||||
|
||||
if type(field_name) == bytes:
|
||||
field_name = field_name.decode('utf-8')
|
||||
|
||||
if field_name.startswith('singleton:'):
|
||||
continue
|
||||
|
||||
@@ -373,7 +376,7 @@ class SmartFilterField(models.TextField):
|
||||
# https://docs.python.org/2/library/stdtypes.html#truth-value-testing
|
||||
if not value:
|
||||
return None
|
||||
value = urllib.unquote(value)
|
||||
value = urllib.parse.unquote(value)
|
||||
try:
|
||||
SmartFilter().query_from_string(value)
|
||||
except RuntimeError as e:
|
||||
@@ -407,9 +410,6 @@ class JSONSchemaField(JSONBField):
|
||||
self.schema(model_instance),
|
||||
format_checker=self.format_checker
|
||||
).iter_errors(value):
|
||||
# strip Python unicode markers from jsonschema validation errors
|
||||
error.message = re.sub(r'\bu(\'|")', r'\1', error.message)
|
||||
|
||||
if error.validator == 'pattern' and 'error' in error.schema:
|
||||
error.message = six.text_type(error.schema['error']).format(instance=error.instance)
|
||||
elif error.validator == 'type':
|
||||
@@ -514,10 +514,10 @@ class CredentialInputField(JSONSchemaField):
|
||||
field = field.copy()
|
||||
if field['type'] == 'become_method':
|
||||
field.pop('type')
|
||||
field['choices'] = map(operator.itemgetter(0), CHOICES_PRIVILEGE_ESCALATION_METHODS)
|
||||
field['choices'] = list(map(operator.itemgetter(0), CHOICES_PRIVILEGE_ESCALATION_METHODS))
|
||||
properties[field['id']] = field
|
||||
if field.get('choices', []):
|
||||
field['enum'] = field['choices'][:]
|
||||
field['enum'] = list(field['choices'])[:]
|
||||
return {
|
||||
'type': 'object',
|
||||
'properties': properties,
|
||||
@@ -824,14 +824,14 @@ class CredentialTypeInjectorField(JSONSchemaField):
|
||||
)
|
||||
|
||||
class ExplodingNamespace:
|
||||
def __unicode__(self):
|
||||
def __str__(self):
|
||||
raise UndefinedError(_('Must define unnamed file injector in order to reference `tower.filename`.'))
|
||||
|
||||
class TowerNamespace:
|
||||
def __init__(self):
|
||||
self.filename = ExplodingNamespace()
|
||||
|
||||
def __unicode__(self):
|
||||
def __str__(self):
|
||||
raise UndefinedError(_('Cannot directly reference reserved `tower` namespace container.'))
|
||||
|
||||
valid_namespace['tower'] = TowerNamespace()
|
||||
|
||||
@@ -3,6 +3,7 @@
|
||||
|
||||
# Python
|
||||
import re
|
||||
import sys
|
||||
from dateutil.relativedelta import relativedelta
|
||||
|
||||
# Django
|
||||
@@ -129,6 +130,7 @@ class Command(BaseCommand):
|
||||
|
||||
@transaction.atomic
|
||||
def handle(self, *args, **options):
|
||||
sys.stderr.write("This command has been deprecated and will be removed in a future release.\n")
|
||||
if not feature_enabled('system_tracking'):
|
||||
raise CommandError("The System Tracking feature is not enabled for your instance")
|
||||
cleanup_facts = CleanupFacts()
|
||||
|
||||
@@ -2,7 +2,6 @@
|
||||
# All Rights Reserved
|
||||
|
||||
import subprocess
|
||||
import warnings
|
||||
|
||||
from django.db import transaction
|
||||
from django.core.management.base import BaseCommand, CommandError
|
||||
@@ -24,17 +23,10 @@ class Command(BaseCommand):
|
||||
def add_arguments(self, parser):
|
||||
parser.add_argument('--hostname', dest='hostname', type=str,
|
||||
help='Hostname used during provisioning')
|
||||
parser.add_argument('--name', dest='name', type=str,
|
||||
help='(PENDING DEPRECIATION) Hostname used during provisioning')
|
||||
|
||||
@transaction.atomic
|
||||
def handle(self, *args, **options):
|
||||
# TODO: remove in 3.3
|
||||
if options.get('name'):
|
||||
warnings.warn("`--name` is depreciated in favor of `--hostname`, and will be removed in release 3.3.")
|
||||
if options.get('hostname'):
|
||||
raise CommandError("Cannot accept both --name and --hostname.")
|
||||
options['hostname'] = options['name']
|
||||
hostname = options.get('hostname')
|
||||
if not hostname:
|
||||
raise CommandError("--hostname is a required argument")
|
||||
|
||||
@@ -1,17 +0,0 @@
|
||||
# Copyright (c) 2017 Ansible by Red Hat
|
||||
# All Rights Reserved
|
||||
|
||||
# Borrow from another AWX command
|
||||
from awx.main.management.commands.deprovision_instance import Command as OtherCommand
|
||||
|
||||
# Python
|
||||
import warnings
|
||||
|
||||
|
||||
class Command(OtherCommand):
|
||||
|
||||
def handle(self, *args, **options):
|
||||
# TODO: delete this entire file in 3.3
|
||||
warnings.warn('This command is replaced with `deprovision_instance` and will '
|
||||
'be removed in release 3.3.')
|
||||
return super(Command, self).handle(*args, **options)
|
||||
@@ -1,6 +1,7 @@
|
||||
# Copyright (c) 2015 Ansible, Inc.
|
||||
# All Rights Reserved
|
||||
import datetime
|
||||
from django.utils.encoding import smart_str
|
||||
|
||||
from cryptography.hazmat.backends import default_backend
|
||||
from cryptography.hazmat.primitives import serialization
|
||||
@@ -35,10 +36,10 @@ class Command(BaseCommand):
|
||||
).save()
|
||||
pemfile = Setting.objects.create(
|
||||
key='AWX_ISOLATED_PUBLIC_KEY',
|
||||
value=key.public_key().public_bytes(
|
||||
value=smart_str(key.public_key().public_bytes(
|
||||
encoding=serialization.Encoding.OpenSSH,
|
||||
format=serialization.PublicFormat.OpenSSH
|
||||
) + " generated-by-awx@%s" % datetime.datetime.utcnow().isoformat()
|
||||
)) + " generated-by-awx@%s" % datetime.datetime.utcnow().isoformat()
|
||||
)
|
||||
pemfile.save()
|
||||
print(pemfile.value)
|
||||
|
||||
@@ -4,6 +4,7 @@
|
||||
# Python
|
||||
import json
|
||||
import logging
|
||||
import fnmatch
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
@@ -15,12 +16,20 @@ import shutil
|
||||
# Django
|
||||
from django.conf import settings
|
||||
from django.core.management.base import BaseCommand, CommandError
|
||||
from django.core.exceptions import ImproperlyConfigured
|
||||
from django.db import connection, transaction
|
||||
from django.utils.encoding import smart_text
|
||||
|
||||
# AWX
|
||||
from awx.main.models import * # noqa
|
||||
# AWX inventory imports
|
||||
from awx.main.models.inventory import (
|
||||
Inventory,
|
||||
InventorySource,
|
||||
InventoryUpdate,
|
||||
Host
|
||||
)
|
||||
from awx.main.utils.mem_inventory import MemInventory, dict_to_mem_data
|
||||
|
||||
# other AWX imports
|
||||
from awx.main.models.rbac import batch_role_ancestor_rebuilding
|
||||
from awx.main.utils import (
|
||||
ignore_inventory_computed_fields,
|
||||
check_proot_installed,
|
||||
@@ -28,7 +37,6 @@ from awx.main.utils import (
|
||||
build_proot_temp_dir,
|
||||
get_licenser
|
||||
)
|
||||
from awx.main.utils.mem_inventory import MemInventory, dict_to_mem_data
|
||||
from awx.main.signals import disable_activity_stream
|
||||
from awx.main.constants import STANDARD_INVENTORY_UPDATE_ENV
|
||||
|
||||
@@ -63,60 +71,46 @@ class AnsibleInventoryLoader(object):
|
||||
use the ansible-inventory CLI utility to convert it into in-memory
|
||||
representational objects. Example:
|
||||
/usr/bin/ansible/ansible-inventory -i hosts --list
|
||||
If it fails to find this, it uses the backported script instead
|
||||
'''
|
||||
|
||||
def __init__(self, source, group_filter_re=None, host_filter_re=None, is_custom=False):
|
||||
def __init__(self, source, is_custom=False, venv_path=None):
|
||||
self.source = source
|
||||
self.source_dir = functioning_dir(self.source)
|
||||
self.is_custom = is_custom
|
||||
self.tmp_private_dir = None
|
||||
self.method = 'ansible-inventory'
|
||||
self.group_filter_re = group_filter_re
|
||||
self.host_filter_re = host_filter_re
|
||||
|
||||
self.is_vendored_source = False
|
||||
if self.source_dir == os.path.join(settings.BASE_DIR, 'plugins', 'inventory'):
|
||||
self.is_vendored_source = True
|
||||
if venv_path:
|
||||
self.venv_path = venv_path
|
||||
else:
|
||||
self.venv_path = settings.ANSIBLE_VENV_PATH
|
||||
|
||||
def build_env(self):
|
||||
env = dict(os.environ.items())
|
||||
env['VIRTUAL_ENV'] = settings.ANSIBLE_VENV_PATH
|
||||
env['PATH'] = os.path.join(settings.ANSIBLE_VENV_PATH, "bin") + ":" + env['PATH']
|
||||
env['VIRTUAL_ENV'] = self.venv_path
|
||||
env['PATH'] = os.path.join(self.venv_path, "bin") + ":" + env['PATH']
|
||||
# Set configuration items that should always be used for updates
|
||||
for key, value in STANDARD_INVENTORY_UPDATE_ENV.items():
|
||||
if key not in env:
|
||||
env[key] = value
|
||||
venv_libdir = os.path.join(settings.ANSIBLE_VENV_PATH, "lib")
|
||||
venv_libdir = os.path.join(self.venv_path, "lib")
|
||||
env.pop('PYTHONPATH', None) # default to none if no python_ver matches
|
||||
if os.path.isdir(os.path.join(venv_libdir, "python2.7")):
|
||||
env['PYTHONPATH'] = os.path.join(venv_libdir, "python2.7", "site-packages") + ":"
|
||||
for version in os.listdir(venv_libdir):
|
||||
if fnmatch.fnmatch(version, 'python[23].*'):
|
||||
if os.path.isdir(os.path.join(venv_libdir, version)):
|
||||
env['PYTHONPATH'] = os.path.join(venv_libdir, version, "site-packages") + ":"
|
||||
break
|
||||
# For internal inventory updates, these are not reported in the job_env API
|
||||
logger.info('Using VIRTUAL_ENV: {}'.format(env['VIRTUAL_ENV']))
|
||||
logger.info('Using PATH: {}'.format(env['PATH']))
|
||||
logger.info('Using PYTHONPATH: {}'.format(env.get('PYTHONPATH', None)))
|
||||
return env
|
||||
|
||||
def get_base_args(self):
|
||||
# get ansible-inventory absolute path for running in bubblewrap/proot, in Popen
|
||||
for path in os.environ["PATH"].split(os.pathsep):
|
||||
potential_path = os.path.join(path.strip('"'), 'ansible-inventory')
|
||||
if os.path.isfile(potential_path) and os.access(potential_path, os.X_OK):
|
||||
logger.debug('Using system install of ansible-inventory CLI: {}'.format(potential_path))
|
||||
return [potential_path, '-i', self.source]
|
||||
|
||||
# Stopgap solution for group_vars, do not use backported module for official
|
||||
# vendored cloud modules or custom scripts TODO: remove after Ansible 2.3 deprecation
|
||||
if self.is_vendored_source or self.is_custom:
|
||||
self.method = 'inventory script invocation'
|
||||
return [self.source]
|
||||
|
||||
# ansible-inventory was not found, look for backported module TODO: remove after Ansible 2.3 deprecation
|
||||
abs_module_path = os.path.abspath(os.path.join(
|
||||
os.path.dirname(__file__), '..', '..', '..', 'plugins',
|
||||
'ansible_inventory', 'backport.py'))
|
||||
self.method = 'ansible-inventory backport'
|
||||
|
||||
if not os.path.exists(abs_module_path):
|
||||
raise ImproperlyConfigured('Cannot find inventory module')
|
||||
logger.debug('Using backported ansible-inventory module: {}'.format(abs_module_path))
|
||||
return [abs_module_path, '-i', self.source]
|
||||
abs_ansible_inventory = shutil.which('ansible-inventory')
|
||||
bargs= [abs_ansible_inventory, '-i', self.source]
|
||||
logger.debug('Using base command: {}'.format(' '.join(bargs)))
|
||||
return bargs
|
||||
|
||||
def get_proot_args(self, cmd, env):
|
||||
cwd = os.getcwd()
|
||||
@@ -155,6 +149,8 @@ class AnsibleInventoryLoader(object):
|
||||
|
||||
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env)
|
||||
stdout, stderr = proc.communicate()
|
||||
stdout = smart_text(stdout)
|
||||
stderr = smart_text(stderr)
|
||||
|
||||
if self.tmp_private_dir:
|
||||
shutil.rmtree(self.tmp_private_dir, True)
|
||||
@@ -177,80 +173,7 @@ class AnsibleInventoryLoader(object):
|
||||
base_args = self.get_base_args()
|
||||
logger.info('Reading Ansible inventory source: %s', self.source)
|
||||
|
||||
data = self.command_to_json(base_args + ['--list'])
|
||||
|
||||
# TODO: remove after we run custom scripts through ansible-inventory
|
||||
if self.is_custom and '_meta' not in data or 'hostvars' not in data['_meta']:
|
||||
# Invoke the executable once for each host name we've built up
|
||||
# to set their variables
|
||||
data.setdefault('_meta', {})
|
||||
data['_meta'].setdefault('hostvars', {})
|
||||
logger.warning('Re-calling script for hostvars individually.')
|
||||
for group_name, group_data in data.iteritems():
|
||||
if group_name == '_meta':
|
||||
continue
|
||||
|
||||
if isinstance(group_data, dict):
|
||||
group_host_list = group_data.get('hosts', [])
|
||||
elif isinstance(group_data, list):
|
||||
group_host_list = group_data
|
||||
else:
|
||||
logger.warning('Group data for "%s" is not a dict or list',
|
||||
group_name)
|
||||
group_host_list = []
|
||||
|
||||
for hostname in group_host_list:
|
||||
logger.debug('Obtaining hostvars for %s' % hostname.encode('utf-8'))
|
||||
hostdata = self.command_to_json(
|
||||
base_args + ['--host', hostname.encode("utf-8")]
|
||||
)
|
||||
if isinstance(hostdata, dict):
|
||||
data['_meta']['hostvars'][hostname] = hostdata
|
||||
else:
|
||||
logger.warning(
|
||||
'Expected dict of vars for host "%s" when '
|
||||
'calling with `--host`, got %s instead',
|
||||
k, str(type(data))
|
||||
)
|
||||
|
||||
logger.info('Processing JSON output...')
|
||||
inventory = MemInventory(
|
||||
group_filter_re=self.group_filter_re, host_filter_re=self.host_filter_re)
|
||||
inventory = dict_to_mem_data(data, inventory=inventory)
|
||||
|
||||
return inventory
|
||||
|
||||
|
||||
def load_inventory_source(source, group_filter_re=None,
|
||||
host_filter_re=None, exclude_empty_groups=False,
|
||||
is_custom=False):
|
||||
'''
|
||||
Load inventory from given source directory or file.
|
||||
'''
|
||||
# Sanity check: We sanitize these module names for our API but Ansible proper doesn't follow
|
||||
# good naming conventions
|
||||
source = source.replace('rhv.py', 'ovirt4.py')
|
||||
source = source.replace('satellite6.py', 'foreman.py')
|
||||
source = source.replace('vmware.py', 'vmware_inventory.py')
|
||||
if not os.path.exists(source):
|
||||
raise IOError('Source does not exist: %s' % source)
|
||||
source = os.path.join(os.getcwd(), os.path.dirname(source),
|
||||
os.path.basename(source))
|
||||
source = os.path.normpath(os.path.abspath(source))
|
||||
|
||||
inventory = AnsibleInventoryLoader(
|
||||
source=source,
|
||||
group_filter_re=group_filter_re,
|
||||
host_filter_re=host_filter_re,
|
||||
is_custom=is_custom).load()
|
||||
|
||||
logger.debug('Finished loading from source: %s', source)
|
||||
# Exclude groups that are completely empty.
|
||||
if exclude_empty_groups:
|
||||
inventory.delete_empty_groups()
|
||||
logger.info('Loaded %d groups, %d hosts', len(inventory.all_group.all_groups),
|
||||
len(inventory.all_group.all_hosts))
|
||||
return inventory.all_group
|
||||
return self.command_to_json(base_args + ['--list'])
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
@@ -268,6 +191,8 @@ class Command(BaseCommand):
|
||||
parser.add_argument('--inventory-id', dest='inventory_id', type=int,
|
||||
default=None, metavar='i',
|
||||
help='id of inventory to sync')
|
||||
parser.add_argument('--venv', dest='venv', type=str, default=None,
|
||||
help='absolute path to the AWX custom virtualenv to use')
|
||||
parser.add_argument('--overwrite', dest='overwrite', action='store_true', default=False,
|
||||
help='overwrite the destination hosts and groups')
|
||||
parser.add_argument('--overwrite-vars', dest='overwrite_vars',
|
||||
@@ -347,7 +272,7 @@ class Command(BaseCommand):
|
||||
if enabled is not default:
|
||||
enabled_value = getattr(self, 'enabled_value', None)
|
||||
if enabled_value is not None:
|
||||
enabled = bool(unicode(enabled_value) == unicode(enabled))
|
||||
enabled = bool(str(enabled_value) == str(enabled))
|
||||
else:
|
||||
enabled = bool(enabled)
|
||||
if enabled is default:
|
||||
@@ -357,6 +282,19 @@ class Command(BaseCommand):
|
||||
else:
|
||||
raise NotImplementedError('Value of enabled {} not understood.'.format(enabled))
|
||||
|
||||
def get_source_absolute_path(self, source):
|
||||
# Sanity check: We sanitize these module names for our API but Ansible proper doesn't follow
|
||||
# good naming conventions
|
||||
source = source.replace('rhv.py', 'ovirt4.py')
|
||||
source = source.replace('satellite6.py', 'foreman.py')
|
||||
source = source.replace('vmware.py', 'vmware_inventory.py')
|
||||
if not os.path.exists(source):
|
||||
raise IOError('Source does not exist: %s' % source)
|
||||
source = os.path.join(os.getcwd(), os.path.dirname(source),
|
||||
os.path.basename(source))
|
||||
source = os.path.normpath(os.path.abspath(source))
|
||||
return source
|
||||
|
||||
def load_inventory_from_database(self):
|
||||
'''
|
||||
Load inventory and related objects from the database.
|
||||
@@ -369,9 +307,9 @@ class Command(BaseCommand):
|
||||
try:
|
||||
self.inventory = Inventory.objects.get(**q)
|
||||
except Inventory.DoesNotExist:
|
||||
raise CommandError('Inventory with %s = %s cannot be found' % q.items()[0])
|
||||
raise CommandError('Inventory with %s = %s cannot be found' % list(q.items())[0])
|
||||
except Inventory.MultipleObjectsReturned:
|
||||
raise CommandError('Inventory with %s = %s returned multiple results' % q.items()[0])
|
||||
raise CommandError('Inventory with %s = %s returned multiple results' % list(q.items())[0])
|
||||
logger.info('Updating inventory %d: %s' % (self.inventory.pk,
|
||||
self.inventory.name))
|
||||
|
||||
@@ -471,7 +409,7 @@ class Command(BaseCommand):
|
||||
if self.instance_id_var:
|
||||
all_instance_ids = self.mem_instance_id_map.keys()
|
||||
instance_ids = []
|
||||
for offset in xrange(0, len(all_instance_ids), self._batch_size):
|
||||
for offset in range(0, len(all_instance_ids), self._batch_size):
|
||||
instance_ids = all_instance_ids[offset:(offset + self._batch_size)]
|
||||
for host_pk in hosts_qs.filter(instance_id__in=instance_ids).values_list('pk', flat=True):
|
||||
del_host_pks.discard(host_pk)
|
||||
@@ -479,14 +417,14 @@ class Command(BaseCommand):
|
||||
del_host_pks.discard(host_pk)
|
||||
all_host_names = list(set(self.mem_instance_id_map.values()) - set(self.all_group.all_hosts.keys()))
|
||||
else:
|
||||
all_host_names = self.all_group.all_hosts.keys()
|
||||
for offset in xrange(0, len(all_host_names), self._batch_size):
|
||||
all_host_names = list(self.all_group.all_hosts.keys())
|
||||
for offset in range(0, len(all_host_names), self._batch_size):
|
||||
host_names = all_host_names[offset:(offset + self._batch_size)]
|
||||
for host_pk in hosts_qs.filter(name__in=host_names).values_list('pk', flat=True):
|
||||
del_host_pks.discard(host_pk)
|
||||
# Now delete all remaining hosts in batches.
|
||||
all_del_pks = sorted(list(del_host_pks))
|
||||
for offset in xrange(0, len(all_del_pks), self._batch_size):
|
||||
for offset in range(0, len(all_del_pks), self._batch_size):
|
||||
del_pks = all_del_pks[offset:(offset + self._batch_size)]
|
||||
for host in hosts_qs.filter(pk__in=del_pks):
|
||||
host_name = host.name
|
||||
@@ -509,8 +447,8 @@ class Command(BaseCommand):
|
||||
groups_qs = self.inventory_source.groups.all()
|
||||
# Build list of all group pks, remove those that should not be deleted.
|
||||
del_group_pks = set(groups_qs.values_list('pk', flat=True))
|
||||
all_group_names = self.all_group.all_groups.keys()
|
||||
for offset in xrange(0, len(all_group_names), self._batch_size):
|
||||
all_group_names = list(self.all_group.all_groups.keys())
|
||||
for offset in range(0, len(all_group_names), self._batch_size):
|
||||
group_names = all_group_names[offset:(offset + self._batch_size)]
|
||||
for group_pk in groups_qs.filter(name__in=group_names).values_list('pk', flat=True):
|
||||
del_group_pks.discard(group_pk)
|
||||
@@ -522,7 +460,7 @@ class Command(BaseCommand):
|
||||
del_group_pks.discard(self.inventory_source.deprecated_group_id)
|
||||
# Now delete all remaining groups in batches.
|
||||
all_del_pks = sorted(list(del_group_pks))
|
||||
for offset in xrange(0, len(all_del_pks), self._batch_size):
|
||||
for offset in range(0, len(all_del_pks), self._batch_size):
|
||||
del_pks = all_del_pks[offset:(offset + self._batch_size)]
|
||||
for group in groups_qs.filter(pk__in=del_pks):
|
||||
group_name = group.name
|
||||
@@ -561,7 +499,7 @@ class Command(BaseCommand):
|
||||
for mem_group in mem_children:
|
||||
db_children_name_pk_map.pop(mem_group.name, None)
|
||||
del_child_group_pks = list(set(db_children_name_pk_map.values()))
|
||||
for offset in xrange(0, len(del_child_group_pks), self._batch_size):
|
||||
for offset in range(0, len(del_child_group_pks), self._batch_size):
|
||||
child_group_pks = del_child_group_pks[offset:(offset + self._batch_size)]
|
||||
for db_child in db_children.filter(pk__in=child_group_pks):
|
||||
group_group_count += 1
|
||||
@@ -574,12 +512,12 @@ class Command(BaseCommand):
|
||||
del_host_pks = set(db_hosts.values_list('pk', flat=True))
|
||||
mem_hosts = self.all_group.all_groups[db_group.name].hosts
|
||||
all_mem_host_names = [h.name for h in mem_hosts if not h.instance_id]
|
||||
for offset in xrange(0, len(all_mem_host_names), self._batch_size):
|
||||
for offset in range(0, len(all_mem_host_names), self._batch_size):
|
||||
mem_host_names = all_mem_host_names[offset:(offset + self._batch_size)]
|
||||
for db_host_pk in db_hosts.filter(name__in=mem_host_names).values_list('pk', flat=True):
|
||||
del_host_pks.discard(db_host_pk)
|
||||
all_mem_instance_ids = [h.instance_id for h in mem_hosts if h.instance_id]
|
||||
for offset in xrange(0, len(all_mem_instance_ids), self._batch_size):
|
||||
for offset in range(0, len(all_mem_instance_ids), self._batch_size):
|
||||
mem_instance_ids = all_mem_instance_ids[offset:(offset + self._batch_size)]
|
||||
for db_host_pk in db_hosts.filter(instance_id__in=mem_instance_ids).values_list('pk', flat=True):
|
||||
del_host_pks.discard(db_host_pk)
|
||||
@@ -587,7 +525,7 @@ class Command(BaseCommand):
|
||||
for db_host_pk in all_db_host_pks:
|
||||
del_host_pks.discard(db_host_pk)
|
||||
del_host_pks = list(del_host_pks)
|
||||
for offset in xrange(0, len(del_host_pks), self._batch_size):
|
||||
for offset in range(0, len(del_host_pks), self._batch_size):
|
||||
del_pks = del_host_pks[offset:(offset + self._batch_size)]
|
||||
for db_host in db_hosts.filter(pk__in=del_pks):
|
||||
group_host_count += 1
|
||||
@@ -635,7 +573,7 @@ class Command(BaseCommand):
|
||||
if len(v.parents) == 1 and v.parents[0].name == 'all':
|
||||
root_group_names.add(k)
|
||||
existing_group_names = set()
|
||||
for offset in xrange(0, len(all_group_names), self._batch_size):
|
||||
for offset in range(0, len(all_group_names), self._batch_size):
|
||||
group_names = all_group_names[offset:(offset + self._batch_size)]
|
||||
for group in self.inventory.groups.filter(name__in=group_names):
|
||||
mem_group = self.all_group.all_groups[group.name]
|
||||
@@ -739,7 +677,7 @@ class Command(BaseCommand):
|
||||
mem_host_instance_id_map = {}
|
||||
mem_host_name_map = {}
|
||||
mem_host_names_to_update = set(self.all_group.all_hosts.keys())
|
||||
for k,v in self.all_group.all_hosts.iteritems():
|
||||
for k,v in self.all_group.all_hosts.items():
|
||||
mem_host_name_map[k] = v
|
||||
instance_id = self._get_instance_id(v.variables)
|
||||
if instance_id in self.db_instance_id_map:
|
||||
@@ -749,7 +687,7 @@ class Command(BaseCommand):
|
||||
|
||||
# Update all existing hosts where we know the PK based on instance_id.
|
||||
all_host_pks = sorted(mem_host_pk_map.keys())
|
||||
for offset in xrange(0, len(all_host_pks), self._batch_size):
|
||||
for offset in range(0, len(all_host_pks), self._batch_size):
|
||||
host_pks = all_host_pks[offset:(offset + self._batch_size)]
|
||||
for db_host in self.inventory.hosts.filter( pk__in=host_pks):
|
||||
if db_host.pk in host_pks_updated:
|
||||
@@ -761,7 +699,7 @@ class Command(BaseCommand):
|
||||
|
||||
# Update all existing hosts where we know the instance_id.
|
||||
all_instance_ids = sorted(mem_host_instance_id_map.keys())
|
||||
for offset in xrange(0, len(all_instance_ids), self._batch_size):
|
||||
for offset in range(0, len(all_instance_ids), self._batch_size):
|
||||
instance_ids = all_instance_ids[offset:(offset + self._batch_size)]
|
||||
for db_host in self.inventory.hosts.filter( instance_id__in=instance_ids):
|
||||
if db_host.pk in host_pks_updated:
|
||||
@@ -773,7 +711,7 @@ class Command(BaseCommand):
|
||||
|
||||
# Update all existing hosts by name.
|
||||
all_host_names = sorted(mem_host_name_map.keys())
|
||||
for offset in xrange(0, len(all_host_names), self._batch_size):
|
||||
for offset in range(0, len(all_host_names), self._batch_size):
|
||||
host_names = all_host_names[offset:(offset + self._batch_size)]
|
||||
for db_host in self.inventory.hosts.filter( name__in=host_names):
|
||||
if db_host.pk in host_pks_updated:
|
||||
@@ -815,15 +753,15 @@ class Command(BaseCommand):
|
||||
'''
|
||||
if settings.SQL_DEBUG:
|
||||
queries_before = len(connection.queries)
|
||||
all_group_names = sorted([k for k,v in self.all_group.all_groups.iteritems() if v.children])
|
||||
all_group_names = sorted([k for k,v in self.all_group.all_groups.items() if v.children])
|
||||
group_group_count = 0
|
||||
for offset in xrange(0, len(all_group_names), self._batch_size):
|
||||
for offset in range(0, len(all_group_names), self._batch_size):
|
||||
group_names = all_group_names[offset:(offset + self._batch_size)]
|
||||
for db_group in self.inventory.groups.filter(name__in=group_names):
|
||||
mem_group = self.all_group.all_groups[db_group.name]
|
||||
group_group_count += len(mem_group.children)
|
||||
all_child_names = sorted([g.name for g in mem_group.children])
|
||||
for offset2 in xrange(0, len(all_child_names), self._batch_size):
|
||||
for offset2 in range(0, len(all_child_names), self._batch_size):
|
||||
child_names = all_child_names[offset2:(offset2 + self._batch_size)]
|
||||
db_children_qs = self.inventory.groups.filter(name__in=child_names)
|
||||
for db_child in db_children_qs.filter(children__id=db_group.id):
|
||||
@@ -842,15 +780,15 @@ class Command(BaseCommand):
|
||||
# belongs.
|
||||
if settings.SQL_DEBUG:
|
||||
queries_before = len(connection.queries)
|
||||
all_group_names = sorted([k for k,v in self.all_group.all_groups.iteritems() if v.hosts])
|
||||
all_group_names = sorted([k for k,v in self.all_group.all_groups.items() if v.hosts])
|
||||
group_host_count = 0
|
||||
for offset in xrange(0, len(all_group_names), self._batch_size):
|
||||
for offset in range(0, len(all_group_names), self._batch_size):
|
||||
group_names = all_group_names[offset:(offset + self._batch_size)]
|
||||
for db_group in self.inventory.groups.filter(name__in=group_names):
|
||||
mem_group = self.all_group.all_groups[db_group.name]
|
||||
group_host_count += len(mem_group.hosts)
|
||||
all_host_names = sorted([h.name for h in mem_group.hosts if not h.instance_id])
|
||||
for offset2 in xrange(0, len(all_host_names), self._batch_size):
|
||||
for offset2 in range(0, len(all_host_names), self._batch_size):
|
||||
host_names = all_host_names[offset2:(offset2 + self._batch_size)]
|
||||
db_hosts_qs = self.inventory.hosts.filter(name__in=host_names)
|
||||
for db_host in db_hosts_qs.filter(groups__id=db_group.id):
|
||||
@@ -859,7 +797,7 @@ class Command(BaseCommand):
|
||||
self._batch_add_m2m(db_group.hosts, db_host)
|
||||
logger.debug('Host "%s" added to group "%s"', db_host.name, db_group.name)
|
||||
all_instance_ids = sorted([h.instance_id for h in mem_group.hosts if h.instance_id])
|
||||
for offset2 in xrange(0, len(all_instance_ids), self._batch_size):
|
||||
for offset2 in range(0, len(all_instance_ids), self._batch_size):
|
||||
instance_ids = all_instance_ids[offset2:(offset2 + self._batch_size)]
|
||||
db_hosts_qs = self.inventory.hosts.filter(instance_id__in=instance_ids)
|
||||
for db_host in db_hosts_qs.filter(groups__id=db_group.id):
|
||||
@@ -926,6 +864,7 @@ class Command(BaseCommand):
|
||||
self.set_logging_level()
|
||||
self.inventory_name = options.get('inventory_name', None)
|
||||
self.inventory_id = options.get('inventory_id', None)
|
||||
venv_path = options.get('venv', None)
|
||||
self.overwrite = bool(options.get('overwrite', False))
|
||||
self.overwrite_vars = bool(options.get('overwrite_vars', False))
|
||||
self.keep_vars = bool(options.get('keep_vars', False))
|
||||
@@ -986,12 +925,26 @@ class Command(BaseCommand):
|
||||
self.inventory_update.status = 'running'
|
||||
self.inventory_update.save()
|
||||
|
||||
# Load inventory from source.
|
||||
self.all_group = load_inventory_source(self.source,
|
||||
self.group_filter_re,
|
||||
self.host_filter_re,
|
||||
self.exclude_empty_groups,
|
||||
self.is_custom)
|
||||
source = self.get_source_absolute_path(self.source)
|
||||
|
||||
data = AnsibleInventoryLoader(source=source, is_custom=self.is_custom, venv_path=venv_path).load()
|
||||
|
||||
logger.debug('Finished loading from source: %s', source)
|
||||
logger.info('Processing JSON output...')
|
||||
inventory = MemInventory(
|
||||
group_filter_re=self.group_filter_re, host_filter_re=self.host_filter_re)
|
||||
inventory = dict_to_mem_data(data, inventory=inventory)
|
||||
|
||||
del data # forget dict from import, could be large
|
||||
|
||||
logger.info('Loaded %d groups, %d hosts', len(inventory.all_group.all_groups),
|
||||
len(inventory.all_group.all_hosts))
|
||||
|
||||
if self.exclude_empty_groups:
|
||||
inventory.delete_empty_groups()
|
||||
|
||||
self.all_group = inventory.all_group
|
||||
|
||||
if settings.DEBUG:
|
||||
# depending on inventory source, this output can be
|
||||
# *exceedingly* verbose - crawling a deeply nested
|
||||
@@ -1074,4 +1027,4 @@ class Command(BaseCommand):
|
||||
if exc and isinstance(exc, CommandError):
|
||||
sys.exit(1)
|
||||
elif exc:
|
||||
raise
|
||||
raise exc
|
||||
|
||||
@@ -1,17 +0,0 @@
|
||||
# Copyright (c) 2017 Ansible by Red Hat
|
||||
# All Rights Reserved
|
||||
|
||||
# Borrow from another AWX command
|
||||
from awx.main.management.commands.provision_instance import Command as OtherCommand
|
||||
|
||||
# Python
|
||||
import warnings
|
||||
|
||||
|
||||
class Command(OtherCommand):
|
||||
|
||||
def handle(self, *args, **options):
|
||||
# TODO: delete this entire file in 3.3
|
||||
warnings.warn('This command is replaced with `provision_instance` and will '
|
||||
'be removed in release 3.3.')
|
||||
return super(Command, self).handle(*args, **options)
|
||||
@@ -19,11 +19,11 @@ class InstanceNotFound(Exception):
|
||||
class Command(BaseCommand):
|
||||
|
||||
def add_arguments(self, parser):
|
||||
parser.add_argument('--queuename', dest='queuename', type=lambda s: six.text_type(s, 'utf8'),
|
||||
parser.add_argument('--queuename', dest='queuename', type=str,
|
||||
help='Queue to create/update')
|
||||
parser.add_argument('--hostnames', dest='hostnames', type=lambda s: six.text_type(s, 'utf8'),
|
||||
parser.add_argument('--hostnames', dest='hostnames', type=str,
|
||||
help='Comma-Delimited Hosts to add to the Queue (will not remove already assigned instances)')
|
||||
parser.add_argument('--controller', dest='controller', type=lambda s: six.text_type(s, 'utf8'),
|
||||
parser.add_argument('--controller', dest='controller', type=str,
|
||||
default='', help='The controlling group (makes this an isolated group)')
|
||||
parser.add_argument('--instance_percent', dest='instance_percent', type=int, default=0,
|
||||
help='The percentage of active instances that will be assigned to this group'),
|
||||
|
||||
@@ -4,6 +4,7 @@
|
||||
import sys
|
||||
import time
|
||||
import json
|
||||
import random
|
||||
|
||||
from django.utils import timezone
|
||||
from django.core.management.base import BaseCommand
|
||||
@@ -26,7 +27,21 @@ from awx.api.serializers import (
|
||||
)
|
||||
|
||||
|
||||
class ReplayJobEvents():
|
||||
class JobStatusLifeCycle():
|
||||
def emit_job_status(self, job, status):
|
||||
# {"status": "successful", "project_id": 13, "unified_job_id": 659, "group_name": "jobs"}
|
||||
job.websocket_emit_status(status)
|
||||
|
||||
def determine_job_event_finish_status_index(self, job_event_count, random_seed):
|
||||
if random_seed == 0:
|
||||
return job_event_count - 1
|
||||
|
||||
random.seed(random_seed)
|
||||
job_event_index = random.randint(0, job_event_count - 1)
|
||||
return job_event_index
|
||||
|
||||
|
||||
class ReplayJobEvents(JobStatusLifeCycle):
|
||||
|
||||
recording_start = None
|
||||
replay_start = None
|
||||
@@ -76,9 +91,10 @@ class ReplayJobEvents():
|
||||
job_events = job.inventory_update_events.order_by('created')
|
||||
elif type(job) is SystemJob:
|
||||
job_events = job.system_job_events.order_by('created')
|
||||
if job_events.count() == 0:
|
||||
count = job_events.count()
|
||||
if count == 0:
|
||||
raise RuntimeError("No events for job id {}".format(job.id))
|
||||
return job_events
|
||||
return job_events, count
|
||||
|
||||
def get_serializer(self, job):
|
||||
if type(job) is Job:
|
||||
@@ -95,7 +111,7 @@ class ReplayJobEvents():
|
||||
raise RuntimeError("Job is of type {} and replay is not yet supported.".format(type(job)))
sys.exit(1)

def run(self, job_id, speed=1.0, verbosity=0, skip_range=[]):
def run(self, job_id, speed=1.0, verbosity=0, skip_range=[], random_seed=0, final_status_delay=0, debug=False):
stats = {
'events_ontime': {
'total': 0,
@@ -119,17 +135,27 @@ class ReplayJobEvents():
}
try:
job = self.get_job(job_id)
job_events = self.get_job_events(job)
job_events, job_event_count = self.get_job_events(job)
serializer = self.get_serializer(job)
except RuntimeError as e:
print("{}".format(e.message))
sys.exit(1)

je_previous = None

self.emit_job_status(job, 'pending')
self.emit_job_status(job, 'waiting')
self.emit_job_status(job, 'running')

finish_status_index = self.determine_job_event_finish_status_index(job_event_count, random_seed)

for n, je_current in enumerate(job_events):
if je_current.counter in skip_range:
continue

if debug:
input("{} of {}:".format(n, job_event_count))

if not je_previous:
stats['recording_start'] = je_current.created
self.start(je_current.created)
@@ -146,7 +172,7 @@ class ReplayJobEvents():
print("recording: next job in {} seconds".format(recording_diff))
if replay_offset >= 0:
replay_diff = recording_diff - replay_offset

if replay_diff > 0:
stats['events_ontime']['total'] += 1
if verbosity >= 3:
@@ -167,6 +193,11 @@ class ReplayJobEvents():
stats['events_total'] += 1
je_previous = je_current

if n == finish_status_index:
if final_status_delay != 0:
self.sleep(final_status_delay)
self.emit_job_status(job, job.status)

if stats['events_total'] > 2:
stats['replay_end'] = self.now()
stats['replay_duration'] = (stats['replay_end'] - stats['replay_start']).total_seconds()
@@ -206,16 +237,26 @@ class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument('--job_id', dest='job_id', type=int, metavar='j',
help='Id of the job to replay (job or adhoc)')
parser.add_argument('--speed', dest='speed', type=int, metavar='s',
parser.add_argument('--speed', dest='speed', type=float, metavar='s',
help='Speedup factor.')
parser.add_argument('--skip-range', dest='skip_range', type=str, metavar='k',
default='0:-1:1', help='Range of events to skip')
parser.add_argument('--random-seed', dest='random_seed', type=int, metavar='r',
default=0, help='Random number generator seed to use when determining job_event index to emit final job status')
parser.add_argument('--final-status-delay', dest='final_status_delay', type=float, metavar='f',
default=0, help='Delay between event and final status emit')
parser.add_argument('--debug', dest='debug', type=bool, metavar='d',
default=False, help='Enable step mode to control emission of job events one at a time.')

def handle(self, *args, **options):
job_id = options.get('job_id')
speed = options.get('speed') or 1
verbosity = options.get('verbosity') or 0
random_seed = options.get('random_seed')
final_status_delay = options.get('final_status_delay')
debug = options.get('debug')
skip = self._parse_slice_range(options.get('skip_range'))

replayer = ReplayJobEvents()
replayer.run(job_id, speed, verbosity, skip)
replayer.run(job_id, speed=speed, verbosity=verbosity, skip_range=skip, random_seed=random_seed,
final_status_delay=final_status_delay, debug=debug)
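With the new options wired through handle(), a replay can be driven as in the sketch below. This is illustrative only: the replay_job_events command name, the job id, and the option values are assumptions, and --skip-range uses the start:stop:step form implied by its '0:-1:1' default.

# hypothetical invocation of the extended replay command via Django's call_command
from django.core.management import call_command

call_command(
    'replay_job_events',        # assumed command module name
    job_id=123,                 # illustrative job id
    speed=2.0,                  # replay at double speed (now a float)
    skip_range='0:10:1',        # skip the first ten event counters
    random_seed=42,             # seed for picking the final-status event index
    final_status_delay=5.0,     # wait 5s between that event and the final status
    debug=True,                 # step through events one at a time via input()
)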
37 awx/main/management/commands/revoke_oauth2_tokens.py Normal file
@@ -0,0 +1,37 @@
# Django
from django.core.management.base import BaseCommand, CommandError
from django.contrib.auth.models import User
from django.core.exceptions import ObjectDoesNotExist

# AWX
from awx.main.models.oauth import OAuth2AccessToken
from oauth2_provider.models import RefreshToken


def revoke_tokens(token_list):
for token in token_list:
token.revoke()
print('revoked {} {}'.format(token.__class__.__name__, token.token))


class Command(BaseCommand):
"""Command that revokes OAuth2 access tokens."""
help='Revokes OAuth2 access tokens. Use --all to revoke access and refresh tokens.'

def add_arguments(self, parser):
parser.add_argument('--user', dest='user', type=str, help='revoke OAuth2 tokens for a specific username')
parser.add_argument('--all', dest='all', action='store_true', help='revoke OAuth2 access tokens and refresh tokens')

def handle(self, *args, **options):
if not options['user']:
if options['all']:
revoke_tokens(RefreshToken.objects.filter(revoked=None))
revoke_tokens(OAuth2AccessToken.objects.all())
else:
try:
user = User.objects.get(username=options['user'])
except ObjectDoesNotExist:
raise CommandError('A user with that username does not exist.')
if options['all']:
revoke_tokens(RefreshToken.objects.filter(revoked=None).filter(user=user))
revoke_tokens(user.main_oauth2accesstoken.filter(user=user))
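A minimal usage sketch for the new command, again via call_command (the username is illustrative; the command name comes from the file path above):

from django.core.management import call_command

# revoke both access and refresh tokens for a single user
call_command('revoke_oauth2_tokens', user='alice', all=True)

# revoke every access and refresh token in the system
call_command('revoke_oauth2_tokens', all=True)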
@@ -19,7 +19,7 @@ logger = logging.getLogger('awx.main.dispatch')


def construct_bcast_queue_name(common_name):
return common_name.encode('utf8') + '_' + settings.CLUSTER_HOST_ID
return common_name + '_' + settings.CLUSTER_HOST_ID


class Command(BaseCommand):
@@ -69,21 +69,42 @@ class Command(BaseCommand):

return TaskResult()

sched_file = '/var/lib/awx/beat.db'
app = Celery()
app.conf.BROKER_URL = settings.BROKER_URL
app.conf.CELERY_TASK_RESULT_EXPIRES = False

# celery in py3 seems to have a bug where the celerybeat schedule
# shelve can become corrupted; we've _only_ seen this in Ubuntu and py36
# it can be avoided by detecting and removing the corrupted file
# at some point, we'll just stop using celerybeat, because it's clearly
# buggy, too -_-
#
# https://github.com/celery/celery/issues/4777
sched = AWXScheduler(schedule_filename=sched_file, app=app)
try:
sched.setup_schedule()
except Exception:
logger.exception('{} is corrupted, removing.'.format(sched_file))
sched._remove_db()
finally:
try:
sched.close()
except Exception:
logger.exception('{} failed to sync/close'.format(sched_file))

beat.Beat(
30,
app,
schedule='/var/lib/awx/beat.db', scheduler_cls=AWXScheduler
schedule=sched_file, scheduler_cls=AWXScheduler
).run()

def handle(self, *arg, **options):
if options.get('status'):
print Control('dispatcher').status()
print(Control('dispatcher').status())
return
if options.get('running'):
print Control('dispatcher').running()
print(Control('dispatcher').running())
return
if options.get('reload'):
return Control('dispatcher').control({'control': 'reload'})
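For the control paths in handle(), a sketch of how they might be exercised (the run_dispatcher command name is an assumption based on Control('dispatcher')):

from django.core.management import call_command

call_command('run_dispatcher', status=True)    # print the dispatcher's status report
call_command('run_dispatcher', running=True)   # print what the dispatcher is currently running
call_command('run_dispatcher', reload=True)    # ask the dispatcher to reload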
@@ -126,8 +126,9 @@ class SessionTimeoutMiddleware(object):
"""

def process_response(self, request, response):
should_skip = 'HTTP_X_WS_SESSION_QUIET' in request.META
req_session = getattr(request, 'session', None)
if req_session and not req_session.is_empty():
if req_session and not req_session.is_empty() and should_skip is False:
expiry = int(settings.SESSION_COOKIE_AGE)
request.session.set_expiry(expiry)
response['Session-Timeout'] = expiry
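A sketch of the new header's effect, using Django's test client (endpoint and credentials are illustrative; HTTP_X_WS_SESSION_QUIET in request.META corresponds to an X-WS-Session-Quiet request header):

from django.test import Client

c = Client()
c.login(username='admin', password='password')  # illustrative credentials

# an ordinary request still refreshes the session and reports the timeout
assert 'Session-Timeout' in c.get('/api/v2/me/')

# a quiet request (e.g. background websocket polling) skips the refresh,
# so this middleware does not set the Session-Timeout header
assert 'Session-Timeout' not in c.get('/api/v2/me/', HTTP_X_WS_SESSION_QUIET='true')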
@@ -27,7 +27,7 @@ class Migration(migrations.Migration):
name='ActivityStream',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('operation', models.CharField(max_length=13, choices=[(b'create', 'Entity Created'), (b'update', 'Entity Updated'), (b'delete', 'Entity Deleted'), (b'associate', 'Entity Associated with another Entity'), (b'disassociate', 'Entity was Disassociated with another Entity')])),
('operation', models.CharField(max_length=13, choices=[('create', 'Entity Created'), ('update', 'Entity Updated'), ('delete', 'Entity Deleted'), ('associate', 'Entity Associated with another Entity'), ('disassociate', 'Entity was Disassociated with another Entity')])),
('timestamp', models.DateTimeField(auto_now_add=True)),
('changes', models.TextField(blank=True)),
('object_relationship_type', models.TextField(blank=True)),
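The pattern repeated throughout these migration hunks is purely mechanical: every b'...' default and choice key becomes a native string. A short REPL illustration of why this matters once the code runs on Python 3:

>>> b'create' == 'create'   # bytes and str never compare equal on Python 3
False
>>> b'' == ''
False

Keeping the bytes literals would leave defaults and choice keys that can never match the text values Django stores and compares for these CharFields.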
@@ -42,8 +42,8 @@ class Migration(migrations.Migration):
|
||||
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
|
||||
('created', models.DateTimeField(default=None, editable=False)),
|
||||
('modified', models.DateTimeField(default=None, editable=False)),
|
||||
('host_name', models.CharField(default=b'', max_length=1024, editable=False)),
|
||||
('event', models.CharField(max_length=100, choices=[(b'runner_on_failed', 'Host Failed'), (b'runner_on_ok', 'Host OK'), (b'runner_on_unreachable', 'Host Unreachable'), (b'runner_on_skipped', 'Host Skipped')])),
|
||||
('host_name', models.CharField(default='', max_length=1024, editable=False)),
|
||||
('event', models.CharField(max_length=100, choices=[('runner_on_failed', 'Host Failed'), ('runner_on_ok', 'Host OK'), ('runner_on_unreachable', 'Host Unreachable'), ('runner_on_skipped', 'Host Skipped')])),
|
||||
('event_data', jsonfield.fields.JSONField(default={}, blank=True)),
|
||||
('failed', models.BooleanField(default=False, editable=False)),
|
||||
('changed', models.BooleanField(default=False, editable=False)),
|
||||
@@ -60,8 +60,8 @@ class Migration(migrations.Migration):
|
||||
('created', models.DateTimeField(auto_now_add=True)),
|
||||
('modified', models.DateTimeField(auto_now=True)),
|
||||
('expires', models.DateTimeField(default=django.utils.timezone.now)),
|
||||
('request_hash', models.CharField(default=b'', max_length=40, blank=True)),
|
||||
('reason', models.CharField(default=b'', help_text='Reason the auth token was invalidated.', max_length=1024, blank=True)),
|
||||
('request_hash', models.CharField(default='', max_length=40, blank=True)),
|
||||
('reason', models.CharField(default='', help_text='Reason the auth token was invalidated.', max_length=1024, blank=True)),
|
||||
('user', models.ForeignKey(related_name='auth_tokens', to=settings.AUTH_USER_MODEL)),
|
||||
],
|
||||
),
|
||||
@@ -71,22 +71,22 @@ class Migration(migrations.Migration):
|
||||
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
|
||||
('created', models.DateTimeField(default=None, editable=False)),
|
||||
('modified', models.DateTimeField(default=None, editable=False)),
|
||||
('description', models.TextField(default=b'', blank=True)),
|
||||
('description', models.TextField(default='', blank=True)),
|
||||
('active', models.BooleanField(default=True, editable=False)),
|
||||
('name', models.CharField(max_length=512)),
|
||||
('kind', models.CharField(default=b'ssh', max_length=32, choices=[(b'ssh', 'Machine'), (b'scm', 'Source Control'), (b'aws', 'Amazon Web Services'), (b'rax', 'Rackspace'), (b'vmware', 'VMware vCenter'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure'), (b'openstack', 'OpenStack')])),
|
||||
('kind', models.CharField(default='ssh', max_length=32, choices=[('ssh', 'Machine'), ('scm', 'Source Control'), ('aws', 'Amazon Web Services'), ('rax', 'Rackspace'), ('vmware', 'VMware vCenter'), ('gce', 'Google Compute Engine'), ('azure', 'Microsoft Azure'), ('openstack', 'OpenStack')])),
|
||||
('cloud', models.BooleanField(default=False, editable=False)),
|
||||
('host', models.CharField(default=b'', help_text='The hostname or IP address to use.', max_length=1024, verbose_name='Host', blank=True)),
|
||||
('username', models.CharField(default=b'', help_text='Username for this credential.', max_length=1024, verbose_name='Username', blank=True)),
|
||||
('password', models.CharField(default=b'', help_text='Password for this credential (or "ASK" to prompt the user for machine credentials).', max_length=1024, verbose_name='Password', blank=True)),
|
||||
('security_token', models.CharField(default=b'', help_text='Security Token for this credential', max_length=1024, verbose_name='Security Token', blank=True)),
|
||||
('project', models.CharField(default=b'', help_text='The identifier for the project.', max_length=100, verbose_name='Project', blank=True)),
|
||||
('ssh_key_data', models.TextField(default=b'', help_text='RSA or DSA private key to be used instead of password.', verbose_name='SSH private key', blank=True)),
|
||||
('ssh_key_unlock', models.CharField(default=b'', help_text='Passphrase to unlock SSH private key if encrypted (or "ASK" to prompt the user for machine credentials).', max_length=1024, verbose_name='SSH key unlock', blank=True)),
|
||||
('become_method', models.CharField(default=b'', help_text='Privilege escalation method.', max_length=32, blank=True, choices=[(b'', 'None'), (b'sudo', 'Sudo'), (b'su', 'Su'), (b'pbrun', 'Pbrun'), (b'pfexec', 'Pfexec')])),
|
||||
('become_username', models.CharField(default=b'', help_text='Privilege escalation username.', max_length=1024, blank=True)),
|
||||
('become_password', models.CharField(default=b'', help_text='Password for privilege escalation method.', max_length=1024, blank=True)),
|
||||
('vault_password', models.CharField(default=b'', help_text='Vault password (or "ASK" to prompt the user).', max_length=1024, blank=True)),
|
||||
('host', models.CharField(default='', help_text='The hostname or IP address to use.', max_length=1024, verbose_name='Host', blank=True)),
|
||||
('username', models.CharField(default='', help_text='Username for this credential.', max_length=1024, verbose_name='Username', blank=True)),
|
||||
('password', models.CharField(default='', help_text='Password for this credential (or "ASK" to prompt the user for machine credentials).', max_length=1024, verbose_name='Password', blank=True)),
|
||||
('security_token', models.CharField(default='', help_text='Security Token for this credential', max_length=1024, verbose_name='Security Token', blank=True)),
|
||||
('project', models.CharField(default='', help_text='The identifier for the project.', max_length=100, verbose_name='Project', blank=True)),
|
||||
('ssh_key_data', models.TextField(default='', help_text='RSA or DSA private key to be used instead of password.', verbose_name='SSH private key', blank=True)),
|
||||
('ssh_key_unlock', models.CharField(default='', help_text='Passphrase to unlock SSH private key if encrypted (or "ASK" to prompt the user for machine credentials).', max_length=1024, verbose_name='SSH key unlock', blank=True)),
|
||||
('become_method', models.CharField(default='', help_text='Privilege escalation method.', max_length=32, blank=True, choices=[('', 'None'), ('sudo', 'Sudo'), ('su', 'Su'), ('pbrun', 'Pbrun'), ('pfexec', 'Pfexec')])),
|
||||
('become_username', models.CharField(default='', help_text='Privilege escalation username.', max_length=1024, blank=True)),
|
||||
('become_password', models.CharField(default='', help_text='Password for privilege escalation method.', max_length=1024, blank=True)),
|
||||
('vault_password', models.CharField(default='', help_text='Vault password (or "ASK" to prompt the user).', max_length=1024, blank=True)),
|
||||
('created_by', models.ForeignKey(related_name="{u'class': 'credential', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
|
||||
('modified_by', models.ForeignKey(related_name="{u'class': 'credential', u'app_label': 'main'}(class)s_modified+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
|
||||
('tags', taggit.managers.TaggableManager(to='taggit.Tag', through='taggit.TaggedItem', blank=True, help_text='A comma-separated list of tags.', verbose_name='Tags')),
|
||||
@@ -101,10 +101,10 @@ class Migration(migrations.Migration):
|
||||
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
|
||||
('created', models.DateTimeField(default=None, editable=False)),
|
||||
('modified', models.DateTimeField(default=None, editable=False)),
|
||||
('description', models.TextField(default=b'', blank=True)),
|
||||
('description', models.TextField(default='', blank=True)),
|
||||
('active', models.BooleanField(default=True, editable=False)),
|
||||
('name', models.CharField(max_length=512)),
|
||||
('script', models.TextField(default=b'', help_text='Inventory script contents', blank=True)),
|
||||
('script', models.TextField(default='', help_text='Inventory script contents', blank=True)),
|
||||
('created_by', models.ForeignKey(related_name="{u'class': 'custominventoryscript', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
|
||||
('modified_by', models.ForeignKey(related_name="{u'class': 'custominventoryscript', u'app_label': 'main'}(class)s_modified+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
|
||||
],
|
||||
@@ -118,10 +118,10 @@ class Migration(migrations.Migration):
|
||||
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
|
||||
('created', models.DateTimeField(default=None, editable=False)),
|
||||
('modified', models.DateTimeField(default=None, editable=False)),
|
||||
('description', models.TextField(default=b'', blank=True)),
|
||||
('description', models.TextField(default='', blank=True)),
|
||||
('active', models.BooleanField(default=True, editable=False)),
|
||||
('name', models.CharField(max_length=512)),
|
||||
('variables', models.TextField(default=b'', help_text='Group variables in JSON or YAML format.', blank=True)),
|
||||
('variables', models.TextField(default='', help_text='Group variables in JSON or YAML format.', blank=True)),
|
||||
('total_hosts', models.PositiveIntegerField(default=0, help_text='Total number of hosts directly or indirectly in this group.', editable=False)),
|
||||
('has_active_failures', models.BooleanField(default=False, help_text='Flag indicating whether this group has any hosts with active failures.', editable=False)),
|
||||
('hosts_with_active_failures', models.PositiveIntegerField(default=0, help_text='Number of hosts in this group with active failures.', editable=False)),
|
||||
@@ -140,12 +140,12 @@ class Migration(migrations.Migration):
|
||||
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
|
||||
('created', models.DateTimeField(default=None, editable=False)),
|
||||
('modified', models.DateTimeField(default=None, editable=False)),
|
||||
('description', models.TextField(default=b'', blank=True)),
|
||||
('description', models.TextField(default='', blank=True)),
|
||||
('active', models.BooleanField(default=True, editable=False)),
|
||||
('name', models.CharField(max_length=512)),
|
||||
('enabled', models.BooleanField(default=True, help_text='Is this host online and available for running jobs?')),
|
||||
('instance_id', models.CharField(default=b'', max_length=100, blank=True)),
|
||||
('variables', models.TextField(default=b'', help_text='Host variables in JSON or YAML format.', blank=True)),
|
||||
('instance_id', models.CharField(default='', max_length=100, blank=True)),
|
||||
('variables', models.TextField(default='', help_text='Host variables in JSON or YAML format.', blank=True)),
|
||||
('has_active_failures', models.BooleanField(default=False, help_text='Flag indicating whether the last job failed for this host.', editable=False)),
|
||||
('has_inventory_sources', models.BooleanField(default=False, help_text='Flag indicating whether this host was created/updated from any external inventory sources.', editable=False)),
|
||||
('created_by', models.ForeignKey(related_name="{u'class': 'host', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
|
||||
@@ -171,10 +171,10 @@ class Migration(migrations.Migration):
|
||||
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
|
||||
('created', models.DateTimeField(default=None, editable=False)),
|
||||
('modified', models.DateTimeField(default=None, editable=False)),
|
||||
('description', models.TextField(default=b'', blank=True)),
|
||||
('description', models.TextField(default='', blank=True)),
|
||||
('active', models.BooleanField(default=True, editable=False)),
|
||||
('name', models.CharField(unique=True, max_length=512)),
|
||||
('variables', models.TextField(default=b'', help_text='Inventory variables in JSON or YAML format.', blank=True)),
|
||||
('variables', models.TextField(default='', help_text='Inventory variables in JSON or YAML format.', blank=True)),
|
||||
('has_active_failures', models.BooleanField(default=False, help_text='Flag indicating whether any hosts in this inventory have failed.', editable=False)),
|
||||
('total_hosts', models.PositiveIntegerField(default=0, help_text='Total number of hosts in this inventory.', editable=False)),
|
||||
('hosts_with_active_failures', models.PositiveIntegerField(default=0, help_text='Number of hosts in this inventory with active failures.', editable=False)),
|
||||
@@ -197,14 +197,14 @@ class Migration(migrations.Migration):
|
||||
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
|
||||
('created', models.DateTimeField(default=None, editable=False)),
|
||||
('modified', models.DateTimeField(default=None, editable=False)),
|
||||
('event', models.CharField(max_length=100, choices=[(b'runner_on_failed', 'Host Failed'), (b'runner_on_ok', 'Host OK'), (b'runner_on_error', 'Host Failure'), (b'runner_on_skipped', 'Host Skipped'), (b'runner_on_unreachable', 'Host Unreachable'), (b'runner_on_no_hosts', 'No Hosts Remaining'), (b'runner_on_async_poll', 'Host Polling'), (b'runner_on_async_ok', 'Host Async OK'), (b'runner_on_async_failed', 'Host Async Failure'), (b'runner_on_file_diff', 'File Difference'), (b'playbook_on_start', 'Playbook Started'), (b'playbook_on_notify', 'Running Handlers'), (b'playbook_on_no_hosts_matched', 'No Hosts Matched'), (b'playbook_on_no_hosts_remaining', 'No Hosts Remaining'), (b'playbook_on_task_start', 'Task Started'), (b'playbook_on_vars_prompt', 'Variables Prompted'), (b'playbook_on_setup', 'Gathering Facts'), (b'playbook_on_import_for_host', 'internal: on Import for Host'), (b'playbook_on_not_import_for_host', 'internal: on Not Import for Host'), (b'playbook_on_play_start', 'Play Started'), (b'playbook_on_stats', 'Playbook Complete')])),
|
||||
('event', models.CharField(max_length=100, choices=[('runner_on_failed', 'Host Failed'), ('runner_on_ok', 'Host OK'), ('runner_on_error', 'Host Failure'), ('runner_on_skipped', 'Host Skipped'), ('runner_on_unreachable', 'Host Unreachable'), ('runner_on_no_hosts', 'No Hosts Remaining'), ('runner_on_async_poll', 'Host Polling'), ('runner_on_async_ok', 'Host Async OK'), ('runner_on_async_failed', 'Host Async Failure'), ('runner_on_file_diff', 'File Difference'), ('playbook_on_start', 'Playbook Started'), ('playbook_on_notify', 'Running Handlers'), ('playbook_on_no_hosts_matched', 'No Hosts Matched'), ('playbook_on_no_hosts_remaining', 'No Hosts Remaining'), ('playbook_on_task_start', 'Task Started'), ('playbook_on_vars_prompt', 'Variables Prompted'), ('playbook_on_setup', 'Gathering Facts'), ('playbook_on_import_for_host', 'internal: on Import for Host'), ('playbook_on_not_import_for_host', 'internal: on Not Import for Host'), ('playbook_on_play_start', 'Play Started'), ('playbook_on_stats', 'Playbook Complete')])),
|
||||
('event_data', jsonfield.fields.JSONField(default={}, blank=True)),
|
||||
('failed', models.BooleanField(default=False, editable=False)),
|
||||
('changed', models.BooleanField(default=False, editable=False)),
|
||||
('host_name', models.CharField(default=b'', max_length=1024, editable=False)),
|
||||
('play', models.CharField(default=b'', max_length=1024, editable=False)),
|
||||
('role', models.CharField(default=b'', max_length=1024, editable=False)),
|
||||
('task', models.CharField(default=b'', max_length=1024, editable=False)),
|
||||
('host_name', models.CharField(default='', max_length=1024, editable=False)),
|
||||
('play', models.CharField(default='', max_length=1024, editable=False)),
|
||||
('role', models.CharField(default='', max_length=1024, editable=False)),
|
||||
('task', models.CharField(default='', max_length=1024, editable=False)),
|
||||
('counter', models.PositiveIntegerField(default=0)),
|
||||
('host', models.ForeignKey(related_name='job_events_as_primary_host', on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to='main.Host', null=True)),
|
||||
('hosts', models.ManyToManyField(related_name='job_events', editable=False, to='main.Host')),
|
||||
@@ -220,7 +220,7 @@ class Migration(migrations.Migration):
|
||||
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
|
||||
('created', models.DateTimeField(default=None, editable=False)),
|
||||
('modified', models.DateTimeField(default=None, editable=False)),
|
||||
('host_name', models.CharField(default=b'', max_length=1024, editable=False)),
|
||||
('host_name', models.CharField(default='', max_length=1024, editable=False)),
|
||||
('changed', models.PositiveIntegerField(default=0, editable=False)),
|
||||
('dark', models.PositiveIntegerField(default=0, editable=False)),
|
||||
('failures', models.PositiveIntegerField(default=0, editable=False)),
|
||||
@@ -250,7 +250,7 @@ class Migration(migrations.Migration):
|
||||
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
|
||||
('created', models.DateTimeField(default=None, editable=False)),
|
||||
('modified', models.DateTimeField(default=None, editable=False)),
|
||||
('description', models.TextField(default=b'', blank=True)),
|
||||
('description', models.TextField(default='', blank=True)),
|
||||
('active', models.BooleanField(default=True, editable=False)),
|
||||
('name', models.CharField(unique=True, max_length=512)),
|
||||
('admins', models.ManyToManyField(related_name='admin_of_organizations', to=settings.AUTH_USER_MODEL, blank=True)),
|
||||
@@ -269,10 +269,10 @@ class Migration(migrations.Migration):
|
||||
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
|
||||
('created', models.DateTimeField(default=None, editable=False)),
|
||||
('modified', models.DateTimeField(default=None, editable=False)),
|
||||
('description', models.TextField(default=b'', blank=True)),
|
||||
('description', models.TextField(default='', blank=True)),
|
||||
('active', models.BooleanField(default=True, editable=False)),
|
||||
('name', models.CharField(max_length=512)),
|
||||
('permission_type', models.CharField(max_length=64, choices=[(b'read', 'Read Inventory'), (b'write', 'Edit Inventory'), (b'admin', 'Administrate Inventory'), (b'run', 'Deploy To Inventory'), (b'check', 'Deploy To Inventory (Dry Run)'), (b'scan', 'Scan an Inventory'), (b'create', 'Create a Job Template')])),
|
||||
('permission_type', models.CharField(max_length=64, choices=[('read', 'Read Inventory'), ('write', 'Edit Inventory'), ('admin', 'Administrate Inventory'), ('run', 'Deploy To Inventory'), ('check', 'Deploy To Inventory (Dry Run)'), ('scan', 'Scan an Inventory'), ('create', 'Create a Job Template')])),
|
||||
('run_ad_hoc_commands', models.BooleanField(default=False, help_text='Execute Commands on the Inventory')),
|
||||
('created_by', models.ForeignKey(related_name="{u'class': 'permission', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
|
||||
('inventory', models.ForeignKey(related_name='permissions', on_delete=django.db.models.deletion.SET_NULL, to='main.Inventory', null=True)),
|
||||
@@ -286,7 +286,7 @@ class Migration(migrations.Migration):
|
||||
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
|
||||
('created', models.DateTimeField(default=None, editable=False)),
|
||||
('modified', models.DateTimeField(default=None, editable=False)),
|
||||
('ldap_dn', models.CharField(default=b'', max_length=1024)),
|
||||
('ldap_dn', models.CharField(default='', max_length=1024)),
|
||||
('user', awx.main.fields.AutoOneToOneField(related_name='profile', editable=False, to=settings.AUTH_USER_MODEL)),
|
||||
],
|
||||
),
|
||||
@@ -296,7 +296,7 @@ class Migration(migrations.Migration):
|
||||
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
|
||||
('created', models.DateTimeField(default=None, editable=False)),
|
||||
('modified', models.DateTimeField(default=None, editable=False)),
|
||||
('description', models.TextField(default=b'', blank=True)),
|
||||
('description', models.TextField(default='', blank=True)),
|
||||
('active', models.BooleanField(default=True, editable=False)),
|
||||
('name', models.CharField(unique=True, max_length=512)),
|
||||
('enabled', models.BooleanField(default=True)),
|
||||
@@ -319,7 +319,7 @@ class Migration(migrations.Migration):
|
||||
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
|
||||
('created', models.DateTimeField(default=None, editable=False)),
|
||||
('modified', models.DateTimeField(default=None, editable=False)),
|
||||
('description', models.TextField(default=b'', blank=True)),
|
||||
('description', models.TextField(default='', blank=True)),
|
||||
('active', models.BooleanField(default=True, editable=False)),
|
||||
('name', models.CharField(max_length=512)),
|
||||
('created_by', models.ForeignKey(related_name="{u'class': 'team', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
|
||||
@@ -338,26 +338,26 @@ class Migration(migrations.Migration):
|
||||
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
|
||||
('created', models.DateTimeField(default=None, editable=False)),
|
||||
('modified', models.DateTimeField(default=None, editable=False)),
|
||||
('description', models.TextField(default=b'', blank=True)),
|
||||
('description', models.TextField(default='', blank=True)),
|
||||
('active', models.BooleanField(default=True, editable=False)),
|
||||
('name', models.CharField(max_length=512)),
|
||||
('old_pk', models.PositiveIntegerField(default=None, null=True, editable=False)),
|
||||
('launch_type', models.CharField(default=b'manual', max_length=20, editable=False, choices=[(b'manual', 'Manual'), (b'relaunch', 'Relaunch'), (b'callback', 'Callback'), (b'scheduled', 'Scheduled'), (b'dependency', 'Dependency')])),
|
||||
('launch_type', models.CharField(default='manual', max_length=20, editable=False, choices=[('manual', 'Manual'), ('relaunch', 'Relaunch'), ('callback', 'Callback'), ('scheduled', 'Scheduled'), ('dependency', 'Dependency')])),
|
||||
('cancel_flag', models.BooleanField(default=False, editable=False)),
|
||||
('status', models.CharField(default=b'new', max_length=20, editable=False, choices=[(b'new', 'New'), (b'pending', 'Pending'), (b'waiting', 'Waiting'), (b'running', 'Running'), (b'successful', 'Successful'), (b'failed', 'Failed'), (b'error', 'Error'), (b'canceled', 'Canceled')])),
|
||||
('status', models.CharField(default='new', max_length=20, editable=False, choices=[('new', 'New'), ('pending', 'Pending'), ('waiting', 'Waiting'), ('running', 'Running'), ('successful', 'Successful'), ('failed', 'Failed'), ('error', 'Error'), ('canceled', 'Canceled')])),
|
||||
('failed', models.BooleanField(default=False, editable=False)),
|
||||
('started', models.DateTimeField(default=None, null=True, editable=False)),
|
||||
('finished', models.DateTimeField(default=None, null=True, editable=False)),
|
||||
('elapsed', models.DecimalField(editable=False, max_digits=12, decimal_places=3)),
|
||||
('job_args', models.TextField(default=b'', editable=False, blank=True)),
|
||||
('job_cwd', models.CharField(default=b'', max_length=1024, editable=False, blank=True)),
|
||||
('job_args', models.TextField(default='', editable=False, blank=True)),
|
||||
('job_cwd', models.CharField(default='', max_length=1024, editable=False, blank=True)),
|
||||
('job_env', jsonfield.fields.JSONField(default={}, editable=False, blank=True)),
|
||||
('job_explanation', models.TextField(default=b'', editable=False, blank=True)),
|
||||
('start_args', models.TextField(default=b'', editable=False, blank=True)),
|
||||
('result_stdout_text', models.TextField(default=b'', editable=False, blank=True)),
|
||||
('result_stdout_file', models.TextField(default=b'', editable=False, blank=True)),
|
||||
('result_traceback', models.TextField(default=b'', editable=False, blank=True)),
|
||||
('celery_task_id', models.CharField(default=b'', max_length=100, editable=False, blank=True)),
|
||||
('job_explanation', models.TextField(default='', editable=False, blank=True)),
|
||||
('start_args', models.TextField(default='', editable=False, blank=True)),
|
||||
('result_stdout_text', models.TextField(default='', editable=False, blank=True)),
|
||||
('result_stdout_file', models.TextField(default='', editable=False, blank=True)),
|
||||
('result_traceback', models.TextField(default='', editable=False, blank=True)),
|
||||
('celery_task_id', models.CharField(default='', max_length=100, editable=False, blank=True)),
|
||||
],
|
||||
),
|
||||
migrations.CreateModel(
|
||||
@@ -366,7 +366,7 @@ class Migration(migrations.Migration):
|
||||
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
|
||||
('created', models.DateTimeField(default=None, editable=False)),
|
||||
('modified', models.DateTimeField(default=None, editable=False)),
|
||||
('description', models.TextField(default=b'', blank=True)),
|
||||
('description', models.TextField(default='', blank=True)),
|
||||
('active', models.BooleanField(default=True, editable=False)),
|
||||
('name', models.CharField(max_length=512)),
|
||||
('old_pk', models.PositiveIntegerField(default=None, null=True, editable=False)),
|
||||
@@ -374,19 +374,19 @@ class Migration(migrations.Migration):
|
||||
('last_job_run', models.DateTimeField(default=None, null=True, editable=False)),
|
||||
('has_schedules', models.BooleanField(default=False, editable=False)),
|
||||
('next_job_run', models.DateTimeField(default=None, null=True, editable=False)),
|
||||
('status', models.CharField(default=b'ok', max_length=32, editable=False, choices=[(b'new', 'New'), (b'pending', 'Pending'), (b'waiting', 'Waiting'), (b'running', 'Running'), (b'successful', 'Successful'), (b'failed', 'Failed'), (b'error', 'Error'), (b'canceled', 'Canceled'), (b'never updated', b'Never Updated'), (b'ok', b'OK'), (b'missing', b'Missing'), (b'none', 'No External Source'), (b'updating', 'Updating')])),
|
||||
('status', models.CharField(default='ok', max_length=32, editable=False, choices=[('new', 'New'), ('pending', 'Pending'), ('waiting', 'Waiting'), ('running', 'Running'), ('successful', 'Successful'), ('failed', 'Failed'), ('error', 'Error'), ('canceled', 'Canceled'), ('never updated', 'Never Updated'), ('ok', 'OK'), ('missing', 'Missing'), ('none', 'No External Source'), ('updating', 'Updating')])),
|
||||
],
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='AdHocCommand',
|
||||
fields=[
|
||||
('unifiedjob_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJob')),
|
||||
('job_type', models.CharField(default=b'run', max_length=64, choices=[(b'run', 'Run'), (b'check', 'Check')])),
|
||||
('limit', models.CharField(default=b'', max_length=1024, blank=True)),
|
||||
('module_name', models.CharField(default=b'', max_length=1024, blank=True)),
|
||||
('module_args', models.TextField(default=b'', blank=True)),
|
||||
('job_type', models.CharField(default='run', max_length=64, choices=[('run', 'Run'), ('check', 'Check')])),
|
||||
('limit', models.CharField(default='', max_length=1024, blank=True)),
|
||||
('module_name', models.CharField(default='', max_length=1024, blank=True)),
|
||||
('module_args', models.TextField(default='', blank=True)),
|
||||
('forks', models.PositiveIntegerField(default=0, blank=True)),
|
||||
('verbosity', models.PositiveIntegerField(default=0, blank=True, choices=[(0, b'0 (Normal)'), (1, b'1 (Verbose)'), (2, b'2 (More Verbose)'), (3, b'3 (Debug)'), (4, b'4 (Connection Debug)'), (5, b'5 (WinRM Debug)')])),
|
||||
('verbosity', models.PositiveIntegerField(default=0, blank=True, choices=[(0, '0 (Normal)'), (1, '1 (Verbose)'), (2, '2 (More Verbose)'), (3, '3 (Debug)'), (4, '4 (Connection Debug)'), (5, '5 (WinRM Debug)')])),
|
||||
('become_enabled', models.BooleanField(default=False)),
|
||||
],
|
||||
bases=('main.unifiedjob',),
|
||||
@@ -395,12 +395,12 @@ class Migration(migrations.Migration):
|
||||
name='InventorySource',
|
||||
fields=[
|
||||
('unifiedjobtemplate_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJobTemplate')),
|
||||
('source', models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'Local File, Directory or Script'), (b'rax', 'Rackspace Cloud Servers'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure'), (b'vmware', 'VMware vCenter'), (b'openstack', 'OpenStack'), (b'custom', 'Custom Script')])),
|
||||
('source_path', models.CharField(default=b'', max_length=1024, editable=False, blank=True)),
|
||||
('source_vars', models.TextField(default=b'', help_text='Inventory source variables in YAML or JSON format.', blank=True)),
|
||||
('source_regions', models.CharField(default=b'', max_length=1024, blank=True)),
|
||||
('instance_filters', models.CharField(default=b'', help_text='Comma-separated list of filter expressions (EC2 only). Hosts are imported when ANY of the filters match.', max_length=1024, blank=True)),
|
||||
('group_by', models.CharField(default=b'', help_text='Limit groups automatically created from inventory source (EC2 only).', max_length=1024, blank=True)),
|
||||
('source', models.CharField(default='', max_length=32, blank=True, choices=[('', 'Manual'), ('file', 'Local File, Directory or Script'), ('rax', 'Rackspace Cloud Servers'), ('ec2', 'Amazon EC2'), ('gce', 'Google Compute Engine'), ('azure', 'Microsoft Azure'), ('vmware', 'VMware vCenter'), ('openstack', 'OpenStack'), ('custom', 'Custom Script')])),
|
||||
('source_path', models.CharField(default='', max_length=1024, editable=False, blank=True)),
|
||||
('source_vars', models.TextField(default='', help_text='Inventory source variables in YAML or JSON format.', blank=True)),
|
||||
('source_regions', models.CharField(default='', max_length=1024, blank=True)),
|
||||
('instance_filters', models.CharField(default='', help_text='Comma-separated list of filter expressions (EC2 only). Hosts are imported when ANY of the filters match.', max_length=1024, blank=True)),
|
||||
('group_by', models.CharField(default='', help_text='Limit groups automatically created from inventory source (EC2 only).', max_length=1024, blank=True)),
|
||||
('overwrite', models.BooleanField(default=False, help_text='Overwrite local groups and hosts from remote inventory source.')),
|
||||
('overwrite_vars', models.BooleanField(default=False, help_text='Overwrite local variables from remote inventory source.')),
|
||||
('update_on_launch', models.BooleanField(default=False)),
|
||||
@@ -412,12 +412,12 @@ class Migration(migrations.Migration):
|
||||
name='InventoryUpdate',
|
||||
fields=[
|
||||
('unifiedjob_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJob')),
|
||||
('source', models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'Local File, Directory or Script'), (b'rax', 'Rackspace Cloud Servers'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure'), (b'vmware', 'VMware vCenter'), (b'openstack', 'OpenStack'), (b'custom', 'Custom Script')])),
|
||||
('source_path', models.CharField(default=b'', max_length=1024, editable=False, blank=True)),
|
||||
('source_vars', models.TextField(default=b'', help_text='Inventory source variables in YAML or JSON format.', blank=True)),
|
||||
('source_regions', models.CharField(default=b'', max_length=1024, blank=True)),
|
||||
('instance_filters', models.CharField(default=b'', help_text='Comma-separated list of filter expressions (EC2 only). Hosts are imported when ANY of the filters match.', max_length=1024, blank=True)),
|
||||
('group_by', models.CharField(default=b'', help_text='Limit groups automatically created from inventory source (EC2 only).', max_length=1024, blank=True)),
|
||||
('source', models.CharField(default='', max_length=32, blank=True, choices=[('', 'Manual'), ('file', 'Local File, Directory or Script'), ('rax', 'Rackspace Cloud Servers'), ('ec2', 'Amazon EC2'), ('gce', 'Google Compute Engine'), ('azure', 'Microsoft Azure'), ('vmware', 'VMware vCenter'), ('openstack', 'OpenStack'), ('custom', 'Custom Script')])),
|
||||
('source_path', models.CharField(default='', max_length=1024, editable=False, blank=True)),
|
||||
('source_vars', models.TextField(default='', help_text='Inventory source variables in YAML or JSON format.', blank=True)),
|
||||
('source_regions', models.CharField(default='', max_length=1024, blank=True)),
|
||||
('instance_filters', models.CharField(default='', help_text='Comma-separated list of filter expressions (EC2 only). Hosts are imported when ANY of the filters match.', max_length=1024, blank=True)),
|
||||
('group_by', models.CharField(default='', help_text='Limit groups automatically created from inventory source (EC2 only).', max_length=1024, blank=True)),
|
||||
('overwrite', models.BooleanField(default=False, help_text='Overwrite local groups and hosts from remote inventory source.')),
|
||||
('overwrite_vars', models.BooleanField(default=False, help_text='Overwrite local variables from remote inventory source.')),
|
||||
('license_error', models.BooleanField(default=False, editable=False)),
|
||||
@@ -428,16 +428,16 @@ class Migration(migrations.Migration):
|
||||
name='Job',
|
||||
fields=[
|
||||
('unifiedjob_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJob')),
|
||||
('job_type', models.CharField(default=b'run', max_length=64, choices=[(b'run', 'Run'), (b'check', 'Check'), (b'scan', 'Scan')])),
|
||||
('playbook', models.CharField(default=b'', max_length=1024, blank=True)),
|
||||
('job_type', models.CharField(default='run', max_length=64, choices=[('run', 'Run'), ('check', 'Check'), ('scan', 'Scan')])),
|
||||
('playbook', models.CharField(default='', max_length=1024, blank=True)),
|
||||
('forks', models.PositiveIntegerField(default=0, blank=True)),
|
||||
('limit', models.CharField(default=b'', max_length=1024, blank=True)),
|
||||
('verbosity', models.PositiveIntegerField(default=0, blank=True, choices=[(0, b'0 (Normal)'), (1, b'1 (Verbose)'), (2, b'2 (More Verbose)'), (3, b'3 (Debug)'), (4, b'4 (Connection Debug)'), (5, b'5 (WinRM Debug)')])),
|
||||
('extra_vars', models.TextField(default=b'', blank=True)),
|
||||
('job_tags', models.CharField(default=b'', max_length=1024, blank=True)),
|
||||
('limit', models.CharField(default='', max_length=1024, blank=True)),
|
||||
('verbosity', models.PositiveIntegerField(default=0, blank=True, choices=[(0, '0 (Normal)'), (1, '1 (Verbose)'), (2, '2 (More Verbose)'), (3, '3 (Debug)'), (4, '4 (Connection Debug)'), (5, '5 (WinRM Debug)')])),
|
||||
('extra_vars', models.TextField(default='', blank=True)),
|
||||
('job_tags', models.CharField(default='', max_length=1024, blank=True)),
|
||||
('force_handlers', models.BooleanField(default=False)),
|
||||
('skip_tags', models.CharField(default=b'', max_length=1024, blank=True)),
|
||||
('start_at_task', models.CharField(default=b'', max_length=1024, blank=True)),
|
||||
('skip_tags', models.CharField(default='', max_length=1024, blank=True)),
|
||||
('start_at_task', models.CharField(default='', max_length=1024, blank=True)),
|
||||
('become_enabled', models.BooleanField(default=False)),
|
||||
],
|
||||
options={
|
||||
@@ -449,18 +449,18 @@ class Migration(migrations.Migration):
|
||||
name='JobTemplate',
|
||||
fields=[
|
||||
('unifiedjobtemplate_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJobTemplate')),
|
||||
('job_type', models.CharField(default=b'run', max_length=64, choices=[(b'run', 'Run'), (b'check', 'Check'), (b'scan', 'Scan')])),
|
||||
('playbook', models.CharField(default=b'', max_length=1024, blank=True)),
|
||||
('job_type', models.CharField(default='run', max_length=64, choices=[('run', 'Run'), ('check', 'Check'), ('scan', 'Scan')])),
|
||||
('playbook', models.CharField(default='', max_length=1024, blank=True)),
|
||||
('forks', models.PositiveIntegerField(default=0, blank=True)),
|
||||
('limit', models.CharField(default=b'', max_length=1024, blank=True)),
|
||||
('verbosity', models.PositiveIntegerField(default=0, blank=True, choices=[(0, b'0 (Normal)'), (1, b'1 (Verbose)'), (2, b'2 (More Verbose)'), (3, b'3 (Debug)'), (4, b'4 (Connection Debug)'), (5, b'5 (WinRM Debug)')])),
|
||||
('extra_vars', models.TextField(default=b'', blank=True)),
|
||||
('job_tags', models.CharField(default=b'', max_length=1024, blank=True)),
|
||||
('limit', models.CharField(default='', max_length=1024, blank=True)),
|
||||
('verbosity', models.PositiveIntegerField(default=0, blank=True, choices=[(0, '0 (Normal)'), (1, '1 (Verbose)'), (2, '2 (More Verbose)'), (3, '3 (Debug)'), (4, '4 (Connection Debug)'), (5, '5 (WinRM Debug)')])),
|
||||
('extra_vars', models.TextField(default='', blank=True)),
|
||||
('job_tags', models.CharField(default='', max_length=1024, blank=True)),
|
||||
('force_handlers', models.BooleanField(default=False)),
|
||||
('skip_tags', models.CharField(default=b'', max_length=1024, blank=True)),
|
||||
('start_at_task', models.CharField(default=b'', max_length=1024, blank=True)),
|
||||
('skip_tags', models.CharField(default='', max_length=1024, blank=True)),
|
||||
('start_at_task', models.CharField(default='', max_length=1024, blank=True)),
|
||||
('become_enabled', models.BooleanField(default=False)),
|
||||
('host_config_key', models.CharField(default=b'', max_length=1024, blank=True)),
|
||||
('host_config_key', models.CharField(default='', max_length=1024, blank=True)),
|
||||
('ask_variables_on_launch', models.BooleanField(default=False)),
|
||||
('survey_enabled', models.BooleanField(default=False)),
|
||||
('survey_spec', jsonfield.fields.JSONField(default={}, blank=True)),
|
||||
@@ -475,9 +475,9 @@ class Migration(migrations.Migration):
|
||||
fields=[
|
||||
('unifiedjobtemplate_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJobTemplate')),
|
||||
('local_path', models.CharField(help_text='Local path (relative to PROJECTS_ROOT) containing playbooks and related files for this project.', max_length=1024, blank=True)),
|
||||
('scm_type', models.CharField(default=b'', max_length=8, verbose_name='SCM Type', blank=True, choices=[(b'', 'Manual'), (b'git', 'Git'), (b'hg', 'Mercurial'), (b'svn', 'Subversion')])),
|
||||
('scm_url', models.CharField(default=b'', max_length=1024, verbose_name='SCM URL', blank=True)),
|
||||
('scm_branch', models.CharField(default=b'', help_text='Specific branch, tag or commit to checkout.', max_length=256, verbose_name='SCM Branch', blank=True)),
|
||||
('scm_type', models.CharField(default='', max_length=8, verbose_name='SCM Type', blank=True, choices=[('', 'Manual'), ('git', 'Git'), ('hg', 'Mercurial'), ('svn', 'Subversion')])),
|
||||
('scm_url', models.CharField(default='', max_length=1024, verbose_name='SCM URL', blank=True)),
|
||||
('scm_branch', models.CharField(default='', help_text='Specific branch, tag or commit to checkout.', max_length=256, verbose_name='SCM Branch', blank=True)),
|
||||
('scm_clean', models.BooleanField(default=False)),
|
||||
('scm_delete_on_update', models.BooleanField(default=False)),
|
||||
('scm_delete_on_next_update', models.BooleanField(default=False, editable=False)),
|
||||
@@ -494,9 +494,9 @@ class Migration(migrations.Migration):
|
||||
fields=[
|
||||
('unifiedjob_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJob')),
|
||||
('local_path', models.CharField(help_text='Local path (relative to PROJECTS_ROOT) containing playbooks and related files for this project.', max_length=1024, blank=True)),
|
||||
('scm_type', models.CharField(default=b'', max_length=8, verbose_name='SCM Type', blank=True, choices=[(b'', 'Manual'), (b'git', 'Git'), (b'hg', 'Mercurial'), (b'svn', 'Subversion')])),
|
||||
('scm_url', models.CharField(default=b'', max_length=1024, verbose_name='SCM URL', blank=True)),
|
||||
('scm_branch', models.CharField(default=b'', help_text='Specific branch, tag or commit to checkout.', max_length=256, verbose_name='SCM Branch', blank=True)),
|
||||
('scm_type', models.CharField(default='', max_length=8, verbose_name='SCM Type', blank=True, choices=[('', 'Manual'), ('git', 'Git'), ('hg', 'Mercurial'), ('svn', 'Subversion')])),
|
||||
('scm_url', models.CharField(default='', max_length=1024, verbose_name='SCM URL', blank=True)),
|
||||
('scm_branch', models.CharField(default='', help_text='Specific branch, tag or commit to checkout.', max_length=256, verbose_name='SCM Branch', blank=True)),
|
||||
('scm_clean', models.BooleanField(default=False)),
|
||||
('scm_delete_on_update', models.BooleanField(default=False)),
|
||||
],
|
||||
@@ -506,8 +506,8 @@ class Migration(migrations.Migration):
|
||||
name='SystemJob',
|
||||
fields=[
|
||||
('unifiedjob_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJob')),
|
||||
('job_type', models.CharField(default=b'', max_length=32, blank=True, choices=[(b'cleanup_jobs', 'Remove jobs older than a certain number of days'), (b'cleanup_activitystream', 'Remove activity stream entries older than a certain number of days'), (b'cleanup_deleted', 'Purge previously deleted items from the database'), (b'cleanup_facts', 'Purge and/or reduce the granularity of system tracking data')])),
|
||||
('extra_vars', models.TextField(default=b'', blank=True)),
|
||||
('job_type', models.CharField(default='', max_length=32, blank=True, choices=[('cleanup_jobs', 'Remove jobs older than a certain number of days'), ('cleanup_activitystream', 'Remove activity stream entries older than a certain number of days'), ('cleanup_deleted', 'Purge previously deleted items from the database'), ('cleanup_facts', 'Purge and/or reduce the granularity of system tracking data')])),
|
||||
('extra_vars', models.TextField(default='', blank=True)),
|
||||
],
|
||||
options={
|
||||
'ordering': ('id',),
|
||||
@@ -518,7 +518,7 @@ class Migration(migrations.Migration):
|
||||
name='SystemJobTemplate',
|
||||
fields=[
|
||||
('unifiedjobtemplate_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJobTemplate')),
|
||||
('job_type', models.CharField(default=b'', max_length=32, blank=True, choices=[(b'cleanup_jobs', 'Remove jobs older than a certain number of days'), (b'cleanup_activitystream', 'Remove activity stream entries older than a certain number of days'), (b'cleanup_deleted', 'Purge previously deleted items from the database'), (b'cleanup_facts', 'Purge and/or reduce the granularity of system tracking data')])),
|
||||
('job_type', models.CharField(default='', max_length=32, blank=True, choices=[('cleanup_jobs', 'Remove jobs older than a certain number of days'), ('cleanup_activitystream', 'Remove activity stream entries older than a certain number of days'), ('cleanup_deleted', 'Purge previously deleted items from the database'), ('cleanup_facts', 'Purge and/or reduce the granularity of system tracking data')])),
|
||||
],
|
||||
bases=('main.unifiedjobtemplate', models.Model),
|
||||
),
|
||||
|
||||
@@ -105,24 +105,24 @@ def create_system_job_templates(apps, schema_editor):
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
replaces = [(b'main', '0002_v300_tower_settings_changes'),
|
||||
(b'main', '0003_v300_notification_changes'),
|
||||
(b'main', '0004_v300_fact_changes'),
|
||||
(b'main', '0005_v300_migrate_facts'),
|
||||
(b'main', '0006_v300_active_flag_cleanup'),
|
||||
(b'main', '0007_v300_active_flag_removal'),
|
||||
(b'main', '0008_v300_rbac_changes'),
|
||||
(b'main', '0009_v300_rbac_migrations'),
|
||||
(b'main', '0010_v300_create_system_job_templates'),
|
||||
(b'main', '0011_v300_credential_domain_field'),
|
||||
(b'main', '0012_v300_create_labels'),
|
||||
(b'main', '0013_v300_label_changes'),
|
||||
(b'main', '0014_v300_invsource_cred'),
|
||||
(b'main', '0015_v300_label_changes'),
|
||||
(b'main', '0016_v300_prompting_changes'),
|
||||
(b'main', '0017_v300_prompting_migrations'),
|
||||
(b'main', '0018_v300_host_ordering'),
|
||||
(b'main', '0019_v300_new_azure_credential'),]
|
||||
replaces = [('main', '0002_v300_tower_settings_changes'),
|
||||
('main', '0003_v300_notification_changes'),
|
||||
('main', '0004_v300_fact_changes'),
|
||||
('main', '0005_v300_migrate_facts'),
|
||||
('main', '0006_v300_active_flag_cleanup'),
|
||||
('main', '0007_v300_active_flag_removal'),
|
||||
('main', '0008_v300_rbac_changes'),
|
||||
('main', '0009_v300_rbac_migrations'),
|
||||
('main', '0010_v300_create_system_job_templates'),
|
||||
('main', '0011_v300_credential_domain_field'),
|
||||
('main', '0012_v300_create_labels'),
|
||||
('main', '0013_v300_label_changes'),
|
||||
('main', '0014_v300_invsource_cred'),
|
||||
('main', '0015_v300_label_changes'),
|
||||
('main', '0016_v300_prompting_changes'),
|
||||
('main', '0017_v300_prompting_migrations'),
|
||||
('main', '0018_v300_host_ordering'),
|
||||
('main', '0019_v300_new_azure_credential'),]
|
||||
|
||||
dependencies = [
|
||||
('taggit', '0002_auto_20150616_2121'),
|
||||
@@ -143,7 +143,7 @@ class Migration(migrations.Migration):
|
||||
('description', models.TextField()),
|
||||
('category', models.CharField(max_length=128)),
|
||||
('value', models.TextField(blank=True)),
|
||||
('value_type', models.CharField(max_length=12, choices=[(b'string', 'String'), (b'int', 'Integer'), (b'float', 'Decimal'), (b'json', 'JSON'), (b'bool', 'Boolean'), (b'password', 'Password'), (b'list', 'List')])),
|
||||
('value_type', models.CharField(max_length=12, choices=[('string', 'String'), ('int', 'Integer'), ('float', 'Decimal'), ('json', 'JSON'), ('bool', 'Boolean'), ('password', 'Password'), ('list', 'List')])),
|
||||
('user', models.ForeignKey(related_name='settings', default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
|
||||
],
|
||||
),
|
||||
@@ -154,12 +154,12 @@ class Migration(migrations.Migration):
|
||||
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
|
||||
('created', models.DateTimeField(default=None, editable=False)),
|
||||
('modified', models.DateTimeField(default=None, editable=False)),
|
||||
('status', models.CharField(default=b'pending', max_length=20, editable=False, choices=[(b'pending', 'Pending'), (b'successful', 'Successful'), (b'failed', 'Failed')])),
|
||||
('error', models.TextField(default=b'', editable=False, blank=True)),
|
||||
('status', models.CharField(default='pending', max_length=20, editable=False, choices=[('pending', 'Pending'), ('successful', 'Successful'), ('failed', 'Failed')])),
[diff — Django squashed migration (hunks @@ -172,9 +172,9 @@, @@ -381,7 +381,7 @@, @@ -422,122 +422,122 @@, @@ -545,18 +545,18 @@, @@ -565,7 +565,7 @@, @@ -625,7 +625,7 @@, @@ -640,17 +640,17 @@, @@ -702,41 +702,41 @@): covers the Notification/NotificationTemplate, Role, and Label table definitions, the ImplicitRoleField additions for credential, custom inventory script, inventory, job template, organization, project, and team, the system job template job_type choices, and the credential (domain, authorize_password, client, secret, subscription, tenant, kind), host instance_id, and inventory source/update source fields. Every Python 2 byte-string literal (b'...') in field defaults, choices lists, and ImplicitRoleField parent_role/null arguments is replaced by the equivalent plain string literal ('...'); the hunks contain no other changes.]
[diff — squashed migration replacing 0020_v300_labels_changes through 0028_v300_org_team_cascade (hunks @@ -9,20 +9,20 @@, @@ -63,22 +63,22 @@, @@ -90,17 +90,17 @@, @@ -110,7 +110,7 @@). The module-level imports become explicit relative imports:
-import _squashed
-from _squashed_30 import SQUASHED_30
+from . import _squashed
+from ._squashed_30 import SQUASHED_30
The (b'main', ...) tuples in the replaces list and the byte-string literals in the adhoccommand extra_vars, credential kind/use_role/read_role, inventory source/update source, and team member_role/read_role fields drop their b prefixes, as in the previous file.]
[diff — 0034_v310_release squashed migration (hunks @@ -8,8 +8,8 @@ through @@ -610,7 +610,7 @@). The same relative-import fix is applied:
-import _squashed
-from _squashed_30 import SQUASHED_30
+from . import _squashed
+from ._squashed_30 import SQUASHED_30
and (b'main', '0034_v310_release') becomes ('main', '0034_v310_release') in the replaces list. The remaining hunks convert b'...' literals to '...' in the job event uuid/parent_uuid/playbook/stdout fields and event choices, ad hoc command event fields, credential become_method, unified job launch_type/execution_node/job_explanation, the new WorkflowJob/WorkflowJobTemplate models and their ImplicitRoleFields, project and project update scm_revision/scm_type/scm_url and job_type, and host instance_id.]
[diff — squashed migration built on _squashed_31 (hunks @@ -2,8 +2,8 @@ through @@ -496,6 +496,6 @@). Imports become relative:
-import _squashed
-from _squashed_31 import SQUASHED_31
+from . import _squashed
+from ._squashed_31 import SQUASHED_31
The remaining hunks drop b prefixes from literals in the inventory kind (smart inventory) choices, inventory source scm_last_revision/source/source_path fields and verbosity choices, unified job launch_type and execution_node help text, the new CredentialType model's kind choices and description, and credential become_method (now including 'runas').]
@@ -20,11 +20,11 @@ class Migration(migrations.Migration):
         migrations.AlterField(
             model_name='inventorysource',
             name='source',
-            field=models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'File, Directory or Script'), (b'scm', 'Sourced from a Project'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure_rm', 'Microsoft Azure Resource Manager'), (b'vmware', 'VMware vCenter'), (b'satellite6', 'Red Hat Satellite 6'), (b'cloudforms', 'Red Hat CloudForms'), (b'openstack', 'OpenStack'), (b'rhv', 'Red Hat Virtualization'), (b'tower', 'Ansible Tower'), (b'custom', 'Custom Script')]),
+            field=models.CharField(default='', max_length=32, blank=True, choices=[('', 'Manual'), ('file', 'File, Directory or Script'), ('scm', 'Sourced from a Project'), ('ec2', 'Amazon EC2'), ('gce', 'Google Compute Engine'), ('azure_rm', 'Microsoft Azure Resource Manager'), ('vmware', 'VMware vCenter'), ('satellite6', 'Red Hat Satellite 6'), ('cloudforms', 'Red Hat CloudForms'), ('openstack', 'OpenStack'), ('rhv', 'Red Hat Virtualization'), ('tower', 'Ansible Tower'), ('custom', 'Custom Script')]),
         ),
         migrations.AlterField(
             model_name='inventoryupdate',
            name='source',
-            field=models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'File, Directory or Script'), (b'scm', 'Sourced from a Project'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure_rm', 'Microsoft Azure Resource Manager'), (b'vmware', 'VMware vCenter'), (b'satellite6', 'Red Hat Satellite 6'), (b'cloudforms', 'Red Hat CloudForms'), (b'openstack', 'OpenStack'), (b'rhv', 'Red Hat Virtualization'), (b'tower', 'Ansible Tower'), (b'custom', 'Custom Script')]),
+            field=models.CharField(default='', max_length=32, blank=True, choices=[('', 'Manual'), ('file', 'File, Directory or Script'), ('scm', 'Sourced from a Project'), ('ec2', 'Amazon EC2'), ('gce', 'Google Compute Engine'), ('azure_rm', 'Microsoft Azure Resource Manager'), ('vmware', 'VMware vCenter'), ('satellite6', 'Red Hat Satellite 6'), ('cloudforms', 'Red Hat CloudForms'), ('openstack', 'OpenStack'), ('rhv', 'Red Hat Virtualization'), ('tower', 'Ansible Tower'), ('custom', 'Custom Script')]),
         ),
     ]
[diff — migration adding event models (hunks @@ -21,9 +21,9 @@, @@ -39,17 +39,17 @@, @@ -66,9 +66,9 @@): in three CreateModel field lists (created, modified, event_data, uuid, counter, stdout, verbosity, start_line, end_line, plus the job-event style event choices and the playbook, play, role, and task fields), the b'...' defaults and choice keys become plain '...' strings; the surrounding field definitions are unchanged.]
|
||||
('start_line', models.PositiveIntegerField(default=0, editable=False)),
|
||||
('end_line', models.PositiveIntegerField(default=0, editable=False)),
@@ -18,77 +18,77 @@ class Migration(migrations.Migration):
|
||||
migrations.AddField(
|
||||
model_name='organization',
|
||||
name='execute_role',
|
||||
field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=b'admin_role', related_name='+', to='main.Role'),
|
||||
field=awx.main.fields.ImplicitRoleField(null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='organization',
|
||||
name='job_template_admin_role',
|
||||
field=awx.main.fields.ImplicitRoleField(editable=False, null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=b'admin_role', related_name='+', to='main.Role'),
|
||||
field=awx.main.fields.ImplicitRoleField(editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='organization',
|
||||
name='credential_admin_role',
|
||||
field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=b'admin_role', related_name='+', to='main.Role'),
|
||||
field=awx.main.fields.ImplicitRoleField(null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='organization',
|
||||
name='inventory_admin_role',
|
||||
field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=b'admin_role', related_name='+', to='main.Role'),
|
||||
field=awx.main.fields.ImplicitRoleField(null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='organization',
|
||||
name='project_admin_role',
|
||||
field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=b'admin_role', related_name='+', to='main.Role'),
|
||||
field=awx.main.fields.ImplicitRoleField(null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='organization',
|
||||
name='workflow_admin_role',
|
||||
field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=b'admin_role', related_name='+', to='main.Role'),
|
||||
field=awx.main.fields.ImplicitRoleField(null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='organization',
|
||||
name='notification_admin_role',
|
||||
field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=b'admin_role', related_name='+', to='main.Role'),
|
||||
field=awx.main.fields.ImplicitRoleField(null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='credential',
|
||||
name='admin_role',
|
||||
field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=[b'singleton:system_administrator', b'organization.credential_admin_role'], related_name='+', to='main.Role'),
|
||||
field=awx.main.fields.ImplicitRoleField(null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['singleton:system_administrator', 'organization.credential_admin_role'], related_name='+', to='main.Role'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='inventory',
|
||||
name='admin_role',
|
||||
field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=b'organization.inventory_admin_role', related_name='+', to='main.Role'),
|
||||
field=awx.main.fields.ImplicitRoleField(null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='organization.inventory_admin_role', related_name='+', to='main.Role'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='project',
|
||||
name='admin_role',
|
||||
field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=[b'organization.project_admin_role', b'singleton:system_administrator'], related_name='+', to='main.Role'),
|
||||
field=awx.main.fields.ImplicitRoleField(null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['organization.project_admin_role', 'singleton:system_administrator'], related_name='+', to='main.Role'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='workflowjobtemplate',
|
||||
name='admin_role',
|
||||
field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=[b'singleton:system_administrator', b'organization.workflow_admin_role'], related_name='+', to='main.Role'),
|
||||
field=awx.main.fields.ImplicitRoleField(null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['singleton:system_administrator', 'organization.workflow_admin_role'], related_name='+', to='main.Role'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='workflowjobtemplate',
|
||||
name='execute_role',
|
||||
field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=[b'admin_role', b'organization.execute_role'], related_name='+', to='main.Role'),
|
||||
field=awx.main.fields.ImplicitRoleField(null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['admin_role', 'organization.execute_role'], related_name='+', to='main.Role'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='jobtemplate',
|
||||
name='admin_role',
|
||||
field=awx.main.fields.ImplicitRoleField(editable=False, null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=[b'project.organization.job_template_admin_role', b'inventory.organization.job_template_admin_role'], related_name='+', to='main.Role'),
|
||||
field=awx.main.fields.ImplicitRoleField(editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['project.organization.job_template_admin_role', 'inventory.organization.job_template_admin_role'], related_name='+', to='main.Role'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='jobtemplate',
|
||||
name='execute_role',
|
||||
field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=[b'admin_role', b'project.organization.execute_role', b'inventory.organization.execute_role'], related_name='+', to='main.Role'),
|
||||
field=awx.main.fields.ImplicitRoleField(null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['admin_role', 'project.organization.execute_role', 'inventory.organization.execute_role'], related_name='+', to='main.Role'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='organization',
|
||||
name='member_role',
|
||||
field=awx.main.fields.ImplicitRoleField(editable=False, null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=[b'admin_role', b'execute_role', b'project_admin_role', b'inventory_admin_role', b'workflow_admin_role', b'notification_admin_role', b'credential_admin_role', b'job_template_admin_role'], related_name='+', to='main.Role'),
|
||||
field=awx.main.fields.ImplicitRoleField(editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['admin_role', 'execute_role', 'project_admin_role', 'inventory_admin_role', 'workflow_admin_role', 'notification_admin_role', 'credential_admin_role', 'job_template_admin_role'], related_name='+', to='main.Role'),
|
||||
),
]
@@ -35,8 +35,8 @@ class Migration(migrations.Migration):
|
||||
('skip_authorization', models.BooleanField(default=False)),
|
||||
('created', models.DateTimeField(auto_now_add=True)),
|
||||
('updated', models.DateTimeField(auto_now=True)),
|
||||
('description', models.TextField(blank=True, default=b'')),
|
||||
('logo_data', models.TextField(default=b'', editable=False, validators=[django.core.validators.RegexValidator(re.compile(b'.*'))])),
|
||||
('description', models.TextField(blank=True, default='')),
|
||||
('logo_data', models.TextField(default='', editable=False, validators=[django.core.validators.RegexValidator(re.compile('.*'))])),
|
||||
('user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='main_oauth2application', to=settings.AUTH_USER_MODEL)),
|
||||
],
|
||||
options={
|
||||
@@ -52,7 +52,7 @@ class Migration(migrations.Migration):
|
||||
('scope', models.TextField(blank=True)),
|
||||
('created', models.DateTimeField(auto_now_add=True)),
|
||||
('updated', models.DateTimeField(auto_now=True)),
|
||||
('description', models.CharField(blank=True, default=b'', max_length=200)),
|
||||
('description', models.CharField(blank=True, default='', max_length=200)),
|
||||
('last_used', models.DateTimeField(default=None, editable=False, null=True)),
|
||||
('application', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.OAUTH2_PROVIDER_APPLICATION_MODEL)),
|
||||
('user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='main_oauth2accesstoken', to=settings.AUTH_USER_MODEL)),
@@ -20,7 +20,7 @@ class Migration(migrations.Migration):
|
||||
migrations.AlterField(
|
||||
model_name='oauth2accesstoken',
|
||||
name='scope',
|
||||
field=models.TextField(blank=True, default=b'write', help_text="Allowed scopes, further restricts user's permissions."),
|
||||
field=models.TextField(blank=True, default='write', help_text="Allowed scopes, further restricts user's permissions."),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='oauth2accesstoken',
|
||||
@@ -30,7 +30,7 @@ class Migration(migrations.Migration):
|
||||
migrations.AlterField(
|
||||
model_name='oauth2application',
|
||||
name='authorization_grant_type',
|
||||
field=models.CharField(choices=[(b'authorization-code', 'Authorization code'), (b'implicit', 'Implicit'), (b'password', 'Resource owner password-based'), (b'client-credentials', 'Client credentials')], help_text='The Grant type the user must use for acquire tokens for this application.', max_length=32),
|
||||
field=models.CharField(choices=[('authorization-code', 'Authorization code'), ('implicit', 'Implicit'), ('password', 'Resource owner password-based'), ('client-credentials', 'Client credentials')], help_text='The Grant type the user must use for acquire tokens for this application.', max_length=32),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='oauth2application',
|
||||
@@ -40,7 +40,7 @@ class Migration(migrations.Migration):
|
||||
migrations.AlterField(
|
||||
model_name='oauth2application',
|
||||
name='client_type',
|
||||
field=models.CharField(choices=[(b'confidential', 'Confidential'), (b'public', 'Public')], help_text='Set to Public or Confidential depending on how secure the client device is.', max_length=32),
|
||||
field=models.CharField(choices=[('confidential', 'Confidential'), ('public', 'Public')], help_text='Set to Public or Confidential depending on how secure the client device is.', max_length=32),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='oauth2application',
@@ -16,6 +16,6 @@ class Migration(migrations.Migration):
|
||||
migrations.AlterField(
|
||||
model_name='oauth2accesstoken',
|
||||
name='scope',
|
||||
field=models.TextField(blank=True, default=b'write', help_text="Allowed scopes, further restricts user's permissions. Must be a simple space-separated string with allowed scopes ['read', 'write']."),
|
||||
field=models.TextField(blank=True, default='write', help_text="Allowed scopes, further restricts user's permissions. Must be a simple space-separated string with allowed scopes ['read', 'write']."),
|
||||
),
|
||||
]
@@ -15,6 +15,6 @@ class Migration(migrations.Migration):
|
||||
migrations.AddField(
|
||||
model_name='unifiedjob',
|
||||
name='controller_node',
|
||||
field=models.TextField(blank=True, default=b'', editable=False, help_text='The instance that managed the isolated execution environment.'),
|
||||
field=models.TextField(blank=True, default='', editable=False, help_text='The instance that managed the isolated execution environment.'),
|
||||
),
|
||||
]
@@ -18,12 +18,12 @@ class Migration(migrations.Migration):
|
||||
migrations.AlterField(
|
||||
model_name='organization',
|
||||
name='member_role',
|
||||
field=awx.main.fields.ImplicitRoleField(editable=False, null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=[b'admin_role'], related_name='+', to='main.Role'),
|
||||
field=awx.main.fields.ImplicitRoleField(editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['admin_role'], related_name='+', to='main.Role'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='organization',
|
||||
name='read_role',
|
||||
field=awx.main.fields.ImplicitRoleField(editable=False, null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=[b'member_role', b'auditor_role', b'execute_role', b'project_admin_role', b'inventory_admin_role', b'workflow_admin_role', b'notification_admin_role', b'credential_admin_role', b'job_template_admin_role'], related_name='+', to='main.Role'),
|
||||
field=awx.main.fields.ImplicitRoleField(editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['member_role', 'auditor_role', 'execute_role', 'project_admin_role', 'inventory_admin_role', 'workflow_admin_role', 'notification_admin_role', 'credential_admin_role', 'job_template_admin_role'], related_name='+', to='main.Role'),
|
||||
),
|
||||
migrations.RunPython(rebuild_role_hierarchy),
|
||||
]
@@ -15,6 +15,6 @@ class Migration(migrations.Migration):
|
||||
migrations.AlterField(
|
||||
model_name='oauth2application',
|
||||
name='authorization_grant_type',
|
||||
field=models.CharField(choices=[(b'authorization-code', 'Authorization code'), (b'implicit', 'Implicit'), (b'password', 'Resource owner password-based')], help_text='The Grant type the user must use for acquire tokens for this application.', max_length=32),
|
||||
field=models.CharField(choices=[('authorization-code', 'Authorization code'), ('implicit', 'Implicit'), ('password', 'Resource owner password-based')], help_text='The Grant type the user must use for acquire tokens for this application.', max_length=32),
|
||||
),
|
||||
]
@@ -17,131 +17,131 @@ class Migration(migrations.Migration):
|
||||
migrations.AlterField(
|
||||
model_name='credential',
|
||||
name='created_by',
|
||||
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'credential', u'model_name': 'credential'}(class)s_created+", to=settings.AUTH_USER_MODEL),
|
||||
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'credential', 'model_name': 'credential', 'app_label': 'main'}(class)s_created+", to=settings.AUTH_USER_MODEL),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='credential',
|
||||
name='modified_by',
|
||||
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'credential', u'model_name': 'credential'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
|
||||
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'credential', 'model_name': 'credential', 'app_label': 'main'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='credentialtype',
|
||||
name='created_by',
|
||||
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'credentialtype', u'model_name': 'credentialtype'}(class)s_created+", to=settings.AUTH_USER_MODEL),
|
||||
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'credentialtype', 'model_name': 'credentialtype', 'app_label': 'main'}(class)s_created+", to=settings.AUTH_USER_MODEL),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='credentialtype',
|
||||
name='modified_by',
|
||||
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'credentialtype', u'model_name': 'credentialtype'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
|
||||
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'credentialtype', 'model_name': 'credentialtype', 'app_label': 'main'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='custominventoryscript',
|
||||
name='created_by',
|
||||
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'custominventoryscript', u'model_name': 'custominventoryscript'}(class)s_created+", to=settings.AUTH_USER_MODEL),
|
||||
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'custominventoryscript', 'model_name': 'custominventoryscript', 'app_label': 'main'}(class)s_created+", to=settings.AUTH_USER_MODEL),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='custominventoryscript',
|
||||
name='modified_by',
|
||||
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'custominventoryscript', u'model_name': 'custominventoryscript'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
|
||||
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'custominventoryscript', 'model_name': 'custominventoryscript', 'app_label': 'main'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='group',
|
||||
name='created_by',
|
||||
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'group', u'model_name': 'group'}(class)s_created+", to=settings.AUTH_USER_MODEL),
|
||||
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'group', 'model_name': 'group', 'app_label': 'main'}(class)s_created+", to=settings.AUTH_USER_MODEL),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='group',
|
||||
name='modified_by',
|
||||
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'group', u'model_name': 'group'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
|
||||
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'group', 'model_name': 'group', 'app_label': 'main'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='host',
|
||||
name='created_by',
|
||||
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'host', u'model_name': 'host'}(class)s_created+", to=settings.AUTH_USER_MODEL),
|
||||
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'host', 'model_name': 'host', 'app_label': 'main'}(class)s_created+", to=settings.AUTH_USER_MODEL),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='host',
|
||||
name='modified_by',
|
||||
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'host', u'model_name': 'host'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
|
||||
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'host', 'model_name': 'host', 'app_label': 'main'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='inventory',
|
||||
name='created_by',
|
||||
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'inventory', u'model_name': 'inventory'}(class)s_created+", to=settings.AUTH_USER_MODEL),
|
||||
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'inventory', 'model_name': 'inventory', 'app_label': 'main'}(class)s_created+", to=settings.AUTH_USER_MODEL),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='inventory',
|
||||
name='modified_by',
|
||||
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'inventory', u'model_name': 'inventory'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
|
||||
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'inventory', 'model_name': 'inventory', 'app_label': 'main'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='label',
|
||||
name='created_by',
|
||||
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'label', u'model_name': 'label'}(class)s_created+", to=settings.AUTH_USER_MODEL),
|
||||
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'label', 'model_name': 'label', 'app_label': 'main'}(class)s_created+", to=settings.AUTH_USER_MODEL),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='label',
|
||||
name='modified_by',
|
||||
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'label', u'model_name': 'label'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
|
||||
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'label', 'model_name': 'label', 'app_label': 'main'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='notificationtemplate',
|
||||
name='created_by',
|
||||
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'notificationtemplate', u'model_name': 'notificationtemplate'}(class)s_created+", to=settings.AUTH_USER_MODEL),
|
||||
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'notificationtemplate', 'model_name': 'notificationtemplate', 'app_label': 'main'}(class)s_created+", to=settings.AUTH_USER_MODEL),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='notificationtemplate',
|
||||
name='modified_by',
|
||||
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'notificationtemplate', u'model_name': 'notificationtemplate'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
|
||||
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'notificationtemplate', 'model_name': 'notificationtemplate', 'app_label': 'main'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='organization',
|
||||
name='created_by',
|
||||
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'organization', u'model_name': 'organization'}(class)s_created+", to=settings.AUTH_USER_MODEL),
|
||||
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'organization', 'model_name': 'organization', 'app_label': 'main'}(class)s_created+", to=settings.AUTH_USER_MODEL),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='organization',
|
||||
name='modified_by',
|
||||
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'organization', u'model_name': 'organization'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
|
||||
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'organization', 'model_name': 'organization', 'app_label': 'main'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='schedule',
|
||||
name='created_by',
|
||||
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'schedule', u'model_name': 'schedule'}(class)s_created+", to=settings.AUTH_USER_MODEL),
|
||||
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'schedule', 'model_name': 'schedule', 'app_label': 'main'}(class)s_created+", to=settings.AUTH_USER_MODEL),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='schedule',
|
||||
name='modified_by',
|
||||
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'schedule', u'model_name': 'schedule'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
|
||||
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'schedule', 'model_name': 'schedule', 'app_label': 'main'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='team',
|
||||
name='created_by',
|
||||
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'team', u'model_name': 'team'}(class)s_created+", to=settings.AUTH_USER_MODEL),
|
||||
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'team', 'model_name': 'team', 'app_label': 'main'}(class)s_created+", to=settings.AUTH_USER_MODEL),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='team',
|
||||
name='modified_by',
|
||||
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'team', u'model_name': 'team'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
|
||||
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'team', 'model_name': 'team', 'app_label': 'main'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='unifiedjob',
|
||||
name='created_by',
|
||||
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'unifiedjob', u'model_name': 'unifiedjob'}(class)s_created+", to=settings.AUTH_USER_MODEL),
|
||||
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'unifiedjob', 'model_name': 'unifiedjob', 'app_label': 'main'}(class)s_created+", to=settings.AUTH_USER_MODEL),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='unifiedjob',
|
||||
name='modified_by',
|
||||
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'unifiedjob', u'model_name': 'unifiedjob'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
|
||||
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'unifiedjob', 'model_name': 'unifiedjob', 'app_label': 'main'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='unifiedjobtemplate',
|
||||
name='created_by',
|
||||
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'unifiedjobtemplate', u'model_name': 'unifiedjobtemplate'}(class)s_created+", to=settings.AUTH_USER_MODEL),
|
||||
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'unifiedjobtemplate', 'model_name': 'unifiedjobtemplate', 'app_label': 'main'}(class)s_created+", to=settings.AUTH_USER_MODEL),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='unifiedjobtemplate',
|
||||
name='modified_by',
|
||||
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{u'app_label': 'main', u'class': 'unifiedjobtemplate', u'model_name': 'unifiedjobtemplate'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
|
||||
field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'unifiedjobtemplate', 'model_name': 'unifiedjobtemplate', 'app_label': 'main'}(class)s_modified+", to=settings.AUTH_USER_MODEL),
|
||||
),
|
||||
]
@@ -0,0 +1,19 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Generated by Django 1.11.11 on 2018-05-18 17:49
|
||||
from __future__ import unicode_literals
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('main', '0051_v340_job_slicing'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RemoveField(
|
||||
model_name='project',
|
||||
name='scm_delete_on_next_update',
|
||||
),
|
||||
]
|
||||
awx/main/migrations/0053_v340_workflow_inventory.py (new file, 37 lines)
@@ -0,0 +1,37 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Generated by Django 1.11.11 on 2018-09-27 19:50
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import awx.main.fields
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('main', '0052_v340_remove_project_scm_delete_on_next_update'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='workflowjob',
|
||||
name='char_prompts',
|
||||
field=awx.main.fields.JSONField(blank=True, default={}),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='workflowjob',
|
||||
name='inventory',
|
||||
field=models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='workflowjobs', to='main.Inventory'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='workflowjobtemplate',
|
||||
name='ask_inventory_on_launch',
|
||||
field=awx.main.fields.AskForField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='workflowjobtemplate',
|
||||
name='inventory',
|
||||
field=models.ForeignKey(blank=True, default=None, help_text='Inventory applied to all job templates in workflow that prompt for inventory.', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='workflowjobtemplates', to='main.Inventory'),
|
||||
),
|
||||
]
|
||||
awx/main/migrations/0054_v340_workflow_convergence.py (new file, 20 lines)
@@ -0,0 +1,20 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Generated by Django 1.11.11 on 2018-09-28 14:23
|
||||
from __future__ import unicode_literals
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('main', '0053_v340_workflow_inventory'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='workflowjobnode',
|
||||
name='do_not_run',
|
||||
field=models.BooleanField(default=False, help_text='Indicates that a job will not be created when True. Workflow runtime semantics will mark this True if the node is in a path that will decidedly not be run. A value of False means the node may not run.'),
|
||||
),
|
||||
]
|
||||
awx/main/migrations/0055_v340_add_grafana_notification.py (new file, 25 lines)
@@ -0,0 +1,25 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Generated by Django 1.11.16 on 2019-01-20 12:00
|
||||
from __future__ import unicode_literals
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('main', '0054_v340_workflow_convergence'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='notification',
|
||||
name='notification_type',
|
||||
field=models.CharField(choices=[('email', 'Email'), ('slack', 'Slack'), ('twilio', 'Twilio'), ('pagerduty', 'Pagerduty'), ('grafana', 'Grafana'), ('hipchat', 'HipChat'), ('webhook', 'Webhook'), ('mattermost', 'Mattermost'), ('rocketchat', 'Rocket.Chat'), ('irc', 'IRC')], max_length=32),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='notificationtemplate',
|
||||
name='notification_type',
|
||||
field=models.CharField(choices=[('email', 'Email'), ('slack', 'Slack'), ('twilio', 'Twilio'), ('pagerduty', 'Pagerduty'), ('grafana', 'Grafana'), ('hipchat', 'HipChat'), ('webhook', 'Webhook'), ('mattermost', 'Mattermost'), ('rocketchat', 'Rocket.Chat'), ('irc', 'IRC')], max_length=32),
|
||||
),
|
||||
]
|
||||
@@ -45,8 +45,8 @@ def replaces(squashed, applied=False):
|
||||
'''
|
||||
squashed_keys, key_index = squash_data(squashed)
|
||||
if applied:
|
||||
return [(b'main', key) for key in squashed_keys[:key_index]]
|
||||
return [(b'main', key) for key in squashed_keys[key_index:]]
|
||||
return [('main', key) for key in squashed_keys[:key_index]]
|
||||
return [('main', key) for key in squashed_keys[key_index:]]
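A hedged illustration of the shape returned by replaces() above; the migration names and the stand-in for squash_data() are invented, only the ('main', name) tuple form and the str-not-bytes app label come from the diff:

    # squash_data() is assumed to return the ordered squashed keys plus the
    # index of the first unapplied one; the values below are placeholders.
    squashed_keys, key_index = ['0001_initial', '0002_squashed_update'], 1
    applied = [('main', key) for key in squashed_keys[:key_index]]
    pending = [('main', key) for key in squashed_keys[key_index:]]
    print(applied)  # [('main', '0001_initial')]
    print(pending)  # [('main', '0002_squashed_update')]
    # With b'main' the label would be bytes and would not match the str app
    # labels Django's migration loader uses on Python 3.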
def operations(squashed, applied=False):
@@ -42,12 +42,12 @@ SQUASHED_30 = {
|
||||
migrations.AlterField(
|
||||
model_name='credential',
|
||||
name='admin_role',
|
||||
field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'singleton:system_administrator', b'organization.admin_role'], to='main.Role', null=b'True'),
|
||||
field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['singleton:system_administrator', 'organization.admin_role'], to='main.Role', null='True'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='credential',
|
||||
name='use_role',
|
||||
field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'admin_role'], to='main.Role', null=b'True'),
|
||||
field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['admin_role'], to='main.Role', null='True'),
|
||||
),
|
||||
],
|
||||
'0033_v303_v245_host_variable_fix': [
@@ -17,24 +17,24 @@ SQUASHED_31 = {
|
||||
migrations.AlterField(
|
||||
model_name='project',
|
||||
name='scm_type',
|
||||
field=models.CharField(default=b'', choices=[(b'', 'Manual'), (b'git', 'Git'), (b'hg', 'Mercurial'), (b'svn', 'Subversion'), (b'insights', 'Red Hat Insights')], max_length=8, blank=True, help_text='Specifies the source control system used to store the project.', verbose_name='SCM Type'),
|
||||
field=models.CharField(default='', choices=[('', 'Manual'), ('git', 'Git'), ('hg', 'Mercurial'), ('svn', 'Subversion'), ('insights', 'Red Hat Insights')], max_length=8, blank=True, help_text='Specifies the source control system used to store the project.', verbose_name='SCM Type'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='projectupdate',
|
||||
name='scm_type',
|
||||
field=models.CharField(default=b'', choices=[(b'', 'Manual'), (b'git', 'Git'), (b'hg', 'Mercurial'), (b'svn', 'Subversion'), (b'insights', 'Red Hat Insights')], max_length=8, blank=True, help_text='Specifies the source control system used to store the project.', verbose_name='SCM Type'),
|
||||
field=models.CharField(default='', choices=[('', 'Manual'), ('git', 'Git'), ('hg', 'Mercurial'), ('svn', 'Subversion'), ('insights', 'Red Hat Insights')], max_length=8, blank=True, help_text='Specifies the source control system used to store the project.', verbose_name='SCM Type'),
|
||||
),
|
||||
],
|
||||
'0036_v311_insights': [
|
||||
migrations.AlterField(
|
||||
model_name='project',
|
||||
name='scm_type',
|
||||
field=models.CharField(default=b'', choices=[(b'', 'Manual'), (b'git', 'Git'), (b'hg', 'Mercurial'), (b'svn', 'Subversion'), (b'insights', 'Red Hat Insights')], max_length=8, blank=True, help_text='Specifies the source control system used to store the project.', verbose_name='SCM Type'),
|
||||
field=models.CharField(default='', choices=[('', 'Manual'), ('git', 'Git'), ('hg', 'Mercurial'), ('svn', 'Subversion'), ('insights', 'Red Hat Insights')], max_length=8, blank=True, help_text='Specifies the source control system used to store the project.', verbose_name='SCM Type'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='projectupdate',
|
||||
name='scm_type',
|
||||
field=models.CharField(default=b'', choices=[(b'', 'Manual'), (b'git', 'Git'), (b'hg', 'Mercurial'), (b'svn', 'Subversion'), (b'insights', 'Red Hat Insights')], max_length=8, blank=True, help_text='Specifies the source control system used to store the project.', verbose_name='SCM Type'),
|
||||
field=models.CharField(default='', choices=[('', 'Manual'), ('git', 'Git'), ('hg', 'Mercurial'), ('svn', 'Subversion'), ('insights', 'Red Hat Insights')], max_length=8, blank=True, help_text='Specifies the source control system used to store the project.', verbose_name='SCM Type'),
|
||||
),
|
||||
],
|
||||
'0037_v313_instance_version': [
@@ -134,6 +134,9 @@ User.add_to_class('is_in_enterprise_category', user_is_in_enterprise_category)
|
||||
|
||||
|
||||
def o_auth2_application_get_absolute_url(self, request=None):
|
||||
# this page does not exist in v1
|
||||
if request.version == 'v1':
|
||||
return reverse('api:o_auth2_application_detail', kwargs={'pk': self.pk}) # use default version
|
||||
return reverse('api:o_auth2_application_detail', kwargs={'pk': self.pk}, request=request)
@@ -141,15 +144,14 @@ OAuth2Application.add_to_class('get_absolute_url', o_auth2_application_get_absol
|
||||
|
||||
|
||||
def o_auth2_token_get_absolute_url(self, request=None):
|
||||
# this page does not exist in v1
|
||||
if request.version == 'v1':
|
||||
return reverse('api:o_auth2_token_detail', kwargs={'pk': self.pk}) # use default version
|
||||
return reverse('api:o_auth2_token_detail', kwargs={'pk': self.pk}, request=request)
OAuth2AccessToken.add_to_class('get_absolute_url', o_auth2_token_get_absolute_url)
# Import signal handlers only after models have been defined.
|
||||
import awx.main.signals # noqa
from awx.main.registrar import activity_stream_registrar # noqa
|
||||
activity_stream_registrar.connect(Organization)
|
||||
activity_stream_registrar.connect(Inventory)
@@ -7,6 +7,7 @@ from awx.main.fields import JSONField
|
||||
|
||||
# Django
|
||||
from django.db import models
|
||||
from django.utils.encoding import smart_str
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
__all__ = ['ActivityStream']
|
||||
@@ -84,9 +85,9 @@ class ActivityStream(models.Model):
|
||||
if self.actor:
|
||||
self.deleted_actor = {
|
||||
'id': self.actor_id,
|
||||
'username': self.actor.username,
|
||||
'first_name': self.actor.first_name,
|
||||
'last_name': self.actor.last_name,
|
||||
'username': smart_str(self.actor.username),
|
||||
'first_name': smart_str(self.actor.first_name),
|
||||
'last_name': smart_str(self.actor.last_name),
|
||||
}
|
||||
if 'update_fields' in kwargs and 'deleted_actor' not in kwargs['update_fields']:
|
||||
kwargs['update_fields'].append('deleted_actor')
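The smart_str() wrapping added above coerces whatever comes out of the user record to text before it lands in the deleted_actor snapshot. A small standalone sketch, not AWX code:

    from django.utils.encoding import smart_str

    print(smart_str(b'j.doe'))  # 'j.doe' -- bytes are decoded to str
    print(smart_str('jöe'))     # 'jöe'   -- str passes through unchanged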
@@ -3,7 +3,7 @@
|
||||
|
||||
# Python
|
||||
import logging
|
||||
from urlparse import urljoin
|
||||
from urllib.parse import urljoin
# Django
|
||||
from django.conf import settings
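The import swap above reflects the stdlib relocation: urlparse on Python 2 became urllib.parse on Python 3. If a cross-version shim were ever needed it would look roughly like this (illustrative only; the diff simply switches to the Python 3 path):

    try:
        from urllib.parse import urljoin   # Python 3
    except ImportError:                    # Python 2 fallback
        from urlparse import urljoin

    print(urljoin('https://tower.example.org/api/', 'v2/jobs/'))
    # https://tower.example.org/api/v2/jobs/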
|
||||
@@ -109,7 +109,7 @@ class AdHocCommand(UnifiedJob, JobNotificationMixin):
|
||||
return self.limit
|
||||
|
||||
def clean_module_name(self):
|
||||
if type(self.module_name) not in (str, unicode):
|
||||
if type(self.module_name) is not str:
|
||||
raise ValidationError(_("Invalid type for ad hoc command"))
|
||||
module_name = self.module_name.strip() or 'command'
|
||||
if module_name not in settings.AD_HOC_COMMANDS:
|
||||
@@ -117,7 +117,7 @@ class AdHocCommand(UnifiedJob, JobNotificationMixin):
|
||||
return module_name
|
||||
|
||||
def clean_module_args(self):
|
||||
if type(self.module_args) not in (str, unicode):
|
||||
if type(self.module_args) is not str:
|
||||
raise ValidationError(_("Invalid type for ad hoc command"))
|
||||
module_args = self.module_args
|
||||
if self.module_name in ('command', 'shell') and not module_args:
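The type checks above collapse because Python 3 has a single text type; the name 'unicode' is simply gone. A quick illustration (values are placeholders):

    value = 'ping'
    print(type(value) is str)  # True -- the only text type on Python 3
    # type(value) in (str, unicode) would raise NameError on Python 3: 'unicode' is undefined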
@@ -92,7 +92,7 @@ class BaseModel(models.Model):
|
||||
class Meta:
|
||||
abstract = True
|
||||
|
||||
def __unicode__(self):
|
||||
def __str__(self):
|
||||
if 'name' in self.__dict__:
|
||||
return u'%s-%s' % (self.name, self.pk)
|
||||
else:
|
||||
@@ -152,7 +152,7 @@ class CreatedModifiedModel(BaseModel):
|
||||
)
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
update_fields = kwargs.get('update_fields', [])
|
||||
update_fields = list(kwargs.get('update_fields', []))
|
||||
# Manually perform auto_now_add and auto_now logic.
|
||||
if not self.pk and not self.created:
|
||||
self.created = now()
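Copying update_fields into a list, as the change above does, matters because callers often pass a tuple and later code in these save() methods appends to it (for example the created/modified and deleted_actor bookkeeping). A minimal sketch, not the actual model code:

    def fake_save(**kwargs):
        update_fields = list(kwargs.get('update_fields', []))  # tuple-safe copy
        if 'modified' not in update_fields:
            update_fields.append('modified')  # .append() on a tuple would raise AttributeError
        return update_fields

    print(fake_save(update_fields=('name',)))  # ['name', 'modified']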
@@ -383,6 +383,9 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin):
|
||||
super(Credential, self).save(*args, **kwargs)
def encrypt_field(self, field, ask):
|
||||
if not hasattr(self, field):
|
||||
return None
encrypted = encrypt_field(self, field, ask=ask)
|
||||
if encrypted:
|
||||
self.inputs[field] = encrypted
|
||||
@@ -413,12 +416,12 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin):
|
||||
type_alias = self.credential_type.name
|
||||
else:
|
||||
type_alias = self.credential_type_id
|
||||
if self.kind == 'vault' and self.inputs.get('vault_id', None):
|
||||
if self.kind == 'vault' and self.has_input('vault_id'):
|
||||
if display:
|
||||
fmt_str = six.text_type('{} (id={})')
|
||||
else:
|
||||
fmt_str = six.text_type('{}_{}')
|
||||
return fmt_str.format(type_alias, self.inputs.get('vault_id'))
|
||||
return fmt_str.format(type_alias, self.get_input('vault_id'))
|
||||
return six.text_type(type_alias)
@staticmethod
|
||||
@@ -428,6 +431,29 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin):
|
||||
ret[cred.unique_hash()] = cred
|
||||
return ret
def get_input(self, field_name, **kwargs):
|
||||
"""
|
||||
Get an injectable and decrypted value for an input field.
Retrieves the value for a given credential input field name. Return
|
||||
values for secret input fields are decrypted. If the credential doesn't
|
||||
have an input value defined for the given field name, an AttributeError
|
||||
is raised unless a default value is provided.
|
||||
|
||||
:param field_name(str): The name of the input field.
|
||||
:param default(optional[str]): A default return value to use.
|
||||
"""
|
||||
if field_name in self.credential_type.secret_fields:
|
||||
return decrypt_field(self, field_name)
|
||||
if field_name in self.inputs:
|
||||
return self.inputs[field_name]
|
||||
if 'default' in kwargs:
|
||||
return kwargs['default']
|
||||
raise AttributeError(field_name)
|
||||
|
||||
def has_input(self, field_name):
|
||||
return field_name in self.inputs and self.inputs[field_name] not in ('', None)
|
||||
|
||||
|
||||
class CredentialType(CommonModelNameNotUnique):
|
||||
'''
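A short usage sketch of the get_input()/has_input() accessors added above; the function name and field names here are illustrative, and cred is assumed to be an existing Credential instance:

    def collect_basic_auth(cred):
        username = cred.get_input('username', default='')   # plain input, '' when unset
        token = None
        if cred.has_input('security_token'):                 # True only for non-empty values
            token = cred.get_input('security_token')         # secret fields come back decrypted
        try:
            cred.get_input('no_such_field')                  # no default -> AttributeError
        except AttributeError:
            pass
        return username, token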
|
||||
@@ -477,6 +503,9 @@ class CredentialType(CommonModelNameNotUnique):
|
||||
)
|
||||
|
||||
def get_absolute_url(self, request=None):
|
||||
# Page does not exist in API v1
|
||||
if request.version == 'v1':
|
||||
return reverse('api:credential_type_detail', kwargs={'pk': self.pk})
|
||||
return reverse('api:credential_type_detail', kwargs={'pk': self.pk}, request=request)
|
||||
|
||||
@property
|
||||
@@ -580,7 +609,7 @@ class CredentialType(CommonModelNameNotUnique):
|
||||
if not self.injectors:
|
||||
if self.managed_by_tower and credential.kind in dir(builtin_injectors):
|
||||
injected_env = {}
|
||||
getattr(builtin_injectors, credential.kind)(credential, injected_env)
|
||||
getattr(builtin_injectors, credential.kind)(credential, injected_env, private_data_dir)
|
||||
env.update(injected_env)
|
||||
safe_env.update(build_safe_env(injected_env))
|
||||
return
|
||||
@@ -606,8 +635,9 @@ class CredentialType(CommonModelNameNotUnique):
|
||||
safe_namespace[field_name] = namespace[field_name] = value
|
||||
continue
|
||||
|
||||
value = credential.get_input(field_name)
|
||||
|
||||
if field_name in self.secret_fields:
|
||||
value = decrypt_field(credential, field_name)
|
||||
safe_namespace[field_name] = '**********'
|
||||
elif len(value):
|
||||
safe_namespace[field_name] = value
|
||||
@@ -627,7 +657,7 @@ class CredentialType(CommonModelNameNotUnique):
|
||||
data = Template(file_tmpl).render(**namespace)
|
||||
_, path = tempfile.mkstemp(dir=private_data_dir)
|
||||
with open(path, 'w') as f:
|
||||
f.write(data.encode('utf-8'))
|
||||
f.write(data)
|
||||
os.chmod(path, stat.S_IRUSR | stat.S_IWUSR)
|
||||
|
||||
# determine if filename indicates single file or many
|
||||
@@ -977,7 +1007,7 @@ def cloudforms(cls):
|
||||
'label': ugettext_noop('CloudForms URL'),
|
||||
'type': 'string',
|
||||
'help_text': ugettext_noop('Enter the URL for the virtual machine that '
|
||||
'corresponds to your CloudForm instance. '
|
||||
'corresponds to your CloudForms instance. '
|
||||
'For example, https://cloudforms.example.org')
|
||||
}, {
|
||||
'id': 'username',
|
||||
|
||||
@@ -1,35 +1,59 @@
|
||||
from awx.main.utils import decrypt_field
|
||||
import json
|
||||
import os
|
||||
import stat
|
||||
import tempfile
|
||||
|
||||
from django.conf import settings
|
||||
|
||||
|
||||
def aws(cred, env):
|
||||
env['AWS_ACCESS_KEY_ID'] = cred.username
|
||||
env['AWS_SECRET_ACCESS_KEY'] = decrypt_field(cred, 'password')
|
||||
if len(cred.security_token) > 0:
|
||||
env['AWS_SECURITY_TOKEN'] = decrypt_field(cred, 'security_token')
|
||||
def aws(cred, env, private_data_dir):
|
||||
env['AWS_ACCESS_KEY_ID'] = cred.get_input('username', default='')
|
||||
env['AWS_SECRET_ACCESS_KEY'] = cred.get_input('password', default='')
|
||||
|
||||
if cred.has_input('security_token'):
|
||||
env['AWS_SECURITY_TOKEN'] = cred.get_input('security_token', default='')
|
||||
|
||||
|
||||
def gce(cred, env):
|
||||
env['GCE_EMAIL'] = cred.username
|
||||
env['GCE_PROJECT'] = cred.project
|
||||
def gce(cred, env, private_data_dir):
|
||||
project = cred.get_input('project', default='')
|
||||
username = cred.get_input('username', default='')
|
||||
|
||||
env['GCE_EMAIL'] = username
|
||||
env['GCE_PROJECT'] = project
|
||||
json_cred = {
|
||||
'type': 'service_account',
|
||||
'private_key': cred.get_input('ssh_key_data', default=''),
|
||||
'client_email': username,
|
||||
'project_id': project
|
||||
}
|
||||
handle, path = tempfile.mkstemp(dir=private_data_dir)
|
||||
f = os.fdopen(handle, 'w')
|
||||
json.dump(json_cred, f)
|
||||
f.close()
|
||||
os.chmod(path, stat.S_IRUSR | stat.S_IWUSR)
|
||||
env['GCE_CREDENTIALS_FILE_PATH'] = path
|
||||
|
||||
|
||||
def azure_rm(cred, env):
|
||||
if len(cred.client) and len(cred.tenant):
|
||||
env['AZURE_CLIENT_ID'] = cred.client
|
||||
env['AZURE_SECRET'] = decrypt_field(cred, 'secret')
|
||||
env['AZURE_TENANT'] = cred.tenant
|
||||
env['AZURE_SUBSCRIPTION_ID'] = cred.subscription
|
||||
def azure_rm(cred, env, private_data_dir):
|
||||
client = cred.get_input('client', default='')
|
||||
tenant = cred.get_input('tenant', default='')
|
||||
|
||||
if len(client) and len(tenant):
|
||||
env['AZURE_CLIENT_ID'] = client
|
||||
env['AZURE_TENANT'] = tenant
|
||||
env['AZURE_SECRET'] = cred.get_input('secret', default='')
|
||||
env['AZURE_SUBSCRIPTION_ID'] = cred.get_input('subscription', default='')
|
||||
else:
|
||||
env['AZURE_SUBSCRIPTION_ID'] = cred.subscription
|
||||
env['AZURE_AD_USER'] = cred.username
|
||||
env['AZURE_PASSWORD'] = decrypt_field(cred, 'password')
|
||||
if cred.inputs.get('cloud_environment', None):
|
||||
env['AZURE_CLOUD_ENVIRONMENT'] = cred.inputs['cloud_environment']
|
||||
env['AZURE_SUBSCRIPTION_ID'] = cred.get_input('subscription', default='')
|
||||
env['AZURE_AD_USER'] = cred.get_input('username', default='')
|
||||
env['AZURE_PASSWORD'] = cred.get_input('password', default='')
|
||||
|
||||
if cred.has_input('cloud_environment'):
|
||||
env['AZURE_CLOUD_ENVIRONMENT'] = cred.get_input('cloud_environment')
|
||||
|
||||
|
||||
def vmware(cred, env):
|
||||
env['VMWARE_USER'] = cred.username
|
||||
env['VMWARE_PASSWORD'] = decrypt_field(cred, 'password')
|
||||
env['VMWARE_HOST'] = cred.host
|
||||
def vmware(cred, env, private_data_dir):
|
||||
env['VMWARE_USER'] = cred.get_input('username', default='')
|
||||
env['VMWARE_PASSWORD'] = cred.get_input('password', default='')
|
||||
env['VMWARE_HOST'] = cred.get_input('host', default='')
|
||||
env['VMWARE_VALIDATE_CERTS'] = str(settings.VMWARE_VALIDATE_CERTS)
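The rewritten injectors above all follow the same pattern: accept the private_data_dir, read inputs through get_input() with a default instead of attribute access plus decrypt_field(), and write any on-disk secret into the per-job private data directory. A hypothetical injector in that style (the name and environment variables are invented for illustration, not part of AWX):

    import json
    import os
    import stat
    import tempfile

    def example_cloud(cred, env, private_data_dir):
        env['EXAMPLE_USER'] = cred.get_input('username', default='')
        env['EXAMPLE_PASSWORD'] = cred.get_input('password', default='')
        if cred.has_input('project'):
            env['EXAMPLE_PROJECT'] = cred.get_input('project')
        # Secrets that must exist as files go into the per-job private data dir.
        handle, path = tempfile.mkstemp(dir=private_data_dir)
        with os.fdopen(handle, 'w') as f:
            json.dump({'token': cred.get_input('token', default='')}, f)
        os.chmod(path, stat.S_IRUSR | stat.S_IWUSR)
        env['EXAMPLE_CREDENTIALS_FILE'] = path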
|
||||
|
||||
@@ -27,7 +27,7 @@ __all__ = ['JobEvent', 'ProjectUpdateEvent', 'AdHocCommandEvent',
|
||||
|
||||
def sanitize_event_keys(kwargs, valid_keys):
|
||||
# Sanity check: Don't honor keys that we don't recognize.
|
||||
for key in kwargs.keys():
|
||||
for key in list(kwargs.keys()):
|
||||
if key not in valid_keys:
|
||||
kwargs.pop(key)
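Wrapping kwargs.keys() in list(), as above, is required on Python 3 because .keys() is now a live view and popping while iterating over it raises RuntimeError. A short demonstration:

    d = {'a': 1, 'b': 2, 'junk': 3}
    # for key in d.keys(): d.pop(key)   # RuntimeError: dict changed size during iteration
    for key in list(d.keys()):           # snapshot the keys first, then mutate freely
        if key not in ('a', 'b'):
            d.pop(key)
    print(d)  # {'a': 1, 'b': 2}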
|
||||
|
||||
@@ -424,7 +424,7 @@ class JobEvent(BasePlaybookEvent):
|
||||
def get_absolute_url(self, request=None):
|
||||
return reverse('api:job_event_detail', kwargs={'pk': self.pk}, request=request)
|
||||
|
||||
def __unicode__(self):
|
||||
def __str__(self):
|
||||
return u'%s @ %s' % (self.get_event_display2(), self.created.isoformat())
|
||||
|
||||
def _update_from_event_data(self):
|
||||
@@ -580,7 +580,7 @@ class BaseCommandEvent(CreatedModifiedModel):
|
||||
editable=False,
|
||||
)
|
||||
|
||||
def __unicode__(self):
|
||||
def __str__(self):
|
||||
return u'%s @ %s' % (self.get_event_display(), self.created.isoformat())
|
||||
|
||||
@classmethod
|
||||
|
||||
@@ -1,11 +1,9 @@
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved.

import six
import random
from decimal import Decimal

from django.core.exceptions import ValidationError
from django.core.validators import MinValueValidator
from django.db import models, connection
from django.db.models.signals import post_save, post_delete
@@ -31,15 +29,6 @@ from awx.main.models.mixins import RelatedJobsMixin
__all__ = ('Instance', 'InstanceGroup', 'JobOrigin', 'TowerScheduleState',)


def validate_queuename(v):
    # kombu doesn't play nice with unicode in queue names
    if v:
        try:
            '{}'.format(v.decode('utf-8'))
        except UnicodeEncodeError:
            raise ValidationError(_(six.text_type('{} contains unsupported characters')).format(v))


class HasPolicyEditsMixin(HasEditsMixin):

    class Meta:
@@ -164,7 +153,6 @@ class Instance(HasPolicyEditsMixin, BaseModel):
                                 'memory', 'cpu_capacity', 'mem_capacity'])

    def clean_hostname(self):
        validate_queuename(self.hostname)
        return self.hostname


@@ -235,7 +223,6 @@ class InstanceGroup(HasPolicyEditsMixin, BaseModel, RelatedJobsMixin):
        app_label = 'main'

    def clean_name(self):
        validate_queuename(self.name)
        return self.name

    def fit_task_to_most_remaining_capacity_instance(self, task):
@@ -3,12 +3,14 @@

# Python
import datetime
import time
import itertools
import logging
import re
import copy
from urlparse import urljoin
import os.path
import six
from urllib.parse import urljoin

# Django
from django.conf import settings
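The urlparse module was renamed in Python 3; the same urljoin now lives under urllib.parse. A quick standalone check of the replacement import (the hostname below is just a placeholder):

from urllib.parse import urljoin  # Python 3 home of the old urlparse.urljoin

print(urljoin('https://awx.example.com/api/v2/', 'inventories/1/'))
# -> https://awx.example.com/api/v2/inventories/1/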
@@ -294,7 +296,9 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin):

        # Remove any empty groups
        for group_name in list(data.keys()):
            if not data.get(group_name, {}).get('hosts', []):
            if group_name == 'all':
                continue
            if not (data.get(group_name, {}).get('hosts', []) or data.get(group_name, {}).get('children', [])):
                data.pop(group_name)

        if hostvars:
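The reworked pruning keeps the 'all' group and any group that still has children, instead of dropping every group that merely has no hosts. A standalone illustration with a toy inventory dict (not AWX code):

data = {
    'all': {'hosts': []},
    'parent': {'hosts': [], 'children': ['web']},   # kept: has children
    'web': {'hosts': ['host-1']},                   # kept: has hosts
    'empty': {'hosts': []},                         # dropped
}

for group_name in list(data.keys()):
    if group_name == 'all':
        continue
    if not (data.get(group_name, {}).get('hosts', []) or data.get(group_name, {}).get('children', [])):
        data.pop(group_name)

assert sorted(data) == ['all', 'parent', 'web']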
@@ -340,9 +344,13 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin):
                host_updates = hosts_to_update.setdefault(host_pk, {})
                host_updates['has_inventory_sources'] = False
        # Now apply updates to hosts where needed (in batches).
        all_update_pks = hosts_to_update.keys()
        for offset in xrange(0, len(all_update_pks), 500):
            update_pks = all_update_pks[offset:(offset + 500)]
        all_update_pks = list(hosts_to_update.keys())

        def _chunk(items, chunk_size):
            for i, group in itertools.groupby(enumerate(items), lambda x: x[0] // chunk_size):
                yield (g[1] for g in group)

        for update_pks in _chunk(all_update_pks, 500):
            for host in hosts_qs.filter(pk__in=update_pks):
                host_updates = hosts_to_update[host.pk]
                for field, value in host_updates.items():
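The new _chunk helper replaces offset slicing (which assumed a list of keys) with an itertools.groupby over the enumerated items, yielding generators of at most chunk_size elements each. A standalone sketch of how it behaves:

import itertools

def _chunk(items, chunk_size):
    # Group consecutive items by index // chunk_size, then strip the indexes off.
    for i, group in itertools.groupby(enumerate(items), lambda x: x[0] // chunk_size):
        yield (g[1] for g in group)

pks = list(range(7))
print([list(batch) for batch in _chunk(pks, 3)])
# -> [[0, 1, 2], [3, 4, 5], [6]]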
@@ -409,12 +417,12 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin):
                        failed_group_pks.add(group_pk)

        # Now apply updates to each group as needed (in batches).
        all_update_pks = groups_to_update.keys()
        for offset in xrange(0, len(all_update_pks), 500):
        all_update_pks = list(groups_to_update.keys())
        for offset in range(0, len(all_update_pks), 500):
            update_pks = all_update_pks[offset:(offset + 500)]
            for group in self.groups.filter(pk__in=update_pks):
                group_updates = groups_to_update[group.pk]
                for field, value in group_updates.items():
                for field, value in list(group_updates.items()):
                    if getattr(group, field) != value:
                        setattr(group, field, value)
                    else:
@@ -426,7 +434,8 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin):
        '''
        Update model fields that are computed from database relationships.
        '''
        logger.debug("Going to update inventory computed fields")
        logger.debug("Going to update inventory computed fields, pk={0}".format(self.pk))
        start_time = time.time()
        if update_hosts:
            self.update_host_computed_fields()
        if update_groups:
@@ -454,7 +463,7 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin):
        }
        # CentOS python seems to have issues clobbering the inventory on poor timing during certain operations
        iobj = Inventory.objects.get(id=self.id)
        for field, value in computed_fields.items():
        for field, value in list(computed_fields.items()):
            if getattr(iobj, field) != value:
                setattr(iobj, field, value)
                # update in-memory object
@@ -463,7 +472,8 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin):
                computed_fields.pop(field)
        if computed_fields:
            iobj.save(update_fields=computed_fields.keys())
        logger.debug("Finished updating inventory computed fields")
        logger.debug("Finished updating inventory computed fields, pk={0}, in "
                     "{1:.3f} seconds".format(self.pk, time.time() - start_time))

    def websocket_emit_status(self, status):
        connection.on_commit(lambda: emit_channel_notification(
@@ -1731,6 +1741,14 @@ class InventoryUpdate(UnifiedJob, InventorySourceOptions, JobNotificationMixin,
            return self.global_instance_groups
        return selected_groups

    @property
    def ansible_virtualenv_path(self):
        if self.inventory_source and self.inventory_source.inventory:
            organization = self.inventory_source.inventory.organization
            if organization and organization.custom_virtualenv:
                return organization.custom_virtualenv
        return settings.ANSIBLE_VENV_PATH

    def cancel(self, job_explanation=None, is_chain=False):
        res = super(InventoryUpdate, self).cancel(job_explanation=job_explanation, is_chain=is_chain)
        if res:
@@ -8,7 +8,7 @@ import logging
import os
import time
import json
from urlparse import urljoin
from urllib.parse import urljoin

import six

@@ -34,7 +34,7 @@ from awx.main.models.notifications import (
    JobNotificationMixin,
)
from awx.main.utils import parse_yaml_or_json, getattr_dne
from awx.main.fields import ImplicitRoleField
from awx.main.fields import ImplicitRoleField, JSONField, AskForField
from awx.main.models.mixins import (
    ResourceMixin,
    SurveyJobTemplateMixin,
@@ -43,7 +43,6 @@ from awx.main.models.mixins import (
    CustomVirtualEnvMixin,
    RelatedJobsMixin,
)
from awx.main.fields import JSONField, AskForField


logger = logging.getLogger('awx.main.models.jobs')
@@ -315,7 +314,7 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour
        if self.inventory is None and not self.ask_inventory_on_launch:
            validation_errors['inventory'] = [_("Job Template must provide 'inventory' or allow prompting for it."),]
        if self.project is None:
            validation_errors['project'] = [_("Job types 'run' and 'check' must have assigned a project."),]
            validation_errors['project'] = [_("Job Templates must have a project assigned."),]
        return validation_errors

    @property
@@ -338,6 +337,9 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour
            kwargs['_parent_field_name'] = "job_template"
            kwargs.setdefault('_eager_fields', {})
            kwargs['_eager_fields']['is_sliced_job'] = True
        elif prevent_slicing:
            kwargs.setdefault('_eager_fields', {})
            kwargs['_eager_fields'].setdefault('job_slice_count', 1)
        job = super(JobTemplate, self).create_unified_job(**kwargs)
        if slice_event:
            try:
@@ -345,8 +347,8 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour
            except JobLaunchConfig.DoesNotExist:
                wj_config = JobLaunchConfig()
            actual_inventory = wj_config.inventory if wj_config.inventory else self.inventory
            for idx in xrange(min(self.job_slice_count,
                                  actual_inventory.hosts.count())):
            for idx in range(min(self.job_slice_count,
                                 actual_inventory.hosts.count())):
                create_kwargs = dict(workflow_job=job,
                                     unified_job_template=self,
                                     ancestor_artifacts=dict(job_slice=idx + 1))
@@ -693,7 +695,7 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskMana
        count_hosts = Host.objects.filter(inventory__jobs__pk=self.pk).count()
        if self.job_slice_count > 1:
            # Integer division intentional
            count_hosts = (count_hosts + self.job_slice_count - self.job_slice_number) / self.job_slice_count
            count_hosts = (count_hosts + self.job_slice_count - self.job_slice_number) // self.job_slice_count
        return min(count_hosts, 5 if self.forks == 0 else self.forks) + 1

    @property
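The one-character change from / to // preserves the Python 2 behaviour: on Python 3, / always returns a float, while // keeps the intentional integer division used for slice sizing. For example:

count_hosts, job_slice_count, job_slice_number = 10, 3, 1

print((count_hosts + job_slice_count - job_slice_number) / job_slice_count)   # 4.0 on Python 3
print((count_hosts + job_slice_count - job_slice_number) // job_slice_count)  # 4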
@@ -892,19 +894,19 @@ class NullablePromptPsuedoField(object):
        instance.char_prompts[self.field_name] = value


class LaunchTimeConfig(BaseModel):
class LaunchTimeConfigBase(BaseModel):
    '''
    Common model for all objects that save details of a saved launch config
    WFJT / WJ nodes, schedules, and job launch configs (not all implemented yet)
    Needed as separate class from LaunchTimeConfig because some models
    use `extra_data` and some use `extra_vars`. We cannot change the API,
    so we force fake it in the model definitions
     - model defines extra_vars - use this class
     - model needs to use extra data - use LaunchTimeConfig
    Use this for models which are SurveyMixins and UnifiedJobs or Templates
    '''
    class Meta:
        abstract = True

    # Prompting-related fields that have to be handled as special cases
    credentials = models.ManyToManyField(
        'Credential',
        related_name='%(class)ss'
    )
    inventory = models.ForeignKey(
        'Inventory',
        related_name='%(class)ss',
@@ -913,15 +915,6 @@ class LaunchTimeConfig(BaseModel):
        default=None,
        on_delete=models.SET_NULL,
    )
    extra_data = JSONField(
        blank=True,
        default={}
    )
    survey_passwords = prevent_search(JSONField(
        blank=True,
        default={},
        editable=False,
    ))
    # All standard fields are stored in this dictionary field
    # This is a solution to the nullable CharField problem, specific to prompting
    char_prompts = JSONField(
@@ -931,6 +924,7 @@ class LaunchTimeConfig(BaseModel):

    def prompts_dict(self, display=False):
        data = {}
        # Some types may have different prompts, but always subset of JT prompts
        for prompt_name in JobTemplate.get_ask_mapping().keys():
            try:
                field = self._meta.get_field(prompt_name)
@@ -943,11 +937,11 @@ class LaunchTimeConfig(BaseModel):
                if len(prompt_val) > 0:
                    data[prompt_name] = prompt_val
            elif prompt_name == 'extra_vars':
                if self.extra_data:
                if self.extra_vars:
                    if display:
                        data[prompt_name] = self.display_extra_data()
                        data[prompt_name] = self.display_extra_vars()
                    else:
                        data[prompt_name] = self.extra_data
                        data[prompt_name] = self.extra_vars
                if self.survey_passwords and not display:
                    data['survey_passwords'] = self.survey_passwords
            else:
@@ -956,18 +950,21 @@ class LaunchTimeConfig(BaseModel):
                    data[prompt_name] = prompt_val
        return data

    def display_extra_data(self):
    def display_extra_vars(self):
        '''
        Hides fields marked as passwords in survey.
        '''
        if self.survey_passwords:
            extra_data = parse_yaml_or_json(self.extra_data).copy()
            extra_vars = parse_yaml_or_json(self.extra_vars).copy()
            for key, value in self.survey_passwords.items():
                if key in extra_data:
                    extra_data[key] = value
            return extra_data
                if key in extra_vars:
                    extra_vars[key] = value
            return extra_vars
        else:
            return self.extra_data
            return self.extra_vars

    def display_extra_data(self):
        return self.display_extra_vars()

    @property
    def _credential(self):
@@ -991,7 +988,42 @@ class LaunchTimeConfig(BaseModel):
        return None


class LaunchTimeConfig(LaunchTimeConfigBase):
    '''
    Common model for all objects that save details of a saved launch config
    WFJT / WJ nodes, schedules, and job launch configs (not all implemented yet)
    '''
    class Meta:
        abstract = True

    # Special case prompting fields, even more special than the other ones
    extra_data = JSONField(
        blank=True,
        default={}
    )
    survey_passwords = prevent_search(JSONField(
        blank=True,
        default={},
        editable=False,
    ))
    # Credentials needed for non-unified job / unified JT models
    credentials = models.ManyToManyField(
        'Credential',
        related_name='%(class)ss'
    )

    @property
    def extra_vars(self):
        return self.extra_data

    @extra_vars.setter
    def extra_vars(self, extra_vars):
        self.extra_data = extra_vars


for field_name in JobTemplate.get_ask_mapping().keys():
    if field_name == 'extra_vars':
        continue
    try:
        LaunchTimeConfig._meta.get_field(field_name)
    except FieldDoesNotExist:
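The split into LaunchTimeConfigBase and LaunchTimeConfig lets callers work uniformly with extra_vars while the stored column for launch configs stays extra_data: the subclass exposes extra_vars as a property that reads and writes extra_data. A stripped-down standalone sketch of that aliasing (plain classes, no Django fields):

class LaunchTimeConfigSketch:
    def __init__(self):
        self.extra_data = {}          # stands in for the JSONField column

    @property
    def extra_vars(self):
        return self.extra_data

    @extra_vars.setter
    def extra_vars(self, value):
        self.extra_data = value

cfg = LaunchTimeConfigSketch()
cfg.extra_vars = {'limit': 'webservers'}
assert cfg.extra_data == {'limit': 'webservers'}   # same underlying storage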
@@ -1088,7 +1120,7 @@ class JobHostSummary(CreatedModifiedModel):
    skipped = models.PositiveIntegerField(default=0, editable=False)
    failed = models.BooleanField(default=False, editable=False)

    def __unicode__(self):
    def __str__(self):
        host = getattr_dne(self, 'host')
        hostname = host.name if host else 'N/A'
        return '%s changed=%d dark=%d failures=%d ok=%d processed=%d skipped=%s' % \
@@ -301,14 +301,22 @@ class SurveyJobTemplateMixin(models.Model):
            accepted.update(extra_vars)
            extra_vars = {}

        if extra_vars:
            # Prune the prompted variables for those identical to template
            tmp_extra_vars = self.extra_vars_dict
            for key in (set(tmp_extra_vars.keys()) & set(extra_vars.keys())):
                if tmp_extra_vars[key] == extra_vars[key]:
                    extra_vars.pop(key)

        if extra_vars:
            # Leftover extra_vars, keys provided that are not allowed
            rejected.update(extra_vars)
            # ignored variables does not block manual launch
            if 'prompts' not in _exclude_errors:
                errors['extra_vars'] = [_('Variables {list_of_keys} are not allowed on launch. Check the Prompt on Launch setting '+
                                          'on the Job Template to include Extra Variables.').format(
                                          list_of_keys=', '.join(extra_vars.keys()))]
                                          'on the {model_name} to include Extra Variables.').format(
                                          list_of_keys=six.text_type(', ').join([six.text_type(key) for key in extra_vars.keys()]),
                                          model_name=self._meta.verbose_name.title())]

        return (accepted, rejected, errors)

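The added pruning pass drops prompted variables whose values are identical to the template's own extra_vars, so only genuinely new keys get checked against the prompt-on-launch rules. A standalone illustration (not AWX code):

template_vars = {'env': 'prod', 'region': 'us-east-1'}
extra_vars = {'env': 'prod', 'region': 'eu-west-1', 'debug': True}

# Prune the prompted variables that are identical to the template's values.
for key in (set(template_vars.keys()) & set(extra_vars.keys())):
    if template_vars[key] == extra_vars[key]:
        extra_vars.pop(key)

print(extra_vars)  # {'region': 'eu-west-1', 'debug': True} -- only real overrides remain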
@@ -20,6 +20,7 @@ from awx.main.notifications.pagerduty_backend import PagerDutyBackend
from awx.main.notifications.hipchat_backend import HipChatBackend
from awx.main.notifications.webhook_backend import WebhookBackend
from awx.main.notifications.mattermost_backend import MattermostBackend
from awx.main.notifications.grafana_backend import GrafanaBackend
from awx.main.notifications.rocketchat_backend import RocketChatBackend
from awx.main.notifications.irc_backend import IrcBackend
from awx.main.fields import JSONField
@@ -36,6 +37,7 @@ class NotificationTemplate(CommonModelNameNotUnique):
        ('slack', _('Slack'), SlackBackend),
        ('twilio', _('Twilio'), TwilioBackend),
        ('pagerduty', _('Pagerduty'), PagerDutyBackend),
        ('grafana', _('Grafana'), GrafanaBackend),
        ('hipchat', _('HipChat'), HipChatBackend),
        ('webhook', _('Webhook'), WebhookBackend),
        ('mattermost', _('Mattermost'), MattermostBackend),
@@ -82,7 +84,7 @@ class NotificationTemplate(CommonModelNameNotUnique):
                    setattr(self, '_saved_{}_{}'.format("config", field), value)
                    self.notification_configuration[field] = ''
                else:
                    encrypted = encrypt_field(self, 'notification_configuration', subfield=field, skip_utf8=True)
                    encrypted = encrypt_field(self, 'notification_configuration', subfield=field)
                    self.notification_configuration[field] = encrypted
            if 'notification_configuration' not in update_fields:
                update_fields.append('notification_configuration')
@@ -4,7 +4,7 @@
# Python
import datetime
import os
import urlparse
import urllib.parse as urlparse

# Django
from django.conf import settings
@@ -68,7 +68,7 @@ class ProjectOptions(models.Model):
    @classmethod
    def get_local_path_choices(cls):
        if os.path.exists(settings.PROJECTS_ROOT):
            paths = [x.decode('utf-8') for x in os.listdir(settings.PROJECTS_ROOT)
            paths = [x for x in os.listdir(settings.PROJECTS_ROOT)
                     if (os.path.isdir(os.path.join(settings.PROJECTS_ROOT, x)) and
                         not x.startswith('.') and not x.startswith('_'))]
            qs = Project.objects
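Dropping the .decode('utf-8') is safe on Python 3: os.listdir() already returns str entries when given a str path, so decoding would fail with AttributeError. For example:

import os

entries = os.listdir('.')          # list of str on Python 3 (str path in, str names out)
print(all(isinstance(x, str) for x in entries))  # True
# entries = os.listdir(b'.')       # only a bytes path yields bytes entries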
@@ -166,8 +166,8 @@ class ProjectOptions(models.Model):
                                       check_special_cases=False)
            scm_url_parts = urlparse.urlsplit(scm_url)
            # Prefer the username/password in the URL, if provided.
            scm_username = scm_url_parts.username or cred.username or ''
            if scm_url_parts.password or cred.password:
            scm_username = scm_url_parts.username or cred.get_input('username', default='')
            if scm_url_parts.password or cred.has_input('password'):
                scm_password = '********'
            else:
                scm_password = ''
@@ -254,10 +254,6 @@ class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin, CustomVirtualEn
        on_delete=models.CASCADE,
        related_name='projects',
    )
    scm_delete_on_next_update = models.BooleanField(
        default=False,
        editable=False,
    )
    scm_update_on_launch = models.BooleanField(
        default=False,
        help_text=_('Update the project when a job is launched that uses the project.'),
@@ -331,13 +327,6 @@ class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin, CustomVirtualEn
        # if it hasn't been specified, then we're just doing a normal save.
        update_fields = kwargs.get('update_fields', [])
        skip_update = bool(kwargs.pop('skip_update', False))
        # Check if scm_type or scm_url changes.
        if self.pk:
            project_before = self.__class__.objects.get(pk=self.pk)
            if project_before.scm_type != self.scm_type or project_before.scm_url != self.scm_url:
                self.scm_delete_on_next_update = True
                if 'scm_delete_on_next_update' not in update_fields:
                    update_fields.append('scm_delete_on_next_update')
        # Create auto-generated local path if project uses SCM.
        if self.pk and self.scm_type and not self.local_path.startswith('_'):
            slug_name = slugify(six.text_type(self.name)).replace(u'-', u'_')
@@ -397,19 +386,6 @@ class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin, CustomVirtualEn
    def _can_update(self):
        return bool(self.scm_type)

    def _update_unified_job_kwargs(self, create_kwargs, kwargs):
        '''
        :param create_kwargs: key-worded arguments to be updated and later used for creating unified job.
        :type create_kwargs: dict
        :param kwargs: request parameters used to override unified job template fields with runtime values.
        :type kwargs: dict
        :return: modified create_kwargs.
        :rtype: dict
        '''
        if self.scm_delete_on_next_update:
            create_kwargs['scm_delete_on_update'] = True
        return create_kwargs

    def create_project_update(self, **kwargs):
        return self.create_unified_job(**kwargs)

@@ -499,6 +475,21 @@ class ProjectUpdate(UnifiedJob, ProjectOptions, JobNotificationMixin, TaskManage
    def _get_parent_field_name(self):
        return 'project'

    def _update_parent_instance(self):
        if not self.project:
            return  # no parent instance to update
        if self.job_type == PERM_INVENTORY_DEPLOY:
            # Do not update project status if this is sync job
            # unless no other updates have happened or started
            first_update = False
            if self.project.status == 'never updated' and self.status == 'running':
                first_update = True
            elif self.project.current_job == self:
                first_update = True
            if not first_update:
                return
        return super(ProjectUpdate, self)._update_parent_instance()

    @classmethod
    def _get_task_class(cls):
        from awx.main.tasks import RunProjectUpdate
@@ -549,17 +540,6 @@ class ProjectUpdate(UnifiedJob, ProjectOptions, JobNotificationMixin, TaskManage
    def get_ui_url(self):
        return urlparse.urljoin(settings.TOWER_URL_BASE, "/#/jobs/project/{}".format(self.pk))

    def _update_parent_instance(self):
        parent_instance = self._get_parent_instance()
        if parent_instance and self.job_type == 'check':
            update_fields = self._update_parent_instance_no_save(parent_instance)
            if self.status in ('successful', 'failed', 'error', 'canceled'):
                if not self.failed and parent_instance.scm_delete_on_next_update:
                    parent_instance.scm_delete_on_next_update = False
                    if 'scm_delete_on_next_update' not in update_fields:
                        update_fields.append('scm_delete_on_next_update')
            parent_instance.save(update_fields=update_fields)

    def cancel(self, job_explanation=None, is_chain=False):
        res = super(ProjectUpdate, self).cancel(job_explanation=job_explanation, is_chain=is_chain)
        if res and self.launch_type != 'sync':
Some files were not shown because too many files have changed in this diff.