Mirror of https://github.com/ansible/awx.git (synced 2026-02-07 20:44:45 -03:30)
Compare commits
937 Commits
@@ -1 +1,3 @@
awx/ui/node_modules
awx/ui_next/node_modules
Dockerfile

.env (2 changes)
@@ -1,3 +1,3 @@
PYTHONUNBUFFERED=true
SELENIUM_DOCKER_TAG=latest
COMPOSE_PROJECT_NAME=tools

.github/dependabot.yml (new file, 7 additions, vendored)
@@ -0,0 +1,7 @@
---
version: 2
updates:
  - package-ecosystem: "pip"
    directory: "/requirements"
    schedule:
      interval: "monthly"

.gitignore (13 changes, vendored)
@@ -29,14 +29,18 @@ awx/ui/client/languages
awx/ui/templates/ui/index.html
awx/ui/templates/ui/installing.html
awx/ui_next/node_modules/
awx/ui_next/src/locales/
awx/ui_next/src/locales/*/messages.js
awx/ui_next/coverage/
awx/ui_next/build
awx/ui_next/.env.local
awx/ui_next/instrumented
rsyslog.pid
tools/prometheus/data
tools/docker-compose/ansible/awx_dump.sql
tools/docker-compose/Dockerfile
tools/docker-compose/_build
tools/docker-compose/_sources
tools/docker-compose/overrides/

# Tower setup playbook testing
setup/test/roles/postgresql

@@ -87,6 +91,9 @@ awx/awx_test.sqlite3-journal
# Mac OS X
*.DS_Store

# VSCode
.vscode/

# Editors
*.sw[poj]
*~

@@ -146,6 +153,8 @@ use_dev_supervisor.txt
.idea/*
*.unison.tmp
*.#
/tools/docker-compose/overrides/
/awx/ui_next/.ui-built
/Dockerfile
/_build/
/_build_kube_dev/
/Dockerfile.kube-dev

CHANGELOG.md (76 changes)
@@ -2,6 +2,82 @@

This is a list of high-level changes for each release of AWX. A full list of commits can be found at `https://github.com/ansible/awx/releases/tag/<version>`.

# 19.0.0 (April 7, 2021)

- AWX now runs on Python 3.8 (https://github.com/ansible/awx/pull/8778/)
- Fixed inventories-from-projects when running in Kubernetes (https://github.com/ansible/awx/pull/9741)
- Fixed a bug where a slash was appended to inventory file paths in the UI dropdown (https://github.com/ansible/awx/pull/9713)
- Fixed a bug with large file parsing in project sync (https://github.com/ansible/awx/pull/9627)
- Fixed k8s credentials that use a custom CA cert (https://github.com/ansible/awx/pull/9744)
- Fixed a bug that allowed a user to attempt deleting a running job (https://github.com/ansible/awx/pull/9758)
- Fixed the Kubernetes Pod reaper to properly delete Pods launched by Receptor (https://github.com/ansible/awx/pull/9819)
- AWX Collection modules: added the ability to set instance groups for organizations, job templates, and inventories (https://github.com/ansible/awx/pull/9804)
- Fixed CSP violation errors on the job details and job settings views (https://github.com/ansible/awx/pull/9818)
- Added support for any/all convergence on workflow nodes (https://github.com/ansible/awx/pull/9737)
- Fixed a race condition that caused InvalidGitRepositoryError (https://github.com/ansible/awx/pull/9754)
- Added support for Execution Environments to the Activity Stream (https://github.com/ansible/awx/issues/9308)
- Fixed a bug that improperly formatted OpenSSH keys specified in custom Credential Types (https://github.com/ansible/awx/issues/9361)
- Fixed an HTTP 500 error for unauthenticated users (https://github.com/ansible/awx/pull/9725)
- Added a subscription wizard: https://github.com/ansible/awx/pull/9496
# 18.0.0 (March 23, 2021)

**IMPORTANT INSTALL AND UPGRADE NOTES**

Starting in version 18.0, the [AWX Operator](https://github.com/ansible/awx-operator) is the preferred way to install AWX: https://github.com/ansible/awx/blob/devel/INSTALL.md#installing-awx

If you have a pre-existing installation of AWX that utilizes the Docker-based installation method, this install method has **notably changed** from 17.x to 18.x. For details, please see:

- https://groups.google.com/g/awx-project/c/47MjWSUQaOc/m/bCjSDn0eBQAJ
- https://github.com/ansible/awx/blob/devel/tools/docker-compose
- https://github.com/ansible/awx/blob/devel/tools/docker-compose/docs/data_migration.md

### Introducing Execution Environments

After a herculean effort from a number of contributors, we're excited to announce that AWX 18.0.0 introduces a new concept called Execution Environments.

Execution Environments are container images which consist of everything necessary to run a playbook within AWX, and which drive the entire management and lifecycle of the playbook execution runtime in AWX: https://github.com/ansible/awx/issues/5157. This means that going forward, AWX no longer utilizes the [bubblewrap](https://github.com/containers/bubblewrap) project for playbook isolation, but instead utilizes a container per playbook run.

Much like custom virtualenvs, custom Execution Environments can be crafted to specify additional Python or system-level dependencies. [Ansible Builder](https://github.com/ansible/ansible-builder) outputs images you can upload to your registry, which can *then* be defined in AWX and utilized for playbook runs.

To learn more about Ansible Builder and Execution Environments, see: https://www.ansible.com/blog/introduction-to-ansible-builder

### Other Notable Changes

- Removed the `installer` directory.
- The Kubernetes installer has been removed in favor of the [AWX Operator](https://github.com/ansible/awx-operator). Official images for Operator-based installs are no longer hosted on Docker Hub, but are instead available on [Quay](https://quay.io/repository/ansible/awx?tab=tags).
- The "Local Docker" install method has been removed in favor of the development environment. Details can be found at: https://github.com/ansible/awx/blob/devel/tools/docker-compose/README.md
- Removed custom virtual environments: https://github.com/ansible/awx/pull/9498
- Custom virtual environments have been replaced by Execution Environments: https://github.com/ansible/awx/pull/9570
- The default Container Group Pod definition has changed. All custom Pod specs have been reset. https://github.com/ansible/awx/commit/05ef51f710dad8f8036bc5acee4097db4adc0d71
- Added a user interface for the Activity Stream: https://github.com/ansible/awx/pull/9083
- Converted many of the top-level list views (Jobs, Teams, Hosts, Inventories, Projects, and more) to a new, permanent table component for substantially increased responsiveness, usability, maintainability, and other 'ility's: https://github.com/ansible/awx/pull/8970, https://github.com/ansible/awx/pull/9182, and many others!
- Added support for Centrify Vault (https://www.centrify.com) as a credential lookup plugin (https://github.com/ansible/awx/pull/9542)
- Added support for namespaces in the HashiCorp Vault credential plugin (https://github.com/ansible/awx/pull/9590)
- Added click-to-expand details for job tables
- Added search filtering to job output: https://github.com/ansible/awx/pull/9208
- Added the new migration, update, and "installation in progress" page: https://github.com/ansible/awx/pull/9123
- Added the user interface for job settings: https://github.com/ansible/awx/pull/8661
- Runtime errors from jobs are now displayed, along with an explanation of what went wrong, on the output page: https://github.com/ansible/awx/pull/8726
- You can now cancel a running job from its output and details panel: https://github.com/ansible/awx/pull/9199
- Fixed a bug where launch prompt inputs were unexpectedly deposited in the URL: https://github.com/ansible/awx/pull/9231
- Playbook, credential type, and inventory file inputs now support type-ahead and manual type-in: https://github.com/ansible/awx/pull/9120
- Added the ability to relaunch against failed hosts: https://github.com/ansible/awx/pull/9225
- Added a pending workflow approval count to the application header: https://github.com/ansible/awx/pull/9334
- Added a user interface for management jobs: https://github.com/ansible/awx/pull/9224
- Added a toast message to the notification templates list to show notification template test results: https://github.com/ansible/awx/pull/9318
- Replaced CodeMirror with AceEditor for editing template variables and notification templates: https://github.com/ansible/awx/pull/9281
- Added support for filtering and pagination on job output: https://github.com/ansible/awx/pull/9208
- Added support for HTML in custom login text: https://github.com/ansible/awx/pull/9519

# 17.1.0 (March 9, 2021)

- Addressed a security issue in AWX (CVE-2021-20253)
- Fixed a permissions bug related to Redis in K8S-based deployments: https://github.com/ansible/awx/issues/9401

# 17.0.1 (January 26, 2021)

- Fixed a pgdocker directory permissions issue with the Local Docker installer: https://github.com/ansible/awx/pull/9152
- Fixed a bug in the UI which caused toggle settings to not be changed when clicked: https://github.com/ansible/awx/pull/9093

# 17.0.0 (January 22, 2021)

- AWX now requires PostgreSQL 12 by default: https://github.com/ansible/awx/pull/8943
  **Note:** users who encounter permissions errors at upgrade time should `chown -R` the `~/.awx/pgdocker` directory to ensure it's owned by the user running the install playbook.
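A minimal sketch of the ownership fix described in that note (assuming the default `~/.awx/pgdocker` data directory and that the install playbook runs as your current user):

```bash
# Give the PostgreSQL data directory back to the user who runs the
# install playbook; sudo is needed if the files ended up owned by root.
sudo chown -R "$(whoami)" ~/.awx/pgdocker
```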
CONTRIBUTING.md (234 changes)
@@ -2,7 +2,7 @@

Hi there! We're excited to have you as a contributor.

Have questions about this document or anything not covered here? Come chat with us at `#ansible-awx` on irc.freenode.net, or submit your question to the [mailing list](https://groups.google.com/forum/#!forum/awx-project).
Have questions about this document or anything not covered here? Come chat with us at `#ansible-awx` on webchat.freenode.net, or submit your question to the [mailing list](https://groups.google.com/forum/#!forum/awx-project).

## Table of contents

@@ -11,24 +11,15 @@ Have questions about this document or anything not covered here? Come chat with
* [Prerequisites](#prerequisites)
* [Docker](#docker)
* [Docker compose](#docker-compose)
* [Node and npm](#node-and-npm)
* [Build the environment](#build-the-environment)
* [Frontend Development](#frontend-development)
* [Build and Run the Development Environment](#build-and-run-the-development-environment)
* [Fork and clone the AWX repo](#fork-and-clone-the-awx-repo)
* [Create local settings](#create-local-settings)
* [Build the base image](#build-the-base-image)
* [Build the user interface](#build-the-user-interface)
* [Running the environment](#running-the-environment)
* [Start the containers](#start-the-containers)
* [Start from the container shell](#start-from-the-container-shell)
* [Post Build Steps](#post-build-steps)
* [Start a shell](#start-a-shell)
* [Create a superuser](#create-a-superuser)
* [Load the data](#load-the-data)
* [Building API Documentation](#build-api-documentation)
* [Building API Documentation](#building-api-documentation)
* [Accessing the AWX web interface](#accessing-the-awx-web-interface)
* [Purging containers and images](#purging-containers-and-images)
* [What should I work on?](#what-should-i-work-on)
* [Submitting Pull Requests](#submitting-pull-requests)
* [PR Checks run by Zuul](#pr-checks-run-by-zuul)
* [Reporting Issues](#reporting-issues)
## Things to know prior to submitting code
|
||||
@@ -37,12 +28,12 @@ Have questions about this document or anything not covered here? Come chat with
|
||||
- You must use `git commit --signoff` for any commit to be merged, and agree that usage of --signoff constitutes agreement with the terms of [DCO 1.1](./DCO_1_1.md).
|
||||
- Take care to make sure no merge commits are in the submission, and use `git rebase` vs `git merge` for this reason.
|
||||
- If collaborating with someone else on the same branch, consider using `--force-with-lease` instead of `--force`. This will prevent you from accidentally overwriting commits pushed by someone else. For more information, see https://git-scm.com/docs/git-push#git-push---force-with-leaseltrefnamegt
|
||||
- If submitting a large code change, it's a good idea to join the `#ansible-awx` channel on irc.freenode.net, and talk about what you would like to do or add first. This not only helps everyone know what's going on, it also helps save time and effort, if the community decides some changes are needed.
|
||||
- If submitting a large code change, it's a good idea to join the `#ansible-awx` channel on webchat.freenode.net, and talk about what you would like to do or add first. This not only helps everyone know what's going on, it also helps save time and effort, if the community decides some changes are needed.
|
||||
- We ask all of our community members and contributors to adhere to the [Ansible code of conduct](http://docs.ansible.com/ansible/latest/community/code_of_conduct.html). If you have questions, or need assistance, please reach out to our community team at [codeofconduct@ansible.com](mailto:codeofconduct@ansible.com)
|
||||
|
||||
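A short sketch of the `--signoff` and `--force-with-lease` points above (the commit message, remote, and branch name are placeholders):

```bash
# Sign off the commit, as required for the DCO check.
git commit --signoff -m "Fix inventory sync retry logic"

# Push a rewritten branch without clobbering a collaborator's commits;
# the push is refused if the remote moved since your last fetch.
git push --force-with-lease origin my-feature-branch
```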
## Setting up your development environment

The AWX development environment workflow and toolchain is based on Docker, and the docker-compose tool, to provide dependencies, services, and databases necessary to run all of the components. It also binds the local source tree into the development container, making it possible to observe and test changes in real time.
The AWX development environment workflow and toolchain uses Docker and the docker-compose tool to provide the dependencies, services, and databases necessary to run all of the components. It also bind-mounts the local source tree into the development container, making it possible to observe and test changes in real time.

### Prerequisites

@@ -55,29 +46,19 @@ respectively.

For Linux platforms, refer to the following from Docker:

**Fedora**
> https://docs.docker.com/engine/installation/linux/docker-ce/fedora/

**CentOS**
> https://docs.docker.com/engine/installation/linux/docker-ce/centos/

**Ubuntu**
> https://docs.docker.com/engine/installation/linux/docker-ce/ubuntu/

**Debian**
> https://docs.docker.com/engine/installation/linux/docker-ce/debian/

**Arch**
> https://wiki.archlinux.org/index.php/Docker

* **Fedora** - https://docs.docker.com/engine/installation/linux/docker-ce/fedora/
* **CentOS** - https://docs.docker.com/engine/installation/linux/docker-ce/centos/
* **Ubuntu** - https://docs.docker.com/engine/installation/linux/docker-ce/ubuntu/
* **Debian** - https://docs.docker.com/engine/installation/linux/docker-ce/debian/
* **Arch** - https://wiki.archlinux.org/index.php/Docker

#### Docker compose
#### Docker Compose

If you're not using Docker for Mac, or Docker for Windows, you may need, or choose to, install the Docker compose Python module separately, in which case you'll need to run the following:
If you're not using Docker for Mac or Docker for Windows, you may need, or choose to, install the `docker-compose` Python module separately.

```bash
(host)$ pip3 install docker-compose
```
@@ -87,186 +68,15 @@ If you're not using Docker for Mac, or Docker for Windows, you may need, or choo

See [the ui development documentation](awx/ui_next/CONTRIBUTING.md).

### Build the environment

#### Fork and clone the AWX repo

If you have not done so already, you'll need to fork the AWX repo on GitHub. For more on how to do this, see [Fork a Repo](https://help.github.com/articles/fork-a-repo/).

#### Create local settings
### Build and Run the Development Environment

AWX will import the file `awx/settings/local_settings.py` and combine it with defaults in `awx/settings/defaults.py`. This file is required for starting the development environment, and startup will fail if it's not provided.
See the [README.md](./tools/docker-compose/README.md) for docs on how to build the awx_devel image and run the development environment.

An example is provided. Make a copy of it, and edit as needed (the defaults are usually fine):

```bash
(host)$ cp awx/settings/local_settings.py.docker_compose awx/settings/local_settings.py
```

#### Build the base image

The AWX base container image (defined in `tools/docker-compose/Dockerfile`) contains basic OS dependencies and symbolic links into the development environment that make running the services easy.

Run the following to build the image:

```bash
(host)$ make docker-compose-build
```

**NOTE**

> The image will need to be rebuilt if the Python requirements or OS dependencies change.

Once the build completes, you will have an `ansible/awx_devel` image in your local image cache. Use the `docker images` command to view it, as follows:

```bash
(host)$ docker images

REPOSITORY          TAG      IMAGE ID       CREATED          SIZE
ansible/awx_devel   latest   ba9ec3e8df74   26 minutes ago   1.42GB
```

#### Build the user interface

Run the following to build the AWX UI:

```bash
(host)$ make ui-devel
```

See [the ui development documentation](awx/ui/README.md) for more information on using the frontend development, build, and test tooling.

### Running the environment

#### Start the containers

Start the development containers by running the following:

```bash
(host)$ make docker-compose
```

The above utilizes the image built in the previous step, and will automatically start all required services and dependent containers. Once the containers launch, your session will be attached to the *awx* container, and you'll be able to watch log messages and events in real time. You will see messages from Django and the front-end build process.

If you start a second terminal session, you can take a look at the running containers using the `docker ps` command. For example:

```bash
# List running containers
(host)$ docker ps

CONTAINER ID   IMAGE                                              COMMAND                  CREATED          STATUS          PORTS                                                                                                                                                               NAMES
44251b476f98   gcr.io/ansible-tower-engineering/awx_devel:devel   "/entrypoint.sh /bin…"   27 seconds ago   Up 23 seconds   0.0.0.0:6899->6899/tcp, 0.0.0.0:7899-7999->7899-7999/tcp, 0.0.0.0:8013->8013/tcp, 0.0.0.0:8043->8043/tcp, 0.0.0.0:8080->8080/tcp, 22/tcp, 0.0.0.0:8888->8888/tcp   tools_awx_run_9e820694d57e
40de380e3c2e   redis:latest                                       "docker-entrypoint.s…"   28 seconds ago   Up 26 seconds
b66a506d3007   postgres:12                                        "docker-entrypoint.s…"   28 seconds ago   Up 26 seconds   0.0.0.0:5432->5432/tcp                                                                                                                                              tools_postgres_1
```

**NOTE**

> The Makefile assumes that the image you built is tagged with your current branch. This allows you to build images for different contexts or branches. When starting the containers, you can choose a specific branch by setting `COMPOSE_TAG=<branch name>` in your environment.

> For example, you might be working in a feature branch, but you want to run the containers using the `devel` image you built previously. To do that, start the containers using the following command: `$ COMPOSE_TAG=devel make docker-compose`

##### Wait for migrations to complete

The first time you start the environment, database migrations need to run in order to build the PostgreSQL database. It will take a few moments, but eventually you will see output in your terminal session that looks like the following:

```bash
awx_1 | Operations to perform:
awx_1 |   Synchronize unmigrated apps: solo, api, staticfiles, debug_toolbar, messages, channels, django_extensions, ui, rest_framework, polymorphic
awx_1 |   Apply all migrations: sso, taggit, sessions, sites, kombu_transport_django, social_auth, contenttypes, auth, conf, main
awx_1 | Synchronizing apps without migrations:
awx_1 |   Creating tables...
awx_1 |   Running deferred SQL...
awx_1 |   Installing custom SQL...
awx_1 | Running migrations:
awx_1 |   Rendering model states... DONE
awx_1 |   Applying contenttypes.0001_initial... OK
awx_1 |   Applying contenttypes.0002_remove_content_type_name... OK
awx_1 |   Applying auth.0001_initial... OK
awx_1 |   Applying auth.0002_alter_permission_name_max_length... OK
awx_1 |   Applying auth.0003_alter_user_email_max_length... OK
awx_1 |   Applying auth.0004_alter_user_username_opts... OK
awx_1 |   Applying auth.0005_alter_user_last_login_null... OK
awx_1 |   Applying auth.0006_require_contenttypes_0002... OK
awx_1 |   Applying taggit.0001_initial... OK
awx_1 |   Applying taggit.0002_auto_20150616_2121... OK
awx_1 |   Applying main.0001_initial... OK
awx_1 |   Applying main.0002_squashed_v300_release... OK
awx_1 |   Applying main.0003_squashed_v300_v303_updates... OK
awx_1 |   Applying main.0004_squashed_v310_release... OK
awx_1 |   Applying conf.0001_initial... OK
awx_1 |   Applying conf.0002_v310_copy_tower_settings... OK
...
```

Once migrations are completed, you can begin using AWX.

#### Start from the container shell

Often you'll want to start the development environment without immediately starting all of the services in the *awx* container, and instead be taken directly to a shell. You can do this with the following:

```bash
(host)$ make docker-compose-test
```

Using `docker exec`, this will create a session in the running *awx* container, and place you at a command prompt, where you can run shell commands inside the container.

If you want to start and use the development environment, you'll first need to bootstrap it by running the following command:

```bash
(container)# /usr/bin/bootstrap_development.sh
```

The above will do all the setup tasks, including running database migrations, so it may take a couple of minutes. Once it's done, it will drop you back to the shell.

To launch all developer services:

```bash
(container)# /usr/bin/launch_awx.sh
```

`launch_awx.sh` also calls `bootstrap_development.sh`, so if all you are doing is launching the supervisor to start all services, you don't need to call `bootstrap_development.sh` first.

### Post Build Steps

Before you can log in and use the system, you will need to create an admin user. Optionally, you may also want to load some demo data.

##### Start a shell

To create the admin user, and to load demo data, you first need to start a shell session on the *awx* container. In a new terminal session, use the `docker exec` command as follows to start the shell session:

```bash
(host)$ docker exec -it tools_awx_1 bash
```

This creates a session in the *awx* container, just as if you were using `ssh`, and allows you to execute commands within the running container.

##### Create an admin user

Before you can log into AWX, you need to create an admin user. With this user you will be able to create more users, and begin configuring the server. From within the container shell, run the following command:

```bash
(container)# awx-manage createsuperuser
```

You will be prompted for a username, an email address, and a password, and you will be asked to confirm the password. The email address is not important, so just enter something that looks like an email address. Remember the username and password, as you will use them to log into the web interface for the first time.

##### Load demo data

You can optionally load some demo data. This will create a demo project, inventory, and job template. From within the container shell, run the following to load the data:

```bash
(container)# awx-manage create_preload_data
```

**NOTE**

> This information will persist in the database running in the `tools_postgres_1` container, until the container is removed. You may periodically need to recreate this container, and thus the database, if the database schema changes in an upstream commit (one possible sketch follows this note).
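A minimal sketch of that recreate step using plain Docker commands (the container name matches the `docker ps` listing above; if your compose setup keeps data in a named volume, remove that as well):

```bash
# Remove the development database container; the next
# `make docker-compose` run recreates it and re-runs migrations.
docker stop tools_postgres_1
docker rm tools_postgres_1
```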
##### Building API Documentation
### Building API Documentation

AWX includes support for building [Swagger/OpenAPI documentation](https://swagger.io). To build the documentation locally, run:

@@ -284,7 +94,7 @@ is an example of one such service.

You can now log into the AWX web interface at [https://localhost:8043](https://localhost:8043), and access the API directly at [https://localhost:8043/api/](https://localhost:8043/api/).

To log in, use the admin user and password you created above in [Create an admin user](#create-an-admin-user).
[Create an admin user](./tools/docker-compose/README.md#create-an-admin-user) if needed.

### Purging containers and images

@@ -304,7 +114,7 @@ Fixing bugs, adding translations, and updating the documentation are always appr

**NOTE**

> If you work in a part of the codebase that is going through active development, your changes may be rejected, or you may be asked to `rebase`. A good idea before starting work is to have a discussion with us in the `#ansible-awx` channel on irc.freenode.net, or on the [mailing list](https://groups.google.com/forum/#!forum/awx-project).
> If you work in a part of the codebase that is going through active development, your changes may be rejected, or you may be asked to `rebase`. A good idea before starting work is to have a discussion with us in the `#ansible-awx` channel on webchat.freenode.net, or on the [mailing list](https://groups.google.com/forum/#!forum/awx-project).

**NOTE**

@@ -317,7 +127,7 @@ Fixes and Features for AWX will go through the Github pull request process. Subm

Here are a few things you can do to help the visibility of your change, and increase the likelihood that it will be accepted:

* No issues when running linters/code checkers
  * Python: flake8: `(container)/awx_devel$ make flake8`
  * Python: black: `(container)/awx_devel$ make black`
  * Javascript: JsHint: `(container)/awx_devel$ make jshint`
* No issues from unit tests
  * Python: py.test: `(container)/awx_devel$ make test`

@@ -326,7 +136,7 @@ Here are a few things you can do to help the visibility of your change, and incr

* Make the smallest change possible
* Write good commit messages. See [How to write a Git commit message](https://chris.beams.io/posts/git-commit/).

It's generally a good idea to discuss features with us first by engaging us in the `#ansible-awx` channel on irc.freenode.net, or on the [mailing list](https://groups.google.com/forum/#!forum/awx-project).
It's generally a good idea to discuss features with us first by engaging us in the `#ansible-awx` channel on webchat.freenode.net, or on the [mailing list](https://groups.google.com/forum/#!forum/awx-project).

We like to keep our commit history clean, and will require resubmission of pull requests that contain merge commits. Use `git pull --rebase` rather than `git pull`, and `git rebase` rather than `git merge`; a sketch follows.
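As a concrete sketch of that rebase-based flow (assuming your fork is the `origin` remote and ansible/awx is configured as `upstream`; the branch name is a placeholder):

```bash
# Replay your branch on top of the latest upstream devel instead of
# merging, so the PR stays free of merge commits.
git fetch upstream
git rebase upstream/devel

# Update the PR branch on your fork after the rewrite.
git push --force-with-lease origin my-feature-branch
```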
@@ -335,7 +145,7 @@ Sometimes it might take us a while to fully review your PR. We try to keep the `

All submitted PRs will have the linter and unit tests run against them via Zuul, and the status reported in the PR.

## PR Checks ran by Zuul
## PR Checks run by Zuul

Zuul jobs for awx are defined in the [zuul-jobs](https://github.com/ansible/zuul-jobs) repo.

Zuul runs the following checks that must pass:
INSTALL.md (667 changes)

@@ -1,643 +1,122 @@
Table of Contents
=================

* [Installing AWX](#installing-awx)
* [The AWX Operator](#the-awx-operator)
* [Quickstart with minikube](#quickstart-with-minikube)
* [Starting minikube](#starting-minikube)
* [Deploying the AWX Operator](#deploying-the-awx-operator)
* [Verifying the Operator Deployment](#verifying-the-operator-deployment)
* [Deploy AWX](#deploy-awx)
* [Accessing AWX](#accessing-awx)
* [Installing the AWX CLI](#installing-the-awx-cli)
* [Building the CLI Documentation](#building-the-cli-documentation)

# Installing AWX

This document provides a guide for installing AWX.

:warning: NOTE |
--- |
If you're installing an older release of AWX (prior to 18.0), these instructions have changed. Take a look at your version-specific instructions, e.g., for AWX 17.0.1, see: [https://github.com/ansible/awx/blob/17.0.1/INSTALL.md](https://github.com/ansible/awx/blob/17.0.1/INSTALL.md) If you're attempting to migrate an older Docker-based AWX installation, see: [Migrating Data from Local Docker](https://github.com/ansible/awx/blob/devel/tools/docker-compose/docs/data_migration.md) |

## Table of contents
## The AWX Operator

- [Installing AWX](#installing-awx)
  * [Getting started](#getting-started)
    + [Clone the repo](#clone-the-repo)
    + [AWX branding](#awx-branding)
    + [Prerequisites](#prerequisites)
    + [System Requirements](#system-requirements)
    + [Choose a deployment platform](#choose-a-deployment-platform)
    + [Official vs Building Images](#official-vs-building-images)
  * [Upgrading from previous versions](#upgrading-from-previous-versions)
  * [OpenShift](#openshift)
    + [Prerequisites](#prerequisites-1)
    + [Pre-install steps](#pre-install-steps)
      - [Deploying to Minishift](#deploying-to-minishift)
      - [PostgreSQL](#postgresql)
    + [Run the installer](#run-the-installer)
    + [Post-install](#post-install)
    + [Accessing AWX](#accessing-awx)
  * [Kubernetes](#kubernetes)
    + [Prerequisites](#prerequisites-2)
    + [Pre-install steps](#pre-install-steps-1)
    + [Configuring Helm](#configuring-helm)
    + [Run the installer](#run-the-installer-1)
    + [Post-install](#post-install-1)
    + [Accessing AWX](#accessing-awx-1)
    + [SSL Termination](#ssl-termination)
  * [Docker-Compose](#docker-compose)
    + [Prerequisites](#prerequisites-3)
    + [Pre-install steps](#pre-install-steps-2)
      - [Deploying to a remote host](#deploying-to-a-remote-host)
      - [Inventory variables](#inventory-variables)
      - [Docker registry](#docker-registry)
      - [Proxy settings](#proxy-settings)
      - [PostgreSQL](#postgresql-1)
    + [Run the installer](#run-the-installer-2)
    + [Post-install](#post-install-2)
    + [Accessing AWX](#accessing-awx-2)
- [Installing the AWX CLI](#installing-the-awx-cli)
  * [Building the CLI Documentation](#building-the-cli-documentation)

Starting in version 18.0, the [AWX Operator](https://github.com/ansible/awx-operator) is the preferred way to install AWX.

AWX can also alternatively be installed and [run in Docker](./tools/docker-compose/README.md), but this install path is only recommended for development/test-oriented deployments, and has no official published release.

## Getting started
### Quickstart with minikube

### Clone the repo
If you don't have an existing OpenShift or Kubernetes cluster, minikube is a fast and easy way to get up and running.

If you have not already done so, you will need to clone, or create a local copy of, the [AWX repo](https://github.com/ansible/awx). We generally recommend that you view the releases page:
To install minikube, follow the steps in their [documentation](https://minikube.sigs.k8s.io/docs/start/).

https://github.com/ansible/awx/releases
#### Starting minikube

...and clone the latest stable release, e.g.,

`git clone -b x.y.z https://github.com/ansible/awx.git`

Please note that deploying from `HEAD` (or the latest commit) is **not** stable, and that if you want to do this, you should proceed at your own risk (also, see the section #official-vs-building-images for building your own image).

For more on how to clone the repo, view [git clone help](https://git-scm.com/docs/git-clone).

Once you have a local copy, run the commands in the following sections from the root of the project tree.

### AWX branding

You can optionally install the AWX branding assets from the [awx-logos repo](https://github.com/ansible/awx-logos). Prior to installing, please review and agree to the [trademark guidelines](https://github.com/ansible/awx-logos/blob/master/TRADEMARKS.md).

To install the assets, clone the `awx-logos` repo so that it is next to your `awx` clone. As you progress through the installation steps, you'll be setting variables in the [inventory](./installer/inventory) file. To include the assets in the build, set `awx_official=true`.

### Prerequisites

Before you can run a deployment, you'll need the following installed in your local environment:

- [Ansible](http://docs.ansible.com/ansible/latest/intro_installation.html) Requires Version 2.8+
- [Docker](https://docs.docker.com/engine/installation/)
  + A recent version
- [docker](https://pypi.org/project/docker/) Python module
  + This is incompatible with `docker-py`. If you have previously installed `docker-py`, please uninstall it (see the sketch after this list).
  + We use this module instead of `docker-py` because it is what the `docker-compose` Python module requires.
- [community.general.docker_image collection](https://docs.ansible.com/ansible/latest/collections/community/general/docker_image_module.html)
  + This is only required if you are using Ansible >= 2.10
- [GNU Make](https://www.gnu.org/software/make/)
- [Git](https://git-scm.com/) Requires Version 1.8.4+
- Python 3.6+
- [Node 14.x LTS version](https://nodejs.org/en/download/)
  + This is only required if you're [building your own container images](#official-vs-building-images) with `use_container_for_build=false`
- [NPM 6.x LTS](https://docs.npmjs.com/)
  + This is only required if you're [building your own container images](#official-vs-building-images) with `use_container_for_build=false`
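The `docker` Python module bullet above implies removing one package and installing another; a minimal sketch (the exact pip invocation may vary by system):

```bash
# docker-py conflicts with the docker module required by docker-compose;
# remove it first, then install the supported module.
pip3 uninstall docker-py
pip3 install docker
```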
### System Requirements
|
||||
|
||||
The system that runs the AWX service will need to satisfy the following requirements
|
||||
|
||||
- At least 4GB of memory
|
||||
- At least 2 cpu cores
|
||||
- At least 20GB of space
|
||||
- Running Docker, Openshift, or Kubernetes
|
||||
- If you choose to use an external PostgreSQL database, please note that the minimum version is 10+.
|
||||
|
||||
### Choose a deployment platform
|
||||
|
||||
We currently support running AWX as a containerized application using Docker images deployed to either an OpenShift cluster, a Kubernetes cluster, or docker-compose. The remainder of this document will walk you through the process of building the images, and deploying them to either platform.
|
||||
|
||||
The [installer](./installer) directory contains an [inventory](./installer/inventory) file, and a playbook, [install.yml](./installer/install.yml). You'll begin by setting variables in the inventory file according to the platform you wish to use, and then you'll start the image build and deployment process by running the playbook.
|
||||
|
||||
In the sections below, you'll find deployment details and instructions for each platform:
|
||||
- [OpenShift](#openshift)
|
||||
- [Kubernetes](#kubernetes)
|
||||
- [Docker Compose](#docker-compose).
|
||||
|
||||
### Official vs Building Images

When installing AWX you have the option of building your own images or using the images provided on DockerHub (see [awx](https://hub.docker.com/r/ansible/awx/)).

This is controlled by the following variables in the `inventory` file:

```
dockerhub_base=ansible
dockerhub_version=latest
```

If these variables are present, then all deployments will use these hosted images. If the variables are not present, then the images will be built during the install.

*dockerhub_base*

> The base location on DockerHub where the images are hosted (by default this pulls a container image named `ansible/awx:tag`)

*dockerhub_version*

> Multiple versions are provided. `latest` always pulls the most recent. You may also select version numbers at different granularities: 1, 1.0, 1.0.1, 1.0.0.123

*use_container_for_build*

> Use a local distribution build container image for building the AWX package. This is helpful if you don't want to bother installing the build-time dependencies, as that is already taken care of for you.

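For example, to deploy the hosted images but pin them to a specific release rather than `latest`, you could set the following (the tag shown is illustrative; pick one that actually exists on DockerHub):

```
dockerhub_base=ansible
dockerhub_version=17.1.0
```
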
## Upgrading from previous versions

Upgrading AWX involves rerunning the install playbook. Download a newer release from [https://github.com/ansible/awx/releases](https://github.com/ansible/awx/releases) and re-populate the inventory file with your customized variables.

For convenience, you can create a file called `vars.yml`:

```
admin_password: 'adminpass'
pg_password: 'pgpass'
secret_key: 'mysupersecret'
```

And pass it to the installer:

```
$ ansible-playbook -i inventory install.yml -e @vars.yml
```

## OpenShift

### Prerequisites

To complete a deployment to OpenShift, you will need access to an OpenShift cluster. For demo and testing purposes, you can use [Minishift](https://github.com/minishift/minishift) to create a single node cluster running inside a virtual machine.

When using OpenShift to deploy AWX, make sure you have the privileges required to add the 'privileged' security context, otherwise the installation will fail. The privileged context is needed because [the bubblewrap tool](https://github.com/containers/bubblewrap) is used to add an additional layer of security when running containers.

You will also need to have the `oc` command in your PATH. The `install.yml` playbook will call out to `oc` when logging into, and creating objects on, the cluster.

The default resource requests per deployment require:

> Memory: 6GB
> CPU: 3 cores

This can be tuned by overriding the variables found in [/installer/roles/kubernetes/defaults/main.yml](/installer/roles/kubernetes/defaults/main.yml). Special care should be taken when doing this, as undersized instances will experience crashes and resource exhaustion.

For more detail on how resource requests are formed, see: [https://docs.openshift.com/container-platform/latest/dev_guide/compute_resources.html#dev-compute-resources](https://docs.openshift.com/container-platform/latest/dev_guide/compute_resources.html#dev-compute-resources)

### Pre-install steps

Before starting the install, review the [inventory](./installer/inventory) file, and uncomment and provide values for the following variables found in the `[all:vars]` section:

*openshift_host*

> IP address or hostname of the OpenShift cluster. If you're using Minishift, this will be the value returned by `minishift ip`.

*openshift_skip_tls_verify*

> Boolean. Set to True if using self-signed certs.

*openshift_project*

> Name of the OpenShift project that will be created, and used as the namespace for the AWX app. Defaults to *awx*.

*openshift_user*

> Username of the OpenShift user that will create the project, and deploy the application. Defaults to *developer*.

*openshift_pg_emptydir*

> Boolean. Set to True to use an emptyDir volume when deploying the PostgreSQL pod. Note: this should only be used for demo and testing purposes.

*docker_registry*

> IP address and port, or URL, for accessing a registry that the OpenShift cluster can access. Defaults to *172.30.1.1:5000*, the internal registry delivered with Minishift. This is not needed if you are using official hosted images.

*docker_registry_repository*

> Namespace to use when pushing and pulling images to and from the registry. Generally this will match the project name. It defaults to *awx*. This is not needed if you are using official hosted images.

*docker_registry_username*

> Username of the user that will push images to the registry. Will generally match the *openshift_user* value. Defaults to *developer*. This is not needed if you are using official hosted images.

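Putting these together, a minimal `[all:vars]` block for a Minishift-based install might look like the following sketch (all values are illustrative; substitute your own cluster details):

```
openshift_host=https://192.168.64.2:8443
openshift_skip_tls_verify=True
openshift_project=awx
openshift_user=developer
openshift_pg_emptydir=True
docker_registry=172.30.1.1:5000
docker_registry_repository=awx
docker_registry_username=developer
```
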
#### Deploying to Minishift

Install Minishift by following the [installation guide](https://docs.openshift.org/latest/minishift/getting-started/installing.html).

The recommended minimum resources for your Minishift VM:

```bash
$ minishift start --cpus=4 --memory=8GB
```

The Minishift VM contains a Docker daemon, which you can use to build the AWX images. This is generally the approach you should take, and we recommend doing so. To use this instance, run the following command to set up your environment:

```bash
# Set the DOCKER environment variables to point to the Minishift VM
$ eval $(minishift docker-env)
```

**Note**

> If you choose not to use the Docker instance running inside the VM, and build the images externally, you will have to enable the OpenShift cluster to access the images. This involves pushing the images to an external Docker registry and granting the cluster access to it, or exposing the internal registry and pushing the images into it.

#### PostgreSQL

By default, AWX will deploy a PostgreSQL pod inside of your cluster. You will need to create a [Persistent Volume Claim](https://docs.openshift.org/latest/dev_guide/persistent_volumes.html), which is named `postgresql` by default and can be overridden by setting the `openshift_pg_pvc_name` variable. For testing and demo purposes, you may set `openshift_pg_emptydir=yes` instead.

If you wish to use an external database, set the value of `pg_hostname` in the inventory file, and update `pg_username`, `pg_password`, `pg_admin_password`, `pg_database`, and `pg_port` with the connection information. When `pg_hostname` is set, the installer assumes you have configured a database at that location and will not launch the PostgreSQL pod.

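As a sketch, pointing the deployment at an external database means filling in values like these in the inventory (host and credentials are placeholders):

```
pg_hostname=postgres.example.com
pg_username=awx
pg_password=awxpass
pg_admin_password=postgrespass
pg_database=awx
pg_port=5432
```
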
### Run the installer

To start the install, you will pass two *extra* variables on the command line. The first is *openshift_password*, which is the password for the *openshift_user*, and the second is *docker_registry_password*, which is the password associated with *docker_registry_username*.

If you're using the OpenShift internal registry, then you'll pass an access token for the *docker_registry_password* value, rather than a password. The `oc whoami -t` command will generate the required token, as long as you're logged into the cluster via `oc login`.

Run the following command (*docker_registry_password* is optional if using official images):

```bash
# Start the install
$ ansible-playbook -i inventory install.yml -e openshift_password=developer -e docker_registry_password=$(oc whoami -t)
```

### Post-install

After the playbook run completes, check the status of the deployment by running `oc get pods`:

```bash
# View the running pods
$ oc get pods

NAME                   READY     STATUS    RESTARTS   AGE
awx-3886581826-5mv0l   4/4       Running   0          8s
postgresql-1-l85fh     1/1       Running   0          20m
```

In the above example, the name of the AWX pod is `awx-3886581826-5mv0l`. Before accessing the AWX web interface, setup tasks and database migrations need to complete. These tasks are running in the `awx_task` container inside the AWX pod. To monitor their status, tail the container's STDOUT by running the following command, replacing the AWX pod name with the pod name from your environment:

```bash
# Follow the awx_task log output
$ oc logs -f awx-3886581826-5mv0l -c awx-celery
```

You will see the following output, indicating that database migrations are running:

```bash
Using /etc/ansible/ansible.cfg as config file
127.0.0.1 | SUCCESS => {
    "changed": false,
    "db": "awx"
}
Operations to perform:
  Synchronize unmigrated apps: solo, api, staticfiles, messages, channels, django_extensions, ui, rest_framework, polymorphic
  Apply all migrations: sso, taggit, sessions, sites, kombu_transport_django, social_auth, contenttypes, auth, conf, main
Synchronizing apps without migrations:
  Creating tables...
    Running deferred SQL...
    Installing custom SQL...
Running migrations:
  Rendering model states... DONE
  Applying contenttypes.0001_initial... OK
  Applying contenttypes.0002_remove_content_type_name... OK
  Applying auth.0001_initial... OK
  Applying auth.0002_alter_permission_name_max_length... OK
  Applying auth.0003_alter_user_email_max_length... OK
  Applying auth.0004_alter_user_username_opts... OK
  Applying auth.0005_alter_user_last_login_null... OK
  Applying auth.0006_require_contenttypes_0002... OK
  Applying taggit.0001_initial... OK
  Applying taggit.0002_auto_20150616_2121... OK
  ...
```

When you see output similar to the following, you'll know that database migrations have completed, and you can access the web interface:

```bash
Python 2.7.5 (default, Nov 6 2016, 00:28:07)
[GCC 4.8.5 20150623 (Red Hat 4.8.5-11)] on linux2
Type "help", "copyright", "credits" or "license" for more information.
(InteractiveConsole)

>>> <User: admin>
>>> Default organization added.
Demo Credential, Inventory, and Job Template added.
Successfully registered instance awx-3886581826-5mv0l
(changed: True)
Creating instance group tower
Added instance awx-3886581826-5mv0l to tower
```

Once database migrations complete, the web interface will be accessible.

### Accessing AWX

The AWX web interface is running in the AWX pod, behind the `awx-web-svc` service. To view the service, and its port value, run the following command:

```bash
# View available services
$ oc get services

NAME          CLUSTER-IP      EXTERNAL-IP   PORT(S)          AGE
awx-web-svc   172.30.111.74   <nodes>       8052:30083/TCP   37m
postgresql    172.30.102.9    <none>        5432/TCP         38m
```

The deployment process creates a route, `awx-web-svc`, to expose the service. How the ingress is actually created will vary depending on your environment, and how the cluster is configured. You can view the route, and the external IP address and hostname assigned to it, by running the following command:

```bash
# View available routes
$ oc get routes

NAME          HOST/PORT                             PATH      SERVICES      PORT      TERMINATION   WILDCARD
awx-web-svc   awx-web-svc-awx.192.168.64.2.nip.io             awx-web-svc   http      edge/Allow    None
```

The above example is taken from a Minishift instance. From a web browser, use `https` to access the `HOST/PORT` value from your environment. Using the above example, the URL to access the server would be [https://awx-web-svc-awx.192.168.64.2.nip.io](https://awx-web-svc-awx.192.168.64.2.nip.io).

Once you access the AWX server, you will be prompted with a login dialog. The default administrator username is `admin`, and the password is `password`.

## Kubernetes

### Prerequisites

A Kubernetes deployment will require you to have access to a Kubernetes cluster as well as the following tools:

- [kubectl](https://kubernetes.io/docs/tasks/tools/install-kubectl/)
- [helm](https://helm.sh/docs/intro/quickstart/)

The installation program will reference `kubectl` directly. `helm` is only necessary if you are letting the installer configure PostgreSQL for you.

The default resource requests per pod require:

> Memory: 6GB
> CPU: 3 cores

This can be tuned by overriding the variables found in [/installer/roles/kubernetes/defaults/main.yml](/installer/roles/kubernetes/defaults/main.yml). Special care should be taken when doing this, as undersized instances will experience crashes and resource exhaustion.

For more detail on how resource requests are formed, see: [https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/](https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/)

### Pre-install steps

Before starting the install process, review the [inventory](./installer/inventory) file, and uncomment and provide values for the following variables found in the `[all:vars]` section. Make sure the OpenShift and standalone Docker sections are commented out:

*kubernetes_context*

> Prior to running the installer, make sure you've configured the context for the cluster you'll be installing to. This is how the installer knows which cluster to connect to and what authentication to use.

*kubernetes_namespace*

> Name of the Kubernetes namespace where the AWX resources will be installed. This will be created if it doesn't exist.

*docker_registry_*

> These settings should be used if building your own base images. You'll need access to an external registry, and you are responsible for making sure your Kubernetes cluster can talk to it and use it. If these are undefined and the `dockerhub_*` configuration settings are uncommented, then the images will be pulled from DockerHub instead.

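If you're unsure of the context name, plain `kubectl` can report what's configured locally:

```bash
$ kubectl config current-context
```

The result then drops straight into the inventory; the namespace value below is illustrative:

```
kubernetes_context=<value printed above>
kubernetes_namespace=awx
```
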
### Configuring Helm

If you want the AWX installer to manage creating the database pod (rather than installing and configuring PostgreSQL on your own), then you will need a working `helm` installation; you can find details here: [https://helm.sh/docs/intro/quickstart/](https://helm.sh/docs/intro/quickstart/).

You do not need to create a [Persistent Volume Claim](https://docs.openshift.org/latest/dev_guide/persistent_volumes.html), as Helm does it for you. However, an existing one may be used by setting the `pg_persistence_existingclaim` variable.

Newer Kubernetes clusters with RBAC enabled will need a service account created for Helm; follow the instructions here: [https://helm.sh/docs/topics/rbac/](https://helm.sh/docs/topics/rbac/)

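If you already have a claim you want PostgreSQL to use, referencing it is a one-line inventory setting (the claim name is a placeholder):

```
pg_persistence_existingclaim=my-postgres-claim
```
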
### Run the installer

After making changes to the `inventory` file, use `ansible-playbook` to begin the install:

```bash
$ ansible-playbook -i inventory install.yml
```

### Post-install

After the playbook run completes, check the status of the deployment by running `kubectl get pods --namespace awx` (replace `awx` with the namespace you used):

```bash
# View the running pods; it may take a few minutes for everything to be marked in the Running state
$ kubectl get pods --namespace awx

NAME                             READY     STATUS    RESTARTS   AGE
awx-2558692395-2r8ss             4/4       Running   0          29s
awx-postgresql-355348841-kltkn   1/1       Running   0          1m
```

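If the pods are still coming up, you can watch them converge with plain `kubectl` (nothing AWX-specific here):

```bash
$ kubectl get pods --namespace awx --watch
```
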
### Accessing AWX

The AWX web interface is running in the AWX pod behind the `awx-web-svc` service:

```bash
# View available services
$ kubectl get svc --namespace awx

NAME             TYPE        CLUSTER-IP     EXTERNAL-IP   PORT(S)        AGE
awx-postgresql   ClusterIP   10.7.250.208   <none>        5432/TCP       2m
awx-web-svc      NodePort    10.7.241.35    <none>        80:30177/TCP   1m
```

The deployment process also creates an `Ingress` named `awx-web-svc`. Some Kubernetes cloud providers will automatically handle routing configuration when an Ingress is created; others may require that you configure it more explicitly. You can see what Kubernetes knows about it with:

```bash
$ kubectl get ing --namespace awx

NAME          HOSTS     ADDRESS      PORTS     AGE
awx-web-svc   *         35.227.x.y   80        3m
```

If your provider is able to allocate an IP address from the Ingress controller, then you can navigate to the address and access the AWX interface. For some providers it can take a few minutes for the address to be allocated and become accessible; for others, manual intervention may be required.

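If no Ingress controller is wired up yet, one way to reach the interface for a quick test (our suggestion, not an installer feature) is a local port-forward to the service:

```bash
# Forward local port 8080 to the awx-web-svc service, then browse http://localhost:8080
$ kubectl port-forward --namespace awx service/awx-web-svc 8080:80
```
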
### SSL Termination

Unlike OpenShift's `Route`, the Kubernetes `Ingress` doesn't yet handle SSL termination. As such, the default configuration will only expose AWX through HTTP on port 80. You are responsible for configuring SSL support until support is added (either to Kubernetes or AWX itself).

### Deploying the AWX Operator

Alternatively, on a Kubernetes cluster such as minikube, you can let the AWX Operator manage the deployment for you. For a comprehensive overview of features, see [README.md](https://github.com/ansible/awx-operator/blob/devel/README.md) in the awx-operator repo. The following steps are the bare minimum to get AWX up and running.

Once you have installed minikube, run the following command to start it. You may wish to customize these options:

```
$ minikube start --cpus=4 --memory=8g --addons=ingress
```

Then deploy the operator into the cluster:

```
$ minikube kubectl -- apply -f https://raw.githubusercontent.com/ansible/awx-operator/devel/deploy/awx-operator.yaml
```

#### Verifying the Operator Deployment

After a few seconds, the operator should be up and running. Verify it by running the following command:

```
$ minikube kubectl get pods
NAME                           READY   STATUS    RESTARTS   AGE
awx-operator-7c78bfbfd-xb6th   1/1     Running   0          11s
```

#### Deploy AWX

Once the Operator is running, you can now deploy AWX by creating a simple YAML file:

```
$ cat myawx.yml
---
apiVersion: awx.ansible.com/v1beta1
kind: AWX
metadata:
  name: awx
spec:
  tower_ingress_type: Ingress
```

And then creating the AWX object in the Kubernetes API:

```
$ minikube kubectl -- apply -f myawx.yml
awx.awx.ansible.com/awx created
```

After creating the AWX object in the Kubernetes API, the operator will begin running its reconciliation loop. To see what's going on, you can tail the logs of the operator pod (note that your pod name will be different):

```
$ minikube kubectl -- logs -f awx-operator-7c78bfbfd-xb6th
```

After a few seconds, you will see the database and application pods show up. On a fresh system, it may take a few minutes for the container images to download:

```
$ minikube kubectl get pods
NAME                           READY   STATUS    RESTARTS   AGE
awx-5ffbfd489c-bvtvf           3/3     Running   0          2m54s
awx-operator-7c78bfbfd-xb6th   1/1     Running   0          6m42s
awx-postgres-0                 1/1     Running   0          2m58s
```

#### Accessing AWX

To access the AWX UI, you'll need to grab the service url from minikube:

```
$ minikube service awx-service --url
http://192.168.59.2:31868
```

On fresh installs, you will see the "AWX is currently upgrading." page until database migrations finish. Once you are redirected to the login screen, you can log in by obtaining the generated admin password (note: do not copy the trailing `%`):

```
$ minikube kubectl -- get secret awx-admin-password -o jsonpath='{.data.password}' | base64 --decode
b6ChwVmqEiAsil2KSpH4xGaZPeZvWnWj%
```

Now you can log in at the URL above with the username "admin" and the password above. Happy Automating!

## Docker-Compose

### Prerequisites

- [Docker](https://docs.docker.com/engine/installation/) on the host where AWX will be deployed. After installing Docker, the Docker service must be started (depending on your OS, you may have to add the local user that uses Docker to the `docker` group; refer to the documentation for details)
- [docker-compose](https://pypi.org/project/docker-compose/) Python module
  + This also installs the `docker` Python module, which is incompatible with `docker-py`. If you have previously installed `docker-py`, please uninstall it.
- [Docker Compose](https://docs.docker.com/compose/install/)

### Pre-install steps

#### Deploying to a remote host

By default, the delivered [installer/inventory](./installer/inventory) file will deploy AWX to the local host. It is possible, however, to deploy to a remote host. The [installer/install.yml](./installer/install.yml) playbook can be used to build images on the local host, and ship the built images to, and run deployment tasks on, a remote host. To do this, modify the [installer/inventory](./installer/inventory) file by commenting out `localhost` and adding the remote host.

For example, suppose you wish to build images locally on your CI/CD host, and deploy them to a remote host named *awx-server*. To do this, add *awx-server* to the [installer/inventory](./installer/inventory) file, and comment out or remove `localhost`, as demonstrated by the following:

```
# localhost ansible_connection=local
awx-server

[all:vars]
...
```

In the above example, image build tasks will be delegated to `localhost`, which is typically where the clone of the AWX project exists. Built images will be archived, copied to the remote host, and imported into the remote Docker image cache. Tasks to start the AWX containers will then execute on the remote host.

If you choose to use the official images, then the remote host will be the one to pull those images.

**Note**

> You may also want to set additional variables to control how Ansible connects to the host; a sketch follows after these notes. For more information about this, view [Behavioral Inventory Parameters](http://docs.ansible.com/ansible/latest/intro_inventory.html#id12).

> As mentioned above, in [Prerequisites](#prerequisites-1), the prerequisites are required on the remote host.

> When deploying to a remote host, the playbook does not execute tasks with the `become` option. For this reason, make sure the user that connects to the remote host has privileges to run the `docker` command. This typically means that non-privileged users need to be part of the `docker` group.

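As a sketch, a remote-host entry combined with the behavioral parameters mentioned in the note might look like this (host address and user are placeholders):

```
# localhost ansible_connection=local
awx-server ansible_host=192.0.2.10 ansible_user=deploy

[all:vars]
...
```
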
#### Inventory variables

Before starting the install process, review the [inventory](./installer/inventory) file, and uncomment and provide values for the following variables found in the `[all:vars]` section:

*postgres_data_dir*

> If you're using the default PostgreSQL container (see [PostgreSQL](#postgresql-1) below), provide a path that can be mounted to the container, and where the database can be persisted.

*host_port*

> Provide a port number that can be mapped from the Docker daemon host to the web server running inside the AWX container. If undefined, no port will be exposed. Defaults to *80*.

*host_port_ssl*

> Provide a port number that can be mapped from the Docker daemon host to the web server running inside the AWX container for SSL support. If undefined, no port will be exposed. Defaults to *443*, and only works if you also set `ssl_certificate` (see below).

*ssl_certificate*

> Optionally, provide the path to a file that contains a certificate and its private key. This needs to be a `.pem` file.

*docker_compose_dir*

> When using docker-compose, the directory in which the `docker-compose.yml` file will be created (default: `~/.awx/awxcompose`).

*custom_venv_dir*

> Path to custom Python virtual environments on the local host that will be passed into the containers at install time.

*ca_trust_dir*

> If you're using a CA that is not publicly trusted, provide the path on the host where the CA certificates are stored.

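Pulling these together, a typical local docker-compose `[all:vars]` block might look like this sketch (paths and ports are illustrative):

```
postgres_data_dir=/opt/awx/pgdocker
host_port=80
host_port_ssl=443
ssl_certificate=/etc/awx/awx-cert.pem
docker_compose_dir=~/.awx/awxcompose
```
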
#### Docker registry

If you wish to tag and push built images to a Docker registry, set the following variables in the inventory file:

*docker_registry*

> IP address and port, or URL, for accessing a registry.

*docker_registry_repository*

> Namespace to use when pushing and pulling images to and from the registry. Defaults to *awx*.

*docker_registry_username*

> Username of the user that will push images to the registry. Defaults to *developer*.

**Note**

> These settings are ignored if using official images

#### Proxy settings

*http_proxy*

> IP address and port, or URL, of the proxy to use for HTTP traffic.

*https_proxy*

> IP address and port, or URL, of the proxy to use for HTTPS traffic.

*no_proxy*

> IP addresses or URLs to exclude from proxying.

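For example (proxy host and exclusions are placeholders):

```
http_proxy=http://proxy.example.com:3128
https_proxy=http://proxy.example.com:3128
no_proxy=localhost,127.0.0.1,.example.com
```
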
#### PostgreSQL

AWX requires access to a PostgreSQL database, and by default, one will be created and deployed in a container, and data will be persisted to a host volume. In this scenario, you must set the value of `postgres_data_dir` to a path that can be mounted to the container. When the container is stopped, the database files will still exist in the specified path.

If you wish to use an external database, in the inventory file, set the value of `pg_hostname`, and update `pg_username`, `pg_password`, `pg_admin_password`, `pg_database`, and `pg_port` with the connection information.

### Run the installer

If you are not pushing images to a Docker registry, start the install by running the following:

```bash
# Set the working directory to installer
$ cd installer

# Run the Ansible playbook
$ ansible-playbook -i inventory install.yml
```

If you're pushing built images to a repository, then use the `-e` option to pass the registry password as follows, replacing *password* with the password of the username assigned to `docker_registry_username` (note that you will also need to remove `dockerhub_base` and `dockerhub_version` from the inventory file):

```bash
# Set the working directory to installer
$ cd installer

# Run the Ansible playbook
$ ansible-playbook -i inventory -e docker_registry_password=password install.yml
```

### Post-install

After the playbook run completes, Docker starts a series of containers that provide the services that make up AWX. You can view the running containers using the `docker ps` command.

If you're deploying using Docker Compose, container names will be prefixed by the name of the folder where the `docker-compose.yml` file is created (by default, `awx`).

Immediately after the containers start, the *awx_task* container will perform required setup tasks, including database migrations. These tasks need to complete before the web interface can be accessed. To monitor the progress, you can follow the container's STDOUT by running the following:

```bash
# Tail the awx_task log
$ docker logs -f awx_task
```

You will see output similar to the following:

```bash
Using /etc/ansible/ansible.cfg as config file
127.0.0.1 | SUCCESS => {
    "changed": false,
    "db": "awx"
}
Operations to perform:
  Synchronize unmigrated apps: solo, api, staticfiles, messages, channels, django_extensions, ui, rest_framework, polymorphic
  Apply all migrations: sso, taggit, sessions, sites, kombu_transport_django, social_auth, contenttypes, auth, conf, main
Synchronizing apps without migrations:
  Creating tables...
    Running deferred SQL...
    Installing custom SQL...
Running migrations:
  Rendering model states... DONE
  Applying contenttypes.0001_initial... OK
  Applying contenttypes.0002_remove_content_type_name... OK
  Applying auth.0001_initial... OK
  Applying auth.0002_alter_permission_name_max_length... OK
  Applying auth.0003_alter_user_email_max_length... OK
  Applying auth.0004_alter_user_username_opts... OK
  Applying auth.0005_alter_user_last_login_null... OK
  Applying auth.0006_require_contenttypes_0002... OK
  Applying taggit.0001_initial... OK
  Applying taggit.0002_auto_20150616_2121... OK
  Applying main.0001_initial... OK
  ...
```

Once migrations complete, you will see log output similar to the following, indicating that setup has finished:

```bash
Python 2.7.5 (default, Nov 6 2016, 00:28:07)
[GCC 4.8.5 20150623 (Red Hat 4.8.5-11)] on linux2
Type "help", "copyright", "credits" or "license" for more information.
(InteractiveConsole)

>>> <User: admin>
>>> Default organization added.
Demo Credential, Inventory, and Job Template added.
Successfully registered instance awx
(changed: True)
Creating instance group tower
Added instance awx to tower
(changed: True)
...
```

### Accessing AWX

The AWX web server is accessible on the deployment host, using the *host_port* value set in the *inventory* file. The default URL is [http://localhost](http://localhost).

You will be prompted with a login dialog. The default administrator username is `admin`, and the password is `password`.

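If you want to change the default admin password afterwards, one way (assuming the default `awx_task` container name) is Django's standard `changepassword` management command via `awx-manage`:

```bash
# Interactively set a new password for the admin user
$ docker exec -it awx_task awx-manage changepassword admin
```
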
# Installing the AWX CLI

**MANIFEST.in**

```diff
@@ -20,7 +20,7 @@ recursive-exclude awx/ui/client *
 recursive-exclude awx/settings local_settings.py*
 include tools/scripts/request_tower_configuration.sh
 include tools/scripts/request_tower_configuration.ps1
-include tools/scripts/ansible-tower-service
+include tools/scripts/automation-controller-service
 include tools/scripts/failure-event-handler
 include tools/scripts/awx-python
 include awx/playbooks/library/mkfifo.py
```

**Makefile**

```diff
@@ -1,4 +1,4 @@
-PYTHON ?= python3
+PYTHON ?= python3.8
 PYTHON_VERSION = $(shell $(PYTHON) -c "from distutils.sysconfig import get_python_version; print(get_python_version())")
 SITELIB=$(shell $(PYTHON) -c "from distutils.sysconfig import get_python_lib; print(get_python_lib())")
 OFFICIAL ?= no
@@ -20,17 +20,18 @@ COMPOSE_TAG ?= $(GIT_BRANCH)
 COMPOSE_HOST ?= $(shell hostname)

 VENV_BASE ?= /var/lib/awx/venv/
+COLLECTION_BASE ?= /var/lib/awx/vendor/awx_ansible_collections
 SCL_PREFIX ?=
-CELERY_SCHEDULE_FILE ?= /var/lib/awx/beat.db

-DEV_DOCKER_TAG_BASE ?= gcr.io/ansible-tower-engineering
+DEV_DOCKER_TAG_BASE ?= quay.io/awx
+DEVEL_IMAGE_NAME ?= $(DEV_DOCKER_TAG_BASE)/awx_devel:$(COMPOSE_TAG)

 # Python packages to install only from source (not from binary wheels)
 # Comma separated list
 SRC_ONLY_PKGS ?= cffi,pycparser,psycopg2,twilio,pycurl
 # These should be upgraded in the AWX and Ansible venv before attempting
 # to install the actual requirements
-VENV_BOOTSTRAP ?= pip==19.3.1 setuptools==41.6.0
+VENV_BOOTSTRAP ?= pip==19.3.1 setuptools==41.6.0 wheel==0.36.2

 # Determine appropriate shasum command
 UNAME_S := $(shell uname -s)
@@ -60,11 +61,11 @@ WHEEL_FILE ?= $(WHEEL_NAME)-py2-none-any.whl
 I18N_FLAG_FILE = .i18n_built

 .PHONY: awx-link clean clean-tmp clean-venv requirements requirements_dev \
-    develop refresh adduser migrate dbchange runserver \
+    develop refresh adduser migrate dbchange \
     receiver test test_unit test_coverage coverage_html \
-    dev_build release_build release_clean sdist \
-    ui-docker-machine ui-docker ui-release ui-devel \
-    ui-test ui-deps ui-test-ci VERSION
+    dev_build release_build sdist \
+    ui-release ui-devel \
+    VERSION docker-compose-sources

 clean-tmp:
     rm -rf tmp/
@@ -113,31 +114,7 @@ guard-%:
         exit 1; \
     fi

-virtualenv: virtualenv_ansible virtualenv_awx
-
-# virtualenv_* targets do not use --system-site-packages to prevent bugs installing packages
-# but Ansible venvs are expected to have this, so that must be done after venv creation
-virtualenv_ansible:
-    if [ "$(VENV_BASE)" ]; then \
-        if [ ! -d "$(VENV_BASE)" ]; then \
-            mkdir $(VENV_BASE); \
-        fi; \
-        if [ ! -d "$(VENV_BASE)/ansible" ]; then \
-            virtualenv -p python $(VENV_BASE)/ansible && \
-            $(VENV_BASE)/ansible/bin/pip install $(PIP_OPTIONS) $(VENV_BOOTSTRAP); \
-        fi; \
-    fi
-
-virtualenv_ansible_py3:
-    if [ "$(VENV_BASE)" ]; then \
-        if [ ! -d "$(VENV_BASE)" ]; then \
-            mkdir $(VENV_BASE); \
-        fi; \
-        if [ ! -d "$(VENV_BASE)/ansible" ]; then \
-            virtualenv -p $(PYTHON) $(VENV_BASE)/ansible; \
-            $(VENV_BASE)/ansible/bin/pip install $(PIP_OPTIONS) $(VENV_BOOTSTRAP); \
-        fi; \
-    fi
+virtualenv: virtualenv_awx

 # flit is needed for offline install of certain packages, specifically ptyprocess
 # it is needed for setup, but not always recognized as a setup dependency
@@ -148,37 +125,11 @@ virtualenv_awx:
         mkdir $(VENV_BASE); \
     fi; \
     if [ ! -d "$(VENV_BASE)/awx" ]; then \
-        virtualenv -p $(PYTHON) $(VENV_BASE)/awx; \
+        $(PYTHON) -m venv $(VENV_BASE)/awx; \
         $(VENV_BASE)/awx/bin/pip install $(PIP_OPTIONS) $(VENV_BOOTSTRAP); \
     fi; \
     fi

-# --ignore-install flag is not used because *.txt files should specify exact versions
-requirements_ansible: virtualenv_ansible
-    if [[ "$(PIP_OPTIONS)" == *"--no-index"* ]]; then \
-        cat requirements/requirements_ansible.txt requirements/requirements_ansible_local.txt | PYCURL_SSL_LIBRARY=$(PYCURL_SSL_LIBRARY) $(VENV_BASE)/ansible/bin/pip install $(PIP_OPTIONS) -r /dev/stdin ; \
-    else \
-        cat requirements/requirements_ansible.txt requirements/requirements_ansible_git.txt | PYCURL_SSL_LIBRARY=$(PYCURL_SSL_LIBRARY) $(VENV_BASE)/ansible/bin/pip install $(PIP_OPTIONS) --no-binary $(SRC_ONLY_PKGS) -r /dev/stdin ; \
-    fi
-    $(VENV_BASE)/ansible/bin/pip uninstall --yes -r requirements/requirements_ansible_uninstall.txt
-    # Same effect as using --system-site-packages flag on venv creation
-    rm $(shell ls -d $(VENV_BASE)/ansible/lib/python* | head -n 1)/no-global-site-packages.txt
-
-requirements_ansible_py3: virtualenv_ansible_py3
-    if [[ "$(PIP_OPTIONS)" == *"--no-index"* ]]; then \
-        cat requirements/requirements_ansible.txt requirements/requirements_ansible_local.txt | PYCURL_SSL_LIBRARY=$(PYCURL_SSL_LIBRARY) $(VENV_BASE)/ansible/bin/pip3 install $(PIP_OPTIONS) -r /dev/stdin ; \
-    else \
-        cat requirements/requirements_ansible.txt requirements/requirements_ansible_git.txt | PYCURL_SSL_LIBRARY=$(PYCURL_SSL_LIBRARY) $(VENV_BASE)/ansible/bin/pip3 install $(PIP_OPTIONS) --no-binary $(SRC_ONLY_PKGS) -r /dev/stdin ; \
-    fi
-    $(VENV_BASE)/ansible/bin/pip3 uninstall --yes -r requirements/requirements_ansible_uninstall.txt
-    # Same effect as using --system-site-packages flag on venv creation
-    rm $(shell ls -d $(VENV_BASE)/ansible/lib/python* | head -n 1)/no-global-site-packages.txt
-
-requirements_ansible_dev:
-    if [ "$(VENV_BASE)" ]; then \
-        $(VENV_BASE)/ansible/bin/pip install pytest mock; \
-    fi
-
 # Install third-party requirements needed for AWX's environment.
 # this does not use system site packages intentionally
 requirements_awx: virtualenv_awx
@@ -192,17 +143,9 @@ requirements_awx: virtualenv_awx
 requirements_awx_dev:
     $(VENV_BASE)/awx/bin/pip install -r requirements/requirements_dev.txt

-requirements_collections:
-    mkdir -p $(COLLECTION_BASE)
-    n=0; \
-    until [ "$$n" -ge 5 ]; do \
-        ansible-galaxy collection install -r requirements/collections_requirements.yml -p $(COLLECTION_BASE) && break; \
-        n=$$((n+1)); \
-    done
-
-requirements: requirements_ansible requirements_awx requirements_collections
+requirements: requirements_awx

-requirements_dev: requirements_awx requirements_ansible_py3 requirements_awx_dev requirements_ansible_dev
+requirements_dev: requirements_awx requirements_awx_dev

 requirements_test: requirements
@@ -221,7 +164,7 @@ version_file:
     if [ "$(VENV_BASE)" ]; then \
         . $(VENV_BASE)/awx/bin/activate; \
     fi; \
-    python -c "import awx; print(awx.__version__)" > /var/lib/awx/.awx_version; \
+    $(PYTHON) -c "import awx; print(awx.__version__)" > /var/lib/awx/.awx_version; \

 # Do any one-time init tasks.
 comma := ,
@@ -267,11 +210,27 @@ collectstatic:
     fi; \
     mkdir -p awx/public/static && $(PYTHON) manage.py collectstatic --clear --noinput > /dev/null 2>&1

+UWSGI_DEV_RELOAD_COMMAND ?= supervisorctl restart tower-processes:awx-dispatcher tower-processes:awx-receiver
+
 uwsgi: collectstatic
     @if [ "$(VENV_BASE)" ]; then \
         . $(VENV_BASE)/awx/bin/activate; \
     fi; \
-    uwsgi -b 32768 --socket 127.0.0.1:8050 --module=awx.wsgi:application --home=/var/lib/awx/venv/awx --chdir=/awx_devel/ --vacuum --processes=5 --harakiri=120 --master --no-orphans --py-autoreload 1 --max-requests=1000 --stats /tmp/stats.socket --lazy-apps --logformat "%(addr) %(method) %(uri) - %(proto) %(status)" --hook-accepting1="exec:supervisorctl restart tower-processes:awx-dispatcher tower-processes:awx-receiver"
+    uwsgi -b 32768 \
+        --socket 127.0.0.1:8050 \
+        --module=awx.wsgi:application \
+        --home=/var/lib/awx/venv/awx \
+        --chdir=/awx_devel/ \
+        --vacuum \
+        --processes=5 \
+        --harakiri=120 --master \
+        --no-orphans \
+        --py-autoreload 1 \
+        --max-requests=1000 \
+        --stats /tmp/stats.socket \
+        --lazy-apps \
+        --logformat "%(addr) %(method) %(uri) - %(proto) %(status)" \
+        --hook-accepting1="exec: $(UWSGI_DEV_RELOAD_COMMAND)"

 daphne:
     @if [ "$(VENV_BASE)" ]; then \
@@ -312,20 +271,13 @@ jupyter:
 reports:
     mkdir -p $@

-pep8: reports
-    @(set -o pipefail && $@ | tee reports/$@.report)
+black: reports
+    @command -v black >/dev/null 2>&1 || { echo "could not find black on your PATH, you may need to \`pip install black\`, or set AWX_IGNORE_BLACK=1" && exit 1; }
+    @(set -o pipefail && $@ $(BLACK_ARGS) awx awxkit awx_collection | tee reports/$@.report)

-flake8: reports
-    @if [ "$(VENV_BASE)" ]; then \
-        . $(VENV_BASE)/awx/bin/activate; \
-    fi; \
-    (set -o pipefail && $@ | tee reports/$@.report)
-
-pyflakes: reports
-    @(set -o pipefail && $@ | tee reports/$@.report)
-
-pylint: reports
-    @(set -o pipefail && $@ | reports/$@.report)
+.git/hooks/pre-commit:
+    @echo "[ -z \$$AWX_IGNORE_BLACK ] && (black --check \`git diff --cached --name-only --diff-filter=AM | grep -E '\.py$\'\` || (echo 'To fix this, run \`make black\` to auto-format your code prior to commit, or set AWX_IGNORE_BLACK=1' && exit 1))" > .git/hooks/pre-commit
+    @chmod +x .git/hooks/pre-commit

 genschema: reports
     $(MAKE) swagger PYTEST_ARGS="--genschema --create-db "
@@ -337,10 +289,10 @@ swagger: reports
     fi; \
     (set -o pipefail && py.test $(PYTEST_ARGS) awx/conf/tests/functional awx/main/tests/functional/api awx/main/tests/docs --release=$(VERSION_TARGET) | tee reports/$@.report)

-check: flake8 pep8 # pyflakes pylint
+check: black

 awx-link:
-    [ -d "/awx_devel/awx.egg-info" ] || python3 /awx_devel/setup.py egg_info_dev
+    [ -d "/awx_devel/awx.egg-info" ] || $(PYTHON) /awx_devel/setup.py egg_info_dev
     cp -f /tmp/awx.egg-link /var/lib/awx/venv/awx/lib/python$(PYTHON_VERSION)/site-packages/awx.egg-link

 TEST_DIRS ?= awx/main/tests/unit awx/main/tests/functional awx/conf/tests awx/sso/tests
@@ -365,17 +317,15 @@ test_collection:
     rm -f $(shell ls -d $(VENV_BASE)/awx/lib/python* | head -n 1)/no-global-site-packages.txt
     if [ "$(VENV_BASE)" ]; then \
         . $(VENV_BASE)/awx/bin/activate; \
-    fi; \
+    fi && \
+    pip install ansible && \
     py.test $(COLLECTION_TEST_DIRS) -v
+    # The python path needs to be modified so that the tests can find Ansible within the container
+    # First we will use anything expility set as PYTHONPATH
+    # Second we will load any libraries out of the virtualenv (if it's unspecified that should be ok because python should not load out of an empty directory)
+    # Finally we will add the system path so that the tests can find the ansible libraries

-flake8_collection:
-    flake8 awx_collection/ # Different settings, in main exclude list
-
-test_collection_all: test_collection flake8_collection
+test_collection_all: test_collection

 # WARNING: symlinking a collection is fundamentally unstable
 # this is for rapid development iteration with playbooks, do not use with other test targets
@@ -427,39 +377,6 @@ bulk_data:
     fi; \
     $(PYTHON) tools/data_generators/rbac_dummy_data_generator.py --preset=$(DATA_GEN_PRESET)

-# l10n TASKS
-# --------------------------------------
-
-# check for UI po files
-HAVE_PO := $(shell ls awx/ui/po/*.po 2>/dev/null)
-check-po:
-ifdef HAVE_PO
-    # Should be 'Language: zh-CN' but not 'Language: zh_CN' in zh_CN.po
-    for po in awx/ui/po/*.po ; do \
-        echo $$po; \
-        mo="awx/ui/po/`basename $$po .po`.mo"; \
-        msgfmt --check --verbose $$po -o $$mo; \
-        if test "$$?" -ne 0 ; then \
-            exit -1; \
-        fi; \
-        rm $$mo; \
-        name=`echo "$$po" | grep '-'`; \
-        if test "x$$name" != x ; then \
-            right_name=`echo $$language | sed -e 's/-/_/'`; \
-            echo "ERROR: WRONG $$name CORRECTION: $$right_name"; \
-            exit -1; \
-        fi; \
-        language=`grep '^"Language:' "$$po" | grep '_'`; \
-        if test "x$$language" != x ; then \
-            right_language=`echo $$language | sed -e 's/_/-/'`; \
-            echo "ERROR: WRONG $$language CORRECTION: $$right_language in $$po"; \
-            exit -1; \
-        fi; \
-    done;
-else
-    @echo No PO files
-endif

 # UI TASKS
 # --------------------------------------
@@ -472,16 +389,13 @@ clean-ui:
     rm -rf awx/ui_next/build
    rm -rf awx/ui_next/src/locales/_build
     rm -rf $(UI_BUILD_FLAG_FILE)
-    git checkout awx/ui_next/src/locales

 awx/ui_next/node_modules:
     $(NPM_BIN) --prefix awx/ui_next --loglevel warn --ignore-scripts install

 $(UI_BUILD_FLAG_FILE):
-    $(NPM_BIN) --prefix awx/ui_next --loglevel warn run extract-strings
     $(NPM_BIN) --prefix awx/ui_next --loglevel warn run compile-strings
     $(NPM_BIN) --prefix awx/ui_next --loglevel warn run build
-    git checkout awx/ui_next/src/locales
     mkdir -p awx/public/static/css
     mkdir -p awx/public/static/js
     mkdir -p awx/public/static/media
@@ -495,11 +409,17 @@ ui-release: awx/ui_next/node_modules $(UI_BUILD_FLAG_FILE)
 ui-devel: awx/ui_next/node_modules
     @$(MAKE) -B $(UI_BUILD_FLAG_FILE)

+ui-devel-instrumented: awx/ui_next/node_modules
+    $(NPM_BIN) --prefix awx/ui_next --loglevel warn run start-instrumented
+
+ui-devel-test: awx/ui_next/node_modules
+    $(NPM_BIN) --prefix awx/ui_next --loglevel warn run start
+
 ui-zuul-lint-and-test:
     $(NPM_BIN) --prefix awx/ui_next install
     $(NPM_BIN) run --prefix awx/ui_next lint
     $(NPM_BIN) run --prefix awx/ui_next prettier-check
-    $(NPM_BIN) run --prefix awx/ui_next test
+    $(NPM_BIN) run --prefix awx/ui_next test -- --coverage --watchAll=false


 # Build a pip-installable package into dist/ with a timestamped version number.
@@ -543,31 +463,30 @@ docker-auth:
 awx/projects:
     @mkdir -p $@

-# Docker isolated rampart
-docker-compose-isolated: awx/projects
-    CURRENT_UID=$(shell id -u) TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose -f tools/docker-compose.yml -f tools/docker-isolated-override.yml up
-
 COMPOSE_UP_OPTS ?=
+CLUSTER_NODE_COUNT ?= 1

 # Docker Compose Development environment
-docker-compose: docker-auth awx/projects
-    CURRENT_UID=$(shell id -u) OS="$(shell docker info | grep 'Operating System')" TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose -f tools/docker-compose.yml $(COMPOSE_UP_OPTS) up --no-recreate awx
-
-docker-compose-cluster: docker-auth awx/projects
-    CURRENT_UID=$(shell id -u) TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose -f tools/docker-compose-cluster.yml up
+docker-compose-sources: .git/hooks/pre-commit
+    ansible-playbook -i tools/docker-compose/inventory tools/docker-compose/ansible/sources.yml \
+        -e awx_image=$(DEV_DOCKER_TAG_BASE)/awx_devel \
+        -e awx_image_tag=$(COMPOSE_TAG) \
+        -e cluster_node_count=$(CLUSTER_NODE_COUNT)
+
+docker-compose: docker-auth awx/projects docker-compose-sources
+    docker-compose -f tools/docker-compose/_sources/docker-compose.yml up $(COMPOSE_UP_OPTS)

-docker-compose-credential-plugins: docker-auth awx/projects
+docker-compose-credential-plugins: docker-auth awx/projects docker-compose-sources
     echo -e "\033[0;31mTo generate a CyberArk Conjur API key: docker exec -it tools_conjur_1 conjurctl account create quick-start\033[0m"
-    CURRENT_UID=$(shell id -u) TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose -f tools/docker-compose.yml -f tools/docker-credential-plugins-override.yml up --no-recreate awx
+    docker-compose -f tools/docker-compose/_sources/docker-compose.yml -f tools/docker-credential-plugins-override.yml up --no-recreate awx_1

-docker-compose-test: docker-auth awx/projects
-    cd tools && CURRENT_UID=$(shell id -u) OS="$(shell docker info | grep 'Operating System')" TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose run --rm --service-ports awx /bin/bash
+docker-compose-test: docker-auth awx/projects docker-compose-sources
+    docker-compose -f tools/docker-compose/_sources/docker-compose.yml run --rm --service-ports awx_1 /bin/bash

-docker-compose-runtest: awx/projects
-    cd tools && CURRENT_UID=$(shell id -u) TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose run --rm --service-ports awx /start_tests.sh
+docker-compose-runtest: awx/projects docker-compose-sources
+    docker-compose -f tools/docker-compose/_sources/docker-compose.yml run --rm --service-ports awx_1 /start_tests.sh

-docker-compose-build-swagger: awx/projects
-    cd tools && CURRENT_UID=$(shell id -u) TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose run --rm --service-ports --no-deps awx /start_tests.sh swagger
+docker-compose-build-swagger: awx/projects docker-compose-sources
+    docker-compose -f tools/docker-compose/_sources/docker-compose.yml run --rm --service-ports --no-deps awx_1 /start_tests.sh swagger

 detect-schema-change: genschema
     curl https://s3.amazonaws.com/awx-public-ci-files/schema.json -o reference-schema.json
@@ -575,21 +494,14 @@ detect-schema-change: genschema
     diff -u -b reference-schema.json schema.json

 docker-compose-clean: awx/projects
-    cd tools && TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose rm -sf
+    docker-compose -f tools/docker-compose/_sources/docker-compose.yml rm -sf

 # Base development image build
 docker-compose-build:
-    ansible localhost -m template -a "src=installer/roles/image_build/templates/Dockerfile.j2 dest=tools/docker-compose/Dockerfile" -e build_dev=True
-    docker build -t ansible/awx_devel -f tools/docker-compose/Dockerfile \
-        --cache-from=$(DEV_DOCKER_TAG_BASE)/awx_devel:$(COMPOSE_TAG) .
-    docker tag ansible/awx_devel $(DEV_DOCKER_TAG_BASE)/awx_devel:$(COMPOSE_TAG)
-    #docker push $(DEV_DOCKER_TAG_BASE)/awx_devel:$(COMPOSE_TAG)
-
-# For use when developing on "isolated" AWX deployments
-docker-compose-isolated-build: docker-compose-build
-    docker build -t ansible/awx_isolated -f tools/docker-isolated/Dockerfile .
-    docker tag ansible/awx_isolated $(DEV_DOCKER_TAG_BASE)/awx_isolated:$(COMPOSE_TAG)
-    #docker push $(DEV_DOCKER_TAG_BASE)/awx_isolated:$(COMPOSE_TAG)
+    ansible-playbook tools/ansible/dockerfile.yml -e build_dev=True
+    DOCKER_BUILDKIT=1 docker build -t $(DEVEL_IMAGE_NAME) \
+        --build-arg BUILDKIT_INLINE_CACHE=1 \
+        --cache-from=$(DEV_DOCKER_TAG_BASE)/awx_devel:$(COMPOSE_TAG) .

 docker-clean:
     $(foreach container_id,$(shell docker ps -f name=tools_awx -aq),docker stop $(container_id); docker rm -f $(container_id);)
@@ -601,11 +513,11 @@ docker-clean-volumes: docker-compose-clean
 docker-refresh: docker-clean docker-compose

 # Docker Development Environment with Elastic Stack Connected
-docker-compose-elk: docker-auth awx/projects
-    CURRENT_UID=$(shell id -u) TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose -f tools/docker-compose.yml -f tools/elastic/docker-compose.logstash-link.yml -f tools/elastic/docker-compose.elastic-override.yml up --no-recreate
+docker-compose-elk: docker-auth awx/projects docker-compose-sources
+    docker-compose -f tools/docker-compose/_sources/docker-compose.yml -f tools/elastic/docker-compose.logstash-link.yml -f tools/elastic/docker-compose.elastic-override.yml up --no-recreate

-docker-compose-cluster-elk: docker-auth awx/projects
-    TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose -f tools/docker-compose-cluster.yml -f tools/elastic/docker-compose.logstash-link-cluster.yml -f tools/elastic/docker-compose.elastic-override.yml up --no-recreate
+docker-compose-cluster-elk: docker-auth awx/projects docker-compose-sources
+    docker-compose -f tools/docker-compose/_sources/docker-compose.yml -f tools/elastic/docker-compose.logstash-link-cluster.yml -f tools/elastic/docker-compose.elastic-override.yml up --no-recreate

 prometheus:
     docker run -u0 --net=tools_default --link=`docker ps | egrep -o "tools_awx(_run)?_([^ ]+)?"`:awxweb --volume `pwd`/tools/prometheus:/prometheus --name prometheus -d -p 0.0.0.0:9090:9090 prom/prometheus --web.enable-lifecycle --config.file=/prometheus/prometheus.yml
@@ -624,5 +536,33 @@ psql-container:
 VERSION:
     @echo "awx: $(VERSION)"

-Dockerfile: installer/roles/image_build/templates/Dockerfile.j2
-    ansible localhost -m template -a "src=installer/roles/image_build/templates/Dockerfile.j2 dest=Dockerfile"
+Dockerfile: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
+    ansible-playbook tools/ansible/dockerfile.yml
+
+Dockerfile.kube-dev: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
+    ansible-playbook tools/ansible/dockerfile.yml \
+        -e dockerfile_name=Dockerfile.kube-dev \
+        -e kube_dev=True \
+        -e template_dest=_build_kube_dev
+
+awx-kube-dev-build: Dockerfile.kube-dev
+    docker build -f Dockerfile.kube-dev \
+        --build-arg BUILDKIT_INLINE_CACHE=1 \
+        -t $(DEV_DOCKER_TAG_BASE)/awx_kube_devel:$(COMPOSE_TAG) .
+
+
+# Translation TASKS
+# --------------------------------------
+
+# generate UI .pot
+pot: $(UI_BUILD_FLAG_FILE)
+    $(NPM_BIN) --prefix awx/ui_next --loglevel warn run extract-strings
+    $(NPM_BIN) --prefix awx/ui_next --loglevel warn run extract-template
+
+# generate API django .pot .po
+LANG = "en-us"
+messages:
+    @if [ "$(VENV_BASE)" ]; then \
+        . $(VENV_BASE)/awx/bin/activate; \
+    fi; \
+    $(PYTHON) manage.py makemessages -l $(LANG) --keep-pot
```

**README.md**

```diff
@@ -1,4 +1,7 @@
-[](https://ansible.softwarefactory-project.io/zuul/status)
+[](https://ansible.softwarefactory-project.io/zuul/status) [](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html) [](https://github.com/ansible/awx/blob/devel/LICENSE.md) [](https://groups.google.com/g/awx-project)
+[](https://webchat.freenode.net/#ansible-awx)

 <img src="https://raw.githubusercontent.com/ansible/awx-logos/master/awx/ui/client/assets/logo-login.svg?sanitize=true" width=200 alt="AWX" />

 AWX provides a web-based user interface, REST API, and task engine built on top of [Ansible](https://github.com/ansible/ansible). It is the upstream project for [Tower](https://www.ansible.com/tower), a commercial derivative of AWX.

@@ -14,31 +17,25 @@ Contributing
 ------------

 - Refer to the [Contributing guide](./CONTRIBUTING.md) to get started developing, testing, and building AWX.
-- All code submissions are done through pull requests against the `devel` branch.
-- All contributors must use git commit --signoff for any commit to be merged, and agree that usage of --signoff constitutes agreement with the terms of [DCO 1.1](./DCO_1_1.md)
-- Take care to make sure no merge commits are in the submission, and use `git rebase` vs `git merge` for this reason.
-- If submitting a large code change, it's a good idea to join the `#ansible-awx` channel on irc.freenode.net, and talk about what you would like to do or add first. This not only helps everyone know what's going on, it also helps save time and effort, if the community decides some changes are needed.
+- All code submissions are made through pull requests against the `devel` branch.
+- All contributors must use git commit --signoff for any commit to be merged and agree that usage of --signoff constitutes agreement with the terms of [DCO 1.1](./DCO_1_1.md)
+- Take care to make sure no merge commits are in the submission, and use `git rebase` vs. `git merge` for this reason.
+- If submitting a large code change, it's a good idea to join the `#ansible-awx` channel on webchat.freenode.net and talk about what you would like to do or add first. This not only helps everyone know what's going on, but it also helps save time and effort if the community decides some changes are needed.

 Reporting Issues
 ----------------

-If you're experiencing a problem that you feel is a bug in AWX, or have ideas for how to improve AWX, we encourage you to open an issue, and share your feedback. But before opening a new issue, we ask that you please take a look at our [Issues guide](./ISSUES.md).
+If you're experiencing a problem that you feel is a bug in AWX or have ideas for improving AWX, we encourage you to open an issue and share your feedback. But before opening a new issue, we ask that you please take a look at our [Issues guide](./ISSUES.md).

 Code of Conduct
 ---------------

-We ask all of our community members and contributors to adhere to the [Ansible code of conduct](http://docs.ansible.com/ansible/latest/community/code_of_conduct.html). If you have questions, or need assistance, please reach out to our community team at [codeofconduct@ansible.com](mailto:codeofconduct@ansible.com)
+We ask all of our community members and contributors to adhere to the [Ansible code of conduct](http://docs.ansible.com/ansible/latest/community/code_of_conduct.html). If you have questions or need assistance, please reach out to our community team at [codeofconduct@ansible.com](mailto:codeofconduct@ansible.com)

 Get Involved
 ------------

 We welcome your feedback and ideas. Here's how to reach us with feedback and questions:

-- Join the `#ansible-awx` channel on irc.freenode.net
+- Join the `#ansible-awx` channel on webchat.freenode.net
 - Join the [mailing list](https://groups.google.com/forum/#!forum/awx-project)

 License
 -------

 [Apache v2](./LICENSE.md)
```

@@ -15,9 +15,10 @@ __all__ = ['__version__']
# Check for the presence/absence of "devonly" module to determine if running
# from a source code checkout or release package.
try:
    import awx.devonly # noqa
    import awx.devonly  # noqa

    MODE = 'development'
except ImportError: # pragma: no cover
except ImportError:  # pragma: no cover
    MODE = 'production'


@@ -25,6 +26,7 @@ import hashlib

try:
    import django # noqa: F401

    HAS_DJANGO = True
except ImportError:
    HAS_DJANGO = False
@@ -40,6 +42,7 @@ if HAS_DJANGO is True:
    try:
        names_digest('foo', 'bar', 'baz', length=8)
    except ValueError:

        def names_digest(*args, length):
            """
            Generate a 32-bit digest of a set of arguments that can be used to shorten
@@ -64,7 +67,7 @@ def find_commands(management_dir):
                continue
            elif f.endswith('.py') and f[:-3] not in commands:
                commands.append(f[:-3])
            elif f.endswith('.pyc') and f[:-4] not in commands: # pragma: no cover
            elif f.endswith('.pyc') and f[:-4] not in commands:  # pragma: no cover
                commands.append(f[:-4])
    except OSError:
        pass
@@ -75,6 +78,7 @@ def oauth2_getattribute(self, attr):
    # Custom method to override
    # oauth2_provider.settings.OAuth2ProviderSettings.__getattribute__
    from django.conf import settings

    val = None
    if 'migrate' not in sys.argv:
        # certain Django OAuth Toolkit migrations actually reference
@@ -94,33 +98,38 @@ def prepare_env():
    # Hide DeprecationWarnings when running in production. Need to first load
    # settings to apply our filter after Django's own warnings filter.
    from django.conf import settings
    if not settings.DEBUG: # pragma: no cover

    if not settings.DEBUG:  # pragma: no cover
        warnings.simplefilter('ignore', DeprecationWarning)
    # Monkeypatch Django find_commands to also work with .pyc files.
    import django.core.management

    django.core.management.find_commands = find_commands

    # Monkeypatch Oauth2 toolkit settings class to check for settings
    # in django.conf settings each time, not just once during import
    import oauth2_provider.settings

    oauth2_provider.settings.OAuth2ProviderSettings.__getattribute__ = oauth2_getattribute

    # Use the AWX_TEST_DATABASE_* environment variables to specify the test
    # database settings to use when management command is run as an external
    # program via unit tests.
    for opt in ('ENGINE', 'NAME', 'USER', 'PASSWORD', 'HOST', 'PORT'): # pragma: no cover
    for opt in ('ENGINE', 'NAME', 'USER', 'PASSWORD', 'HOST', 'PORT'):  # pragma: no cover
        if os.environ.get('AWX_TEST_DATABASE_%s' % opt, None):
            settings.DATABASES['default'][opt] = os.environ['AWX_TEST_DATABASE_%s' % opt]
    # Disable capturing all SQL queries in memory when in DEBUG mode.
    if settings.DEBUG and not getattr(settings, 'SQL_DEBUG', True):
        from django.db.backends.base.base import BaseDatabaseWrapper
        from django.db.backends.utils import CursorWrapper

        BaseDatabaseWrapper.make_debug_cursor = lambda self, cursor: CursorWrapper(cursor, self)

    # Use the default devserver addr/port defined in settings for runserver.
    default_addr = getattr(settings, 'DEVSERVER_DEFAULT_ADDR', '127.0.0.1')
    default_port = getattr(settings, 'DEVSERVER_DEFAULT_PORT', 8000)
    from django.core.management.commands import runserver as core_runserver

    original_handle = core_runserver.Command.handle

    def handle(self, *args, **options):
@@ -139,7 +148,8 @@ def manage():
    # Now run the command (or display the version).
    from django.conf import settings
    from django.core.management import execute_from_command_line
    if len(sys.argv) >= 2 and sys.argv[1] in ('version', '--version'): # pragma: no cover

    if len(sys.argv) >= 2 and sys.argv[1] in ('version', '--version'):  # pragma: no cover
        sys.stdout.write('%s\n' % __version__)
    # If running as a user without permission to read settings, display an
    # error message. Allow --help to still work.

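The prepare_env() hunks above are mostly monkeypatching: at startup AWX swaps django.core.management.find_commands and the OAuth toolkit's settings lookup for its own versions. A minimal sketch of that pattern, with a hypothetical target module (`some_lib` and its attributes are stand-ins, not AWX code):

```python
# Sketch only: `some_lib` is a hypothetical module standing in for the
# patched targets above (django.core.management, oauth2_provider.settings).
import some_lib


def fresh_lookup(name):
    # Re-read configuration on every call instead of caching at import time.
    return some_lib.config.get(name)


some_lib.lookup = fresh_lookup  # every later caller now gets the patched function
```

The trade-off is the usual one for monkeypatching: the patch must run before any caller imports a reference to the original function, which is why AWX performs it inside prepare_env() rather than lazily.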
@@ -18,7 +18,6 @@ logger = logging.getLogger('awx.api.authentication')


class LoggedBasicAuthentication(authentication.BasicAuthentication):

    def authenticate(self, request):
        if not settings.AUTH_BASIC_ENABLED:
            return
@@ -35,22 +34,18 @@ class LoggedBasicAuthentication(authentication.BasicAuthentication):


class SessionAuthentication(authentication.SessionAuthentication):

    def authenticate_header(self, request):
        return 'Session'


class LoggedOAuth2Authentication(OAuth2Authentication):

    def authenticate(self, request):
        ret = super(LoggedOAuth2Authentication, self).authenticate(request)
        if ret:
            user, token = ret
            username = user.username if user else '<none>'
            logger.info(smart_text(
                u"User {} performed a {} to {} through the API using OAuth 2 token {}.".format(
                    username, request.method, request.path, token.pk
                )
            ))
            logger.info(
                smart_text(u"User {} performed a {} to {} through the API using OAuth 2 token {}.".format(username, request.method, request.path, token.pk))
            )
            setattr(user, 'oauth_scopes', [x for x in token.scope.split() if x])
        return ret

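These classes are drop-in DRF authentication backends; they take effect by being listed in the REST framework settings. A minimal sketch, assuming these are the classes AWX wires in (the diff shows only their definitions, not the settings file):

```python
# Sketch only: the exact class list and ordering AWX ships is not shown
# in this diff; module paths follow the logger name above.
REST_FRAMEWORK = {
    'DEFAULT_AUTHENTICATION_CLASSES': [
        'awx.api.authentication.LoggedOAuth2Authentication',
        'awx.api.authentication.SessionAuthentication',
        'awx.api.authentication.LoggedBasicAuthentication',
    ],
}
```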
@@ -38,16 +38,20 @@ register(
register(
    'OAUTH2_PROVIDER',
    field_class=OAuth2ProviderField,
    default={'ACCESS_TOKEN_EXPIRE_SECONDS': oauth2_settings.ACCESS_TOKEN_EXPIRE_SECONDS,
             'AUTHORIZATION_CODE_EXPIRE_SECONDS': oauth2_settings.AUTHORIZATION_CODE_EXPIRE_SECONDS,
             'REFRESH_TOKEN_EXPIRE_SECONDS': oauth2_settings.REFRESH_TOKEN_EXPIRE_SECONDS},
    default={
        'ACCESS_TOKEN_EXPIRE_SECONDS': oauth2_settings.ACCESS_TOKEN_EXPIRE_SECONDS,
        'AUTHORIZATION_CODE_EXPIRE_SECONDS': oauth2_settings.AUTHORIZATION_CODE_EXPIRE_SECONDS,
        'REFRESH_TOKEN_EXPIRE_SECONDS': oauth2_settings.REFRESH_TOKEN_EXPIRE_SECONDS,
    },
    label=_('OAuth 2 Timeout Settings'),
    help_text=_('Dictionary for customizing OAuth 2 timeouts, available items are '
                '`ACCESS_TOKEN_EXPIRE_SECONDS`, the duration of access tokens in the number '
                'of seconds, `AUTHORIZATION_CODE_EXPIRE_SECONDS`, the duration of '
                'authorization codes in the number of seconds, and `REFRESH_TOKEN_EXPIRE_SECONDS`, '
                'the duration of refresh tokens, after expired access tokens, '
                'in the number of seconds.'),
    help_text=_(
        'Dictionary for customizing OAuth 2 timeouts, available items are '
        '`ACCESS_TOKEN_EXPIRE_SECONDS`, the duration of access tokens in the number '
        'of seconds, `AUTHORIZATION_CODE_EXPIRE_SECONDS`, the duration of '
        'authorization codes in the number of seconds, and `REFRESH_TOKEN_EXPIRE_SECONDS`, '
        'the duration of refresh tokens, after expired access tokens, '
        'in the number of seconds.'
    ),
    category=_('Authentication'),
    category_slug='authentication',
    unit=_('seconds'),
@@ -57,10 +61,12 @@ register(
    field_class=fields.BooleanField,
    default=False,
    label=_('Allow External Users to Create OAuth2 Tokens'),
    help_text=_('For security reasons, users from external auth providers (LDAP, SAML, '
                'SSO, Radius, and others) are not allowed to create OAuth2 tokens. '
                'To change this behavior, enable this setting. Existing tokens will '
                'not be deleted when this setting is toggled off.'),
    help_text=_(
        'For security reasons, users from external auth providers (LDAP, SAML, '
        'SSO, Radius, and others) are not allowed to create OAuth2 tokens. '
        'To change this behavior, enable this setting. Existing tokens will '
        'not be deleted when this setting is toggled off.'
    ),
    category=_('Authentication'),
    category_slug='authentication',
)
@@ -71,8 +77,7 @@ register(
    required=False,
    default='',
    label=_('Login redirect override URL'),
    help_text=_('URL to which unauthorized users will be redirected to log in. '
                'If blank, users will be sent to the Tower login page.'),
    help_text=_('URL to which unauthorized users will be redirected to log in. If blank, users will be sent to the Tower login page.'),
    category=_('Authentication'),
    category_slug='authentication',
)

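For orientation, the registered OAUTH2_PROVIDER setting reads and writes as an ordinary JSON dictionary over the settings API. A plausible PATCH payload would look like the following (values are illustrative, not the defaults; the endpoint path assumes the standard settings page for the `authentication` category slug registered above):

```python
# Illustrative only: token lifetimes in seconds, per the help_text above.
payload = {
    "OAUTH2_PROVIDER": {
        "ACCESS_TOKEN_EXPIRE_SECONDS": 31536000,
        "AUTHORIZATION_CODE_EXPIRE_SECONDS": 600,
        "REFRESH_TOKEN_EXPIRE_SECONDS": 2628000,
    }
}
# e.g. PATCH /api/v2/settings/authentication/ with this body
```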
@@ -16,7 +16,4 @@ class ActiveJobConflict(ValidationError):
        # turn everything in self.detail into strings by using force_text.
        # Declaring detail afterwards circumvents this behavior.
        super(ActiveJobConflict, self).__init__()
        self.detail = {
            "error": _("Resource is being used by running jobs."),
            "active_jobs": active_jobs
        }
        self.detail = {"error": _("Resource is being used by running jobs."), "active_jobs": active_jobs}

@@ -16,10 +16,10 @@ __all__ = ['BooleanNullField', 'CharNullField', 'ChoiceNullField', 'VerbatimFiel


class NullFieldMixin(object):
    '''
    """
    Mixin to prevent shortcutting validation when we want to allow null input,
    but coerce the resulting value to another type.
    '''
    """

    def validate_empty_values(self, data):
        (is_empty_value, data) = super(NullFieldMixin, self).validate_empty_values(data)
@@ -29,18 +29,18 @@ class NullFieldMixin(object):


class BooleanNullField(NullFieldMixin, serializers.NullBooleanField):
    '''
    """
    Custom boolean field that allows null and empty string as False values.
    '''
    """

    def to_internal_value(self, data):
        return bool(super(BooleanNullField, self).to_internal_value(data))


class CharNullField(NullFieldMixin, serializers.CharField):
    '''
    """
    Custom char field that allows null as input and coerces to an empty string.
    '''
    """

    def __init__(self, **kwargs):
        kwargs['allow_null'] = True
@@ -51,9 +51,9 @@ class CharNullField(NullFieldMixin, serializers.CharField):


class ChoiceNullField(NullFieldMixin, serializers.ChoiceField):
    '''
    """
    Custom choice field that allows null as input and coerces to an empty string.
    '''
    """

    def __init__(self, **kwargs):
        kwargs['allow_null'] = True
@@ -64,9 +64,9 @@ class ChoiceNullField(NullFieldMixin, serializers.ChoiceField):


class VerbatimField(serializers.Field):
    '''
    """
    Custom field that passes the value through without changes.
    '''
    """

    def to_internal_value(self, data):
        return data
@@ -77,22 +77,19 @@ class VerbatimField(serializers.Field):

class OAuth2ProviderField(fields.DictField):

    default_error_messages = {
        'invalid_key_names': _('Invalid key names: {invalid_key_names}'),
    }
    default_error_messages = {'invalid_key_names': _('Invalid key names: {invalid_key_names}')}
    valid_key_names = {'ACCESS_TOKEN_EXPIRE_SECONDS', 'AUTHORIZATION_CODE_EXPIRE_SECONDS', 'REFRESH_TOKEN_EXPIRE_SECONDS'}
    child = fields.IntegerField(min_value=1)

    def to_internal_value(self, data):
        data = super(OAuth2ProviderField, self).to_internal_value(data)
        invalid_flags = (set(data.keys()) - self.valid_key_names)
        invalid_flags = set(data.keys()) - self.valid_key_names
        if invalid_flags:
            self.fail('invalid_key_names', invalid_key_names=', '.join(list(invalid_flags)))
        return data


class DeprecatedCredentialField(serializers.IntegerField):

    def __init__(self, **kwargs):
        kwargs['allow_null'] = True
        kwargs['default'] = None

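A quick sketch of what the null-coercing fields above buy you at the serializer boundary, assuming they behave as their docstrings state (the expected results in the comments are inferred from those docstrings, not verified output):

```python
# Sketch: exercising the fields standalone, outside a serializer.
from awx.api.fields import BooleanNullField, CharNullField

BooleanNullField().run_validation(None)  # expected: False (null coerced)
CharNullField().run_validation(None)     # expected: ''    (null coerced to empty string)
```

The point of NullFieldMixin is that plain DRF fields short-circuit on empty input before to_internal_value runs; the mixin lets null through so the subclass can coerce it.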
@@ -27,9 +27,9 @@ from awx.main.utils.db import get_all_field_names


class TypeFilterBackend(BaseFilterBackend):
    '''
    """
    Filter on type field now returned with all objects.
    '''
    """

    def filter_queryset(self, request, queryset, view):
        try:
@@ -64,7 +64,7 @@ class TypeFilterBackend(BaseFilterBackend):


def get_fields_from_path(model, path):
    '''
    """
    Given a Django ORM lookup path (possibly over multiple models)
    Returns the fields in the line, and also the revised lookup path
    ex., given
@@ -73,7 +73,7 @@ def get_fields_from_path(model, path):
    returns tuple of fields traversed as well and a corrected path,
    for special cases we do substitutions
    ([<IntegerField for timeout>], 'project__timeout')
    '''
    """
    # Store of all the fields used to detect repeats
    field_list = []
    new_parts = []
@@ -82,12 +82,9 @@ def get_fields_from_path(model, path):
            raise ParseError(_('No related model for field {}.').format(name))
        # HACK: Make project and inventory source filtering by old field names work for backwards compatibility.
        if model._meta.object_name in ('Project', 'InventorySource'):
            name = {
                'current_update': 'current_job',
                'last_update': 'last_job',
                'last_update_failed': 'last_job_failed',
                'last_updated': 'last_job_run',
            }.get(name, name)
            name = {'current_update': 'current_job', 'last_update': 'last_job', 'last_update_failed': 'last_job_failed', 'last_updated': 'last_job_run'}.get(
                name, name
            )

        if name == 'type' and 'polymorphic_ctype' in get_all_field_names(model):
            name = 'polymorphic_ctype'
@@ -121,28 +118,42 @@ def get_fields_from_path(model, path):


def get_field_from_path(model, path):
    '''
    """
    Given a Django ORM lookup path (possibly over multiple models)
    Returns the last field in the line, and the revised lookup path
    ex.
    (<IntegerField for timeout>, 'project__timeout')
    '''
    """
    field_list, new_path = get_fields_from_path(model, path)
    return (field_list[-1], new_path)


class FieldLookupBackend(BaseFilterBackend):
    '''
    """
    Filter using field lookups provided via query string parameters.
    '''
    """

    RESERVED_NAMES = ('page', 'page_size', 'format', 'order', 'order_by',
                      'search', 'type', 'host_filter', 'count_disabled', 'no_truncate')
    RESERVED_NAMES = ('page', 'page_size', 'format', 'order', 'order_by', 'search', 'type', 'host_filter', 'count_disabled', 'no_truncate')

    SUPPORTED_LOOKUPS = ('exact', 'iexact', 'contains', 'icontains',
                         'startswith', 'istartswith', 'endswith', 'iendswith',
                         'regex', 'iregex', 'gt', 'gte', 'lt', 'lte', 'in',
                         'isnull', 'search')
    SUPPORTED_LOOKUPS = (
        'exact',
        'iexact',
        'contains',
        'icontains',
        'startswith',
        'istartswith',
        'endswith',
        'iendswith',
        'regex',
        'iregex',
        'gt',
        'gte',
        'lt',
        'lte',
        'in',
        'isnull',
        'search',
    )

    # A list of fields that we know can be filtered on without the possibility
    # of introducing duplicates
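For context, these lookups are what API consumers append to list endpoints as double-underscore query-string suffixes. A couple of illustrative requests (host, endpoints, and field names are examples chosen for the sketch, not taken from this diff):

```python
# Illustrative only; assumes a reachable AWX API and the `requests` library.
import requests

base = 'https://awx.example.com/api/v2'
# name contains "deploy", case-insensitively
requests.get(base + '/job_templates/', params={'name__icontains': 'deploy'})
# status is one of two values
requests.get(base + '/jobs/', params={'status__in': 'pending,running'})
```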
@@ -189,10 +200,7 @@ class FieldLookupBackend(BaseFilterBackend):
            try:
                return self.to_python_related(value)
            except ValueError:
                raise ParseError(_('Invalid {field_name} id: {field_id}').format(
                    field_name=getattr(field, 'name', 'related field'),
                    field_id=value)
                )
                raise ParseError(_('Invalid {field_name} id: {field_id}').format(field_name=getattr(field, 'name', 'related field'), field_id=value))
        else:
            return field.to_python(value)

@@ -205,13 +213,13 @@ class FieldLookupBackend(BaseFilterBackend):
        field_list, new_lookup = self.get_fields_from_lookup(model, lookup)
        field = field_list[-1]

        needs_distinct = (not all(isinstance(f, self.NO_DUPLICATES_ALLOW_LIST) for f in field_list))
        needs_distinct = not all(isinstance(f, self.NO_DUPLICATES_ALLOW_LIST) for f in field_list)

        # Type names are stored without underscores internally, but are presented
        # and serialized over the API containing underscores, so we remove `_`
        # for polymorphic_ctype__model lookups.
        if new_lookup.startswith('polymorphic_ctype__model'):
            value = value.replace('_','')
            value = value.replace('_', '')
        elif new_lookup.endswith('__isnull'):
            value = to_python_boolean(value)
        elif new_lookup.endswith('__in'):
@@ -329,24 +337,20 @@ class FieldLookupBackend(BaseFilterBackend):
            args = []
            for n, k, v in and_filters:
                if n:
                    args.append(~Q(**{k:v}))
                    args.append(~Q(**{k: v}))
                else:
                    args.append(Q(**{k:v}))
                    args.append(Q(**{k: v}))
            for role_name in role_filters:
                if not hasattr(queryset.model, 'accessible_pk_qs'):
                    raise ParseError(_(
                        'Cannot apply role_level filter to this list because its model '
                        'does not use roles for access control.'))
                args.append(
                    Q(pk__in=queryset.model.accessible_pk_qs(request.user, role_name))
                )
                    raise ParseError(_('Cannot apply role_level filter to this list because its model ' 'does not use roles for access control.'))
                args.append(Q(pk__in=queryset.model.accessible_pk_qs(request.user, role_name)))
            if or_filters:
                q = Q()
                for n,k,v in or_filters:
                for n, k, v in or_filters:
                    if n:
                        q |= ~Q(**{k:v})
                        q |= ~Q(**{k: v})
                    else:
                        q |= Q(**{k:v})
                        q |= Q(**{k: v})
                args.append(q)
            if search_filters and search_filter_relation == 'OR':
                q = Q()
@@ -360,11 +364,11 @@ class FieldLookupBackend(BaseFilterBackend):
                for constrain in constrains:
                    q_chain |= Q(**{constrain: term})
                queryset = queryset.filter(q_chain)
            for n,k,v in chain_filters:
            for n, k, v in chain_filters:
                if n:
                    q = ~Q(**{k:v})
                    q = ~Q(**{k: v})
                else:
                    q = Q(**{k:v})
                    q = Q(**{k: v})
                queryset = queryset.filter(q)
            queryset = queryset.filter(*args)
            if needs_distinct:
@@ -377,9 +381,9 @@ class FieldLookupBackend(BaseFilterBackend):


class OrderByBackend(BaseFilterBackend):
    '''
    """
    Filter to apply ordering based on query string parameters.
    '''
    """

    def filter_queryset(self, request, queryset, view):
        try:

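The filter assembly above leans entirely on Django Q objects: negated terms become ~Q(...), AND terms are collected into a list and applied together, and OR terms are folded in with |=. A minimal standalone sketch of the same composition (the model and field names are illustrative):

```python
# Sketch of the Q-composition pattern used above; `Job` is illustrative.
from django.db.models import Q

terms = [(False, 'status', 'failed'), (True, 'name__icontains', 'test')]
args = []
for negate, key, value in terms:
    # ~Q negates a single term; plain Q keeps it positive.
    args.append(~Q(**{key: value}) if negate else Q(**{key: value}))

# queryset.filter(*args) ANDs the list:
#   status = 'failed' AND NOT (name icontains 'test')
```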
@@ -35,55 +35,50 @@ from rest_framework.negotiation import DefaultContentNegotiation

# AWX
from awx.api.filters import FieldLookupBackend
from awx.main.models import (
    UnifiedJob, UnifiedJobTemplate, User, Role, Credential,
    WorkflowJobTemplateNode, WorkflowApprovalTemplate
)
from awx.main.models import UnifiedJob, UnifiedJobTemplate, User, Role, Credential, WorkflowJobTemplateNode, WorkflowApprovalTemplate
from awx.main.access import access_registry
from awx.main.utils import (
    camelcase_to_underscore,
    get_search_fields,
    getattrd,
    get_object_or_400,
    decrypt_field,
    get_awx_version,
)
from awx.main.utils import camelcase_to_underscore, get_search_fields, getattrd, get_object_or_400, decrypt_field, get_awx_version
from awx.main.utils.db import get_all_field_names
from awx.main.views import ApiErrorView
from awx.api.serializers import ResourceAccessListElementSerializer, CopySerializer, UserSerializer
from awx.api.versioning import URLPathVersioning
from awx.api.metadata import SublistAttachDetatchMetadata, Metadata

__all__ = ['APIView', 'GenericAPIView', 'ListAPIView', 'SimpleListAPIView',
           'ListCreateAPIView', 'SubListAPIView', 'SubListCreateAPIView',
           'SubListDestroyAPIView',
           'SubListCreateAttachDetachAPIView', 'RetrieveAPIView',
           'RetrieveUpdateAPIView', 'RetrieveDestroyAPIView',
           'RetrieveUpdateDestroyAPIView',
           'SubDetailAPIView',
           'ResourceAccessList',
           'ParentMixin',
           'DeleteLastUnattachLabelMixin',
           'SubListAttachDetachAPIView',
           'CopyAPIView', 'BaseUsersList',]
__all__ = [
    'APIView',
    'GenericAPIView',
    'ListAPIView',
    'SimpleListAPIView',
    'ListCreateAPIView',
    'SubListAPIView',
    'SubListCreateAPIView',
    'SubListDestroyAPIView',
    'SubListCreateAttachDetachAPIView',
    'RetrieveAPIView',
    'RetrieveUpdateAPIView',
    'RetrieveDestroyAPIView',
    'RetrieveUpdateDestroyAPIView',
    'SubDetailAPIView',
    'ResourceAccessList',
    'ParentMixin',
    'DeleteLastUnattachLabelMixin',
    'SubListAttachDetachAPIView',
    'CopyAPIView',
    'BaseUsersList',
]

logger = logging.getLogger('awx.api.generics')
analytics_logger = logging.getLogger('awx.analytics.performance')


class LoggedLoginView(auth_views.LoginView):

    def get(self, request, *args, **kwargs):
        # The django.auth.contrib login form doesn't perform the content
        # negotiation we've come to expect from DRF; add in code to catch
        # situations where Accept != text/html (or */*) and reply with
        # an HTTP 406
        try:
            DefaultContentNegotiation().select_renderer(
                request,
                [StaticHTMLRenderer],
                'html'
            )
            DefaultContentNegotiation().select_renderer(request, [StaticHTMLRenderer], 'html')
        except NotAcceptable:
            resp = Response(status=status.HTTP_406_NOT_ACCEPTABLE)
            resp.accepted_renderer = StaticHTMLRenderer()
@@ -96,7 +91,7 @@ class LoggedLoginView(auth_views.LoginView):
        ret = super(LoggedLoginView, self).post(request, *args, **kwargs)
        current_user = getattr(request, 'user', None)
        if request.user.is_authenticated:
            logger.info(smart_text(u"User {} logged in from {}".format(self.request.user.username,request.META.get('REMOTE_ADDR', None))))
            logger.info(smart_text(u"User {} logged in from {}".format(self.request.user.username, request.META.get('REMOTE_ADDR', None))))
            ret.set_cookie('userLoggedIn', 'true')
            current_user = UserSerializer(self.request.user)
            current_user = smart_text(JSONRenderer().render(current_user.data))
@@ -106,29 +101,27 @@ class LoggedLoginView(auth_views.LoginView):
            return ret
        else:
            if 'username' in self.request.POST:
                logger.warn(smart_text(u"Login failed for user {} from {}".format(self.request.POST.get('username'),request.META.get('REMOTE_ADDR', None))))
                logger.warn(smart_text(u"Login failed for user {} from {}".format(self.request.POST.get('username'), request.META.get('REMOTE_ADDR', None))))
            ret.status_code = 401
            return ret


class LoggedLogoutView(auth_views.LogoutView):

    def dispatch(self, request, *args, **kwargs):
        original_user = getattr(request, 'user', None)
        ret = super(LoggedLogoutView, self).dispatch(request, *args, **kwargs)
        current_user = getattr(request, 'user', None)
        ret.set_cookie('userLoggedIn', 'false')
        if (not current_user or not getattr(current_user, 'pk', True)) \
                and current_user != original_user:
        if (not current_user or not getattr(current_user, 'pk', True)) and current_user != original_user:
            logger.info("User {} logged out.".format(original_user.username))
        return ret


def get_view_description(view, html=False):
    '''Wrapper around REST framework get_view_description() to continue
    """Wrapper around REST framework get_view_description() to continue
    to support our historical div.

    '''
    """
    desc = views.get_view_description(view, html=html)
    if html:
        desc = '<div class="description">%s</div>' % desc
@@ -138,6 +131,7 @@ def get_view_description(view, html=False):
def get_default_schema():
    if settings.SETTINGS_MODULE == 'awx.settings.development':
        from awx.api.swagger import AutoSchema

        return AutoSchema()
    else:
        return views.APIView.schema

@@ -149,21 +143,23 @@ class APIView(views.APIView):
    versioning_class = URLPathVersioning

    def initialize_request(self, request, *args, **kwargs):
        '''
        """
        Store the Django REST Framework Request object as an attribute on the
        normal Django request, store time the request started.
        '''
        """
        self.time_started = time.time()
        if getattr(settings, 'SQL_DEBUG', False):
            self.queries_before = len(connection.queries)

        # If there are any custom headers in REMOTE_HOST_HEADERS, make sure
        # they respect the allowed proxy list
        if all([
            settings.PROXY_IP_ALLOWED_LIST,
            request.environ.get('REMOTE_ADDR') not in settings.PROXY_IP_ALLOWED_LIST,
            request.environ.get('REMOTE_HOST') not in settings.PROXY_IP_ALLOWED_LIST
        ]):
        if all(
            [
                settings.PROXY_IP_ALLOWED_LIST,
                request.environ.get('REMOTE_ADDR') not in settings.PROXY_IP_ALLOWED_LIST,
                request.environ.get('REMOTE_HOST') not in settings.PROXY_IP_ALLOWED_LIST,
            ]
        ):
            for custom_header in settings.REMOTE_HOST_HEADERS:
                if custom_header.startswith('HTTP_'):
                    request.environ.pop(custom_header, None)
@@ -178,17 +174,19 @@ class APIView(views.APIView):
            request.drf_request_user = None
            self.__init_request_error__ = exc
        except UnsupportedMediaType as exc:
            exc.detail = _('You did not use correct Content-Type in your HTTP request. '
                           'If you are using our REST API, the Content-Type must be application/json')
            exc.detail = _(
                'You did not use correct Content-Type in your HTTP request. ' 'If you are using our REST API, the Content-Type must be application/json'
            )
            self.__init_request_error__ = exc
        return drf_request

    def finalize_response(self, request, response, *args, **kwargs):
        '''
        """
        Log warning for 400 requests. Add header with elapsed time.
        '''
        """
        from awx.main.utils import get_licenser
        from awx.main.utils.licensing import OpenLicense

        #
        # If the URL was rewritten, and we get a 404, we should entirely
        # replace the view in the request context with an ApiErrorView()
@@ -212,8 +210,12 @@ class APIView(views.APIView):
            return response

        if response.status_code >= 400:
            status_msg = "status %s received by user %s attempting to access %s from %s" % \
                         (response.status_code, request.user, request.path, request.META.get('REMOTE_ADDR', None))
            status_msg = "status %s received by user %s attempting to access %s from %s" % (
                response.status_code,
                request.user,
                request.path,
                request.META.get('REMOTE_ADDR', None),
            )
            if hasattr(self, '__init_request_error__'):
                response = self.handle_exception(self.__init_request_error__)
            if response.status_code == 401:
@@ -225,7 +227,7 @@ class APIView(views.APIView):
        time_started = getattr(self, 'time_started', None)
        response['X-API-Product-Version'] = get_awx_version()
        response['X-API-Product-Name'] = 'AWX' if isinstance(get_licenser(), OpenLicense) else 'Red Hat Ansible Tower'


        response['X-API-Node'] = settings.CLUSTER_HOST_ID
        if time_started:
            time_elapsed = time.time() - self.time_started
@@ -311,18 +313,12 @@ class APIView(views.APIView):
        return data

    def determine_version(self, request, *args, **kwargs):
        return (
            getattr(request, 'version', None),
            getattr(request, 'versioning_scheme', None),
        )
        return (getattr(request, 'version', None), getattr(request, 'versioning_scheme', None))

    def dispatch(self, request, *args, **kwargs):
        if self.versioning_class is not None:
            scheme = self.versioning_class()
            request.version, request.versioning_scheme = (
                scheme.determine_version(request, *args, **kwargs),
                scheme
            )
            request.version, request.versioning_scheme = (scheme.determine_version(request, *args, **kwargs), scheme)
        if 'version' in kwargs:
            kwargs.pop('version')
        return super(APIView, self).dispatch(request, *args, **kwargs)
@@ -378,25 +374,22 @@ class GenericAPIView(generics.GenericAPIView, APIView):
        d = super(GenericAPIView, self).get_description_context()
        if hasattr(self.model, "_meta"):
            if hasattr(self.model._meta, "verbose_name"):
                d.update({
                    'model_verbose_name': smart_text(self.model._meta.verbose_name),
                    'model_verbose_name_plural': smart_text(self.model._meta.verbose_name_plural),
                })
                d.update(
                    {
                        'model_verbose_name': smart_text(self.model._meta.verbose_name),
                        'model_verbose_name_plural': smart_text(self.model._meta.verbose_name_plural),
                    }
                )
        serializer = self.get_serializer()
        metadata = self.metadata_class()
        metadata.request = self.request
        for method, key in [
            ('GET', 'serializer_fields'),
            ('POST', 'serializer_create_fields'),
            ('PUT', 'serializer_update_fields')
        ]:
        for method, key in [('GET', 'serializer_fields'), ('POST', 'serializer_create_fields'), ('PUT', 'serializer_update_fields')]:
            d[key] = metadata.get_serializer_info(serializer, method=method)
        d['settings'] = settings
        return d


class SimpleListAPIView(generics.ListAPIView, GenericAPIView):

    def get_queryset(self):
        return self.request.user.get_queryset(self.model)

@@ -413,9 +406,7 @@ class ListAPIView(generics.ListAPIView, GenericAPIView):
        else:
            order_field = 'name'
        d = super(ListAPIView, self).get_description_context()
        d.update({
            'order_field': order_field,
        })
        d.update({'order_field': order_field})
        return d

    @property
@@ -426,9 +417,13 @@ class ListAPIView(generics.ListAPIView, GenericAPIView):
    def related_search_fields(self):
        def skip_related_name(name):
            return (
                name is None or name.endswith('_role') or name.startswith('_') or
                name.startswith('deprecated_') or name.endswith('_set') or
                name == 'polymorphic_ctype')
                name is None
                or name.endswith('_role')
                or name.startswith('_')
                or name.startswith('deprecated_')
                or name.endswith('_set')
                or name == 'polymorphic_ctype'
            )

        fields = set([])
        for field in self.model._meta.fields:
@@ -482,9 +477,7 @@ class ParentMixin(object):
    def get_parent_object(self):
        if self.parent_object is not None:
            return self.parent_object
        parent_filter = {
            self.lookup_field: self.kwargs.get(self.lookup_field, None),
        }
        parent_filter = {self.lookup_field: self.kwargs.get(self.lookup_field, None)}
        self.parent_object = get_object_or_404(self.parent_model, **parent_filter)
        return self.parent_object

@@ -513,10 +506,12 @@ class SubListAPIView(ParentMixin, ListAPIView):

    def get_description_context(self):
        d = super(SubListAPIView, self).get_description_context()
        d.update({
            'parent_model_verbose_name': smart_text(self.parent_model._meta.verbose_name),
            'parent_model_verbose_name_plural': smart_text(self.parent_model._meta.verbose_name_plural),
        })
        d.update(
            {
                'parent_model_verbose_name': smart_text(self.parent_model._meta.verbose_name),
                'parent_model_verbose_name_plural': smart_text(self.parent_model._meta.verbose_name_plural),
            }
        )
        return d

    def get_queryset(self):
@@ -531,7 +526,6 @@ class SubListAPIView(ParentMixin, ListAPIView):


class DestroyAPIView(generics.DestroyAPIView):

    def has_delete_permission(self, obj):
        return self.request.user.can_access(self.model, 'delete', obj)

@@ -545,12 +539,12 @@ class SubListDestroyAPIView(DestroyAPIView, SubListAPIView):
    """
    Concrete view for deleting everything related by `relationship`.
    """

    check_sub_obj_permission = True

    def destroy(self, request, *args, **kwargs):
        instance_list = self.get_queryset()
        if (not self.check_sub_obj_permission and
                not request.user.can_access(self.parent_model, 'delete', self.get_parent_object())):
        if not self.check_sub_obj_permission and not request.user.can_access(self.parent_model, 'delete', self.get_parent_object()):
            raise PermissionDenied()
        self.perform_list_destroy(instance_list)
        return Response(status=status.HTTP_204_NO_CONTENT)
@@ -574,9 +568,7 @@ class SubListCreateAPIView(SubListAPIView, ListCreateAPIView):

    def get_description_context(self):
        d = super(SubListCreateAPIView, self).get_description_context()
        d.update({
            'parent_key': getattr(self, 'parent_key', None),
        })
        d.update({'parent_key': getattr(self, 'parent_key', None)})
        return d

    def get_queryset(self):
@@ -610,8 +602,7 @@ class SubListCreateAPIView(SubListAPIView, ListCreateAPIView):
        # attempt to deserialize the object
        serializer = self.get_serializer(data=data)
        if not serializer.is_valid():
            return Response(serializer.errors,
                            status=status.HTTP_400_BAD_REQUEST)
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

        # Verify we have permission to add the object as given.
        if not request.user.can_access(self.model, 'add', serializer.validated_data):
@@ -635,9 +626,7 @@ class SubListCreateAttachDetachAPIView(SubListCreateAPIView):

    def get_description_context(self):
        d = super(SubListCreateAttachDetachAPIView, self).get_description_context()
        d.update({
            "has_attach": True,
        })
        d.update({"has_attach": True})
        return d

    def attach_validate(self, request):
@@ -675,9 +664,7 @@ class SubListCreateAttachDetachAPIView(SubListCreateAPIView):
        sub = get_object_or_400(self.model, pk=sub_id)

        # Verify we have permission to attach.
        if not request.user.can_access(self.parent_model, 'attach', parent, sub,
                                       self.relationship, data,
                                       skip_sub_obj_read_check=created):
        if not request.user.can_access(self.parent_model, 'attach', parent, sub, self.relationship, data, skip_sub_obj_read_check=created):
            raise PermissionDenied()

        # Verify that the relationship to be added is valid.
@@ -716,8 +703,7 @@ class SubListCreateAttachDetachAPIView(SubListCreateAPIView):
        relationship = getattrd(parent, self.relationship)
        sub = get_object_or_400(self.model, pk=sub_id)

        if not request.user.can_access(self.parent_model, 'unattach', parent,
                                       sub, self.relationship, request.data):
        if not request.user.can_access(self.parent_model, 'unattach', parent, sub, self.relationship, request.data):
            raise PermissionDenied()

        if parent_key:
@@ -735,28 +721,24 @@ class SubListCreateAttachDetachAPIView(SubListCreateAPIView):

    def post(self, request, *args, **kwargs):
        if not isinstance(request.data, dict):
            return Response('invalid type for post data',
                            status=status.HTTP_400_BAD_REQUEST)
            return Response('invalid type for post data', status=status.HTTP_400_BAD_REQUEST)
        if 'disassociate' in request.data:
            return self.unattach(request, *args, **kwargs)
        else:
            return self.attach(request, *args, **kwargs)


class SubListAttachDetachAPIView(SubListCreateAttachDetachAPIView):
    '''
    """
    Derived version of SubListCreateAttachDetachAPIView that prohibits creation
    '''
    """

    metadata_class = SublistAttachDetatchMetadata

    def post(self, request, *args, **kwargs):
        sub_id = request.data.get('id', None)
        if not sub_id:
            return Response(
                dict(msg=_("{} 'id' field is missing.".format(
                    self.model._meta.verbose_name.title()))),
                status=status.HTTP_400_BAD_REQUEST)
            return Response(dict(msg=_("{} 'id' field is missing.".format(self.model._meta.verbose_name.title()))), status=status.HTTP_400_BAD_REQUEST)
        return super(SubListAttachDetachAPIView, self).post(request, *args, **kwargs)

    def update_raw_data(self, data):
@@ -768,11 +750,11 @@ class SubListAttachDetachAPIView(SubListCreateAttachDetachAPIView):


class DeleteLastUnattachLabelMixin(object):
    '''
    """
    Models for which you want the last instance to be deleted from the database
    when the last disassociate is called should inherit from this class. Further,
    the model should implement is_detached()
    '''
    """

    def unattach(self, request, *args, **kwargs):
        (sub_id, res) = super(DeleteLastUnattachLabelMixin, self).unattach_validate(request)
@@ -798,7 +780,6 @@ class RetrieveAPIView(generics.RetrieveAPIView, GenericAPIView):


class RetrieveUpdateAPIView(RetrieveAPIView, generics.RetrieveUpdateAPIView):

    def update(self, request, *args, **kwargs):
        self.update_filter(request, *args, **kwargs)
        return super(RetrieveUpdateAPIView, self).update(request, *args, **kwargs)
@@ -839,6 +820,7 @@ class ResourceAccessList(ParentMixin, ListAPIView):

def trigger_delayed_deep_copy(*args, **kwargs):
    from awx.main.tasks import deep_copy_model_obj

    connection.on_commit(lambda: deep_copy_model_obj.delay(*args, **kwargs))


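The hunk above is a common Django-plus-task-queue idiom: defer dispatching the background task until the surrounding transaction commits, so a worker never races against a row it cannot see yet. A generic sketch of the same idiom (the task object is a stand-in):

```python
# Sketch of the on_commit idiom used above; `rebuild_index` stands in for
# any Celery-style task object with a .delay() method.
from django.db import transaction


def save_and_reindex(obj, rebuild_index):
    with transaction.atomic():
        obj.save()
        # Queued only after COMMIT, so the worker always sees the new row.
        transaction.on_commit(lambda: rebuild_index.delay(obj.pk))
```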
@@ -869,8 +851,7 @@ class CopyAPIView(GenericAPIView):
                    field_val[secret] = decrypt_field(obj, secret)
            elif isinstance(field_val, dict):
                for sub_field in field_val:
                    if isinstance(sub_field, str) \
                            and isinstance(field_val[sub_field], str):
                    if isinstance(sub_field, str) and isinstance(field_val[sub_field], str):
                        field_val[sub_field] = decrypt_field(obj, field_name, sub_field)
            elif isinstance(field_val, str):
                try:
@@ -882,15 +863,11 @@ class CopyAPIView(GenericAPIView):
    def _build_create_dict(self, obj):
        ret = {}
        if self.copy_return_serializer_class:
            all_fields = Metadata().get_serializer_info(
                self._get_copy_return_serializer(), method='POST'
            )
            all_fields = Metadata().get_serializer_info(self._get_copy_return_serializer(), method='POST')
            for field_name, field_info in all_fields.items():
                if not hasattr(obj, field_name) or field_info.get('read_only', True):
                    continue
                ret[field_name] = CopyAPIView._decrypt_model_field_if_needed(
                    obj, field_name, getattr(obj, field_name)
                )
                ret[field_name] = CopyAPIView._decrypt_model_field_if_needed(obj, field_name, getattr(obj, field_name))
        return ret

    @staticmethod
@@ -908,9 +885,11 @@ class CopyAPIView(GenericAPIView):
            except AttributeError:
                continue
            # Adjust copy blocked fields here.
            if field.name in fields_to_discard or field.name in [
                'id', 'pk', 'polymorphic_ctype', 'unifiedjobtemplate_ptr', 'created_by', 'modified_by'
            ] or field.name.endswith('_role'):
            if (
                field.name in fields_to_discard
                or field.name in ['id', 'pk', 'polymorphic_ctype', 'unifiedjobtemplate_ptr', 'created_by', 'modified_by']
                or field.name.endswith('_role')
            ):
                create_kwargs.pop(field.name, None)
                continue
            if field.one_to_many:
@@ -926,33 +905,24 @@ class CopyAPIView(GenericAPIView):
            elif field.name == 'name' and not old_parent:
                create_kwargs[field.name] = copy_name or field_val + ' copy'
            elif field.name in fields_to_preserve:
                create_kwargs[field.name] = CopyAPIView._decrypt_model_field_if_needed(
                    obj, field.name, field_val
                )
                create_kwargs[field.name] = CopyAPIView._decrypt_model_field_if_needed(obj, field.name, field_val)

        # WorkflowJobTemplateNodes that represent an approval are *special*;
        # when we copy them, we actually want to *copy* the UJT they point at
        # rather than share the template reference between nodes in disparate
        # workflows
        if (
            isinstance(obj, WorkflowJobTemplateNode) and
            isinstance(getattr(obj, 'unified_job_template'), WorkflowApprovalTemplate)
        ):
            new_approval_template, sub_objs = CopyAPIView.copy_model_obj(
                None, None, WorkflowApprovalTemplate,
                obj.unified_job_template, creater
            )
        if isinstance(obj, WorkflowJobTemplateNode) and isinstance(getattr(obj, 'unified_job_template'), WorkflowApprovalTemplate):
            new_approval_template, sub_objs = CopyAPIView.copy_model_obj(None, None, WorkflowApprovalTemplate, obj.unified_job_template, creater)
            create_kwargs['unified_job_template'] = new_approval_template

        new_obj = model.objects.create(**create_kwargs)
        logger.debug('Deep copy: Created new object {}({})'.format(
            new_obj, model
        ))
        logger.debug('Deep copy: Created new object {}({})'.format(new_obj, model))
        # Need to save separately because django-crum get_current_user would
        # not work properly in a non-request-response-cycle context.
        new_obj.created_by = creater
        new_obj.save()
        from awx.main.signals import disable_activity_stream

        with disable_activity_stream():
            for m2m in m2m_to_preserve:
                for related_obj in m2m_to_preserve[m2m].all():
@@ -978,8 +948,7 @@ class CopyAPIView(GenericAPIView):
        for key in create_kwargs:
            create_kwargs[key] = getattr(create_kwargs[key], 'pk', None) or create_kwargs[key]
        try:
            can_copy = request.user.can_access(self.model, 'add', create_kwargs) and \
                request.user.can_access(self.model, 'copy_related', obj)
            can_copy = request.user.can_access(self.model, 'add', create_kwargs) and request.user.can_access(self.model, 'copy_related', obj)
        except PermissionDenied:
            return Response({'can_copy': False})
        return Response({'can_copy': can_copy})
@@ -998,8 +967,7 @@ class CopyAPIView(GenericAPIView):
        if not serializer.is_valid():
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
        new_obj, sub_objs = CopyAPIView.copy_model_obj(
            None, None, self.model, obj, request.user, create_kwargs=create_kwargs,
            copy_name=serializer.validated_data.get('name', '')
            None, None, self.model, obj, request.user, create_kwargs=create_kwargs, copy_name=serializer.validated_data.get('name', '')
        )
        if hasattr(new_obj, 'admin_role') and request.user not in new_obj.admin_role.members.all():
            new_obj.admin_role.members.add(request.user)
@@ -1011,13 +979,9 @@ class CopyAPIView(GenericAPIView):
        cache.set(key, sub_objs, timeout=3600)
        permission_check_func = None
        if hasattr(type(self), 'deep_copy_permission_check_func'):
            permission_check_func = (
                type(self).__module__, type(self).__name__, 'deep_copy_permission_check_func'
            )
            permission_check_func = (type(self).__module__, type(self).__name__, 'deep_copy_permission_check_func')
        trigger_delayed_deep_copy(
            self.model.__module__, self.model.__name__,
            obj.pk, new_obj.pk, request.user.pk, key,
            permission_check_func=permission_check_func
            self.model.__module__, self.model.__name__, obj.pk, new_obj.pk, request.user.pk, key, permission_check_func=permission_check_func
        )
        serializer = self._get_copy_return_serializer(new_obj)
        headers = {'Location': new_obj.get_absolute_url(request=request)}
@@ -1026,7 +990,7 @@ class CopyAPIView(GenericAPIView):

class BaseUsersList(SubListCreateAttachDetachAPIView):
    def post(self, request, *args, **kwargs):
        ret = super(BaseUsersList, self).post( request, *args, **kwargs)
        ret = super(BaseUsersList, self).post(request, *args, **kwargs)
        if ret.status_code != 201:
            return ret
        try:

@@ -24,22 +24,27 @@ from rest_framework.request import clone_request
from awx.api.fields import ChoiceNullField
from awx.main.fields import JSONField, ImplicitRoleField
from awx.main.models import NotificationTemplate
from awx.main.scheduler.kubernetes import PodManager
from awx.main.tasks import AWXReceptorJob


class Metadata(metadata.SimpleMetadata):

    def get_field_info(self, field):
        field_info = OrderedDict()
        field_info['type'] = self.label_lookup[field]
        field_info['required'] = getattr(field, 'required', False)

        text_attrs = [
            'read_only', 'label', 'help_text',
            'min_length', 'max_length',
            'min_value', 'max_value',
            'category', 'category_slug',
            'defined_in_file', 'unit',
            'read_only',
            'label',
            'help_text',
            'min_length',
            'max_length',
            'min_value',
            'max_value',
            'category',
            'category_slug',
            'defined_in_file',
            'unit',
        ]

        for attr in text_attrs:
@@ -61,8 +66,9 @@ class Metadata(metadata.SimpleMetadata):
            'type': _('Data type for this {}.'),
            'url': _('URL for this {}.'),
            'related': _('Data structure with URLs of related resources.'),
            'summary_fields': _('Data structure with name/description for related resources. '
                                'The output for some objects may be limited for performance reasons.'),
            'summary_fields': _(
                'Data structure with name/description for related resources. ' 'The output for some objects may be limited for performance reasons.'
            ),
            'created': _('Timestamp when this {} was created.'),
            'modified': _('Timestamp when this {} was last modified.'),
        }
@@ -101,9 +107,7 @@ class Metadata(metadata.SimpleMetadata):
            field_info['children'] = self.get_serializer_info(field)

        if not isinstance(field, (RelatedField, ManyRelatedField)) and hasattr(field, 'choices'):
            choices = [
                (choice_value, choice_name) for choice_value, choice_name in field.choices.items()
            ]
            choices = [(choice_value, choice_name) for choice_value, choice_name in field.choices.items()]
            if not any(choice in ('', None) for choice, _ in choices):
                if field.allow_blank:
                    choices = [("", "---------")] + choices
@@ -131,7 +135,6 @@ class Metadata(metadata.SimpleMetadata):
            for (notification_type_name, notification_tr_name, notification_type_class) in NotificationTemplate.NOTIFICATION_TYPES:
                field_info[notification_type_name] = notification_type_class.default_messages


        # Update type of fields returned...
        model_field = None
        if serializer and hasattr(serializer, 'Meta') and hasattr(serializer.Meta, 'model'):
@@ -149,22 +152,19 @@ class Metadata(metadata.SimpleMetadata):
            field_info['type'] = 'integer'
        elif field.field_name in ('created', 'modified'):
            field_info['type'] = 'datetime'
        elif (
            RelatedField in field.__class__.__bases__ or
            isinstance(model_field, ForeignKey)
        ):
        elif RelatedField in field.__class__.__bases__ or isinstance(model_field, ForeignKey):
            field_info['type'] = 'id'
        elif (
            isinstance(field, JSONField) or
            isinstance(model_field, JSONField) or
            isinstance(field, DRFJSONField) or
            isinstance(getattr(field, 'model_field', None), JSONField) or
            field.field_name == 'credential_passwords'
            isinstance(field, JSONField)
            or isinstance(model_field, JSONField)
            or isinstance(field, DRFJSONField)
            or isinstance(getattr(field, 'model_field', None), JSONField)
            or field.field_name == 'credential_passwords'
        ):
            field_info['type'] = 'json'
        elif (
            isinstance(field, ManyRelatedField) and
            field.field_name == 'credentials'
            isinstance(field, ManyRelatedField)
            and field.field_name == 'credentials'
            # launch-time credentials
        ):
            field_info['type'] = 'list_of_ids'
@@ -175,10 +175,7 @@ class Metadata(metadata.SimpleMetadata):

    def get_serializer_info(self, serializer, method=None):
        filterer = getattr(serializer, 'filter_field_metadata', lambda fields, method: fields)
        return filterer(
            super(Metadata, self).get_serializer_info(serializer),
            method
        )
        return filterer(super(Metadata, self).get_serializer_info(serializer), method)

    def determine_actions(self, request, view):
        # Add field information for GET requests (so field names/labels are
@@ -209,7 +206,7 @@ class Metadata(metadata.SimpleMetadata):
                continue

            if field == "pod_spec_override":
                meta['default'] = PodManager().pod_definition
                meta['default'] = AWXReceptorJob().pod_definition

            # Add type choices if available from the serializer.
            if field == 'type' and hasattr(serializer, 'get_type_choices'):
@@ -274,6 +271,7 @@ class Metadata(metadata.SimpleMetadata):
        metadata['object_roles'] = roles

        from rest_framework import generics

        if isinstance(view, generics.ListAPIView) and hasattr(view, 'paginator'):
            metadata['max_page_size'] = view.paginator.max_page_size

@@ -293,7 +291,6 @@ class RoleMetadata(Metadata):


class SublistAttachDetatchMetadata(Metadata):

    def determine_actions(self, request, view):
        actions = super(SublistAttachDetatchMetadata, self).determine_actions(request, view)
        method = 'POST'
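Metadata classes like the one above drive what an OPTIONS request against an endpoint returns. Schematically, the per-field info produced by get_field_info() ends up nested along these lines (the shape follows the attributes collected above; the concrete values are illustrative, not a verbatim AWX response):

```python
# Illustrative shape only.
options_response = {
    "actions": {
        "POST": {
            "name": {
                "type": "string",
                "required": True,
                "label": "Name",
                "max_length": 512,
            },
        },
    },
    "max_page_size": 200,
}
```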
@@ -3,13 +3,9 @@

from django.conf.urls import url

from awx.api.views import (
    MetricsView
)
from awx.api.views import MetricsView


urls = [
    url(r'^$', MetricsView.as_view(), name='metrics_view'),
]
urls = [url(r'^$', MetricsView.as_view(), name='metrics_view')]

__all__ = ['urls']

@@ -10,7 +10,6 @@ from rest_framework.utils.urls import replace_query_param


class DisabledPaginator(DjangoPaginator):

    @property
    def num_pages(self):
        return 1
@@ -49,8 +48,7 @@ class Pagination(pagination.PageNumberPagination):

    def get_html_context(self):
        context = super().get_html_context()
        context['page_links'] = [pl._replace(url=self.cap_page_size(pl.url))
                                 for pl in context['page_links']]
        context['page_links'] = [pl._replace(url=self.cap_page_size(pl.url)) for pl in context['page_links']]

        return context

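cap_page_size (called above but not shown in this diff) rewrites each pagination link so a client-supplied page_size can never exceed the configured maximum. A sketch of that idea using the DRF URL helper imported at the top of the hunk; the function body here is an assumption about what the real method does, not a copy of it:

```python
# Sketch; `max_page_size` would come from the paginator's configuration.
from rest_framework.utils.urls import replace_query_param


def cap_page_size(url, requested, max_page_size=200):
    # Clamp the page_size query parameter before handing links to clients.
    return replace_query_param(url, 'page_size', min(requested, max_page_size))
```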
@@ -15,16 +15,25 @@ from awx.main.utils import get_object_or_400
|
||||
|
||||
logger = logging.getLogger('awx.api.permissions')
|
||||
|
||||
__all__ = ['ModelAccessPermission', 'JobTemplateCallbackPermission', 'VariableDataPermission',
|
||||
'TaskPermission', 'ProjectUpdatePermission', 'InventoryInventorySourcesUpdatePermission',
|
||||
'UserPermission', 'IsSuperUser', 'InstanceGroupTowerPermission', 'WorkflowApprovalPermission']
|
||||
__all__ = [
|
||||
'ModelAccessPermission',
|
||||
'JobTemplateCallbackPermission',
|
||||
'VariableDataPermission',
|
||||
'TaskPermission',
|
||||
'ProjectUpdatePermission',
|
||||
'InventoryInventorySourcesUpdatePermission',
|
||||
'UserPermission',
|
||||
'IsSuperUser',
|
||||
'InstanceGroupTowerPermission',
|
||||
'WorkflowApprovalPermission',
|
||||
]
|
||||
|
||||
|
||||
class ModelAccessPermission(permissions.BasePermission):
|
||||
'''
|
"""
Default permissions class to check user access based on the model and
request method, optionally verifying the request data.
'''
"""

def check_options_permissions(self, request, view, obj=None):
return self.check_get_permissions(request, view, obj)
@@ -35,8 +44,7 @@ class ModelAccessPermission(permissions.BasePermission):
def check_get_permissions(self, request, view, obj=None):
if hasattr(view, 'parent_model'):
parent_obj = view.get_parent_object()
if not check_user_access(request.user, view.parent_model, 'read',
parent_obj):
if not check_user_access(request.user, view.parent_model, 'read', parent_obj):
return False
if not obj:
return True
@@ -45,8 +53,7 @@ class ModelAccessPermission(permissions.BasePermission):
def check_post_permissions(self, request, view, obj=None):
if hasattr(view, 'parent_model'):
parent_obj = view.get_parent_object()
if not check_user_access(request.user, view.parent_model, 'read',
parent_obj):
if not check_user_access(request.user, view.parent_model, 'read', parent_obj):
return False
if hasattr(view, 'parent_key'):
if not check_user_access(request.user, view.model, 'add', {view.parent_key: parent_obj}):
@@ -60,10 +67,7 @@ class ModelAccessPermission(permissions.BasePermission):
extra_kwargs = {}
if view.obj_permission_type == 'admin':
extra_kwargs['data'] = {}
return check_user_access(
request.user, view.model, view.obj_permission_type, obj,
**extra_kwargs
)
return check_user_access(request.user, view.model, view.obj_permission_type, obj, **extra_kwargs)
else:
if obj:
return True
@@ -74,8 +78,7 @@ class ModelAccessPermission(permissions.BasePermission):
# FIXME: For some reason this needs to return True
# because it is first called with obj=None?
return True
return check_user_access(request.user, view.model, 'change', obj,
request.data)
return check_user_access(request.user, view.model, 'change', obj, request.data)

def check_patch_permissions(self, request, view, obj=None):
return self.check_put_permissions(request, view, obj)
@@ -89,10 +92,10 @@ class ModelAccessPermission(permissions.BasePermission):
return check_user_access(request.user, view.model, 'delete', obj)

def check_permissions(self, request, view, obj=None):
'''
"""
Perform basic permissions checking before delegating to the appropriate
method based on the request method.
'''
"""

# Don't allow anonymous users. 401, not 403, hence no raised exception.
if not request.user or request.user.is_anonymous:
@@ -117,9 +120,7 @@ class ModelAccessPermission(permissions.BasePermission):
return result

def has_permission(self, request, view, obj=None):
logger.debug('has_permission(user=%s method=%s data=%r, %s, %r)',
request.user, request.method, request.data,
view.__class__.__name__, obj)
logger.debug('has_permission(user=%s method=%s data=%r, %s, %r)', request.user, request.method, request.data, view.__class__.__name__, obj)
try:
response = self.check_permissions(request, view, obj)
except Exception as e:
@@ -134,10 +135,10 @@ class ModelAccessPermission(permissions.BasePermission):


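The check_permissions docstring above describes dispatch by HTTP method. As a minimal illustrative sketch (not the verbatim AWX implementation), a DRF-style permission class can route each request to the matching check_<method>_permissions handler seen throughout this diff:

    # Hypothetical skeleton; the check_*_permissions names mirror the diff above.
    class MethodDelegatingPermission:
        def check_permissions(self, request, view, obj=None):
            # e.g. GET -> check_get_permissions, PUT -> check_put_permissions
            handler = getattr(self, 'check_%s_permissions' % request.method.lower(), None)
            if handler is None:
                return False  # unknown HTTP method: deny by default
            return handler(request, view, obj)
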
class JobTemplateCallbackPermission(ModelAccessPermission):
'''
"""
Permission check used by job template callback view for requests from
ephemeral hosts.
'''
"""

def has_permission(self, request, view, obj=None):
# If another authentication method was used and it's not a POST, return
@@ -160,18 +161,16 @@ class JobTemplateCallbackPermission(ModelAccessPermission):


class VariableDataPermission(ModelAccessPermission):

def check_put_permissions(self, request, view, obj=None):
if not obj:
return True
return check_user_access(request.user, view.model, 'change', obj,
dict(variables=request.data))
return check_user_access(request.user, view.model, 'change', obj, dict(variables=request.data))


class TaskPermission(ModelAccessPermission):
'''
"""
Permission checks used for API callbacks from running a task.
'''
"""

def has_permission(self, request, view, obj=None):
# If another authentication method was used other than the one for
@@ -182,8 +181,7 @@ class TaskPermission(ModelAccessPermission):
# Verify that the ID present in the auth token is for a valid, active
# unified job.
try:
unified_job = UnifiedJob.objects.get(status='running',
pk=int(request.auth.split('-')[0]))
unified_job = UnifiedJob.objects.get(status='running', pk=int(request.auth.split('-')[0]))
except (UnifiedJob.DoesNotExist, TypeError):
return False

@@ -197,10 +195,10 @@ class TaskPermission(ModelAccessPermission):


class WorkflowApprovalPermission(ModelAccessPermission):
'''
"""
Permission check used by workflow `approval` and `deny` views to determine
who has access to approve and deny paused workflow nodes
'''
"""

def check_post_permissions(self, request, view, obj=None):
approval = get_object_or_400(view.model, pk=view.kwargs['pk'])
@@ -208,9 +206,10 @@ class WorkflowApprovalPermission(ModelAccessPermission):


class ProjectUpdatePermission(ModelAccessPermission):
'''
"""
Permission check used by ProjectUpdateView to determine who can update projects
'''
"""

def check_get_permissions(self, request, view, obj=None):
project = get_object_or_400(view.model, pk=view.kwargs['pk'])
return check_user_access(request.user, view.model, 'read', project)

@@ -11,7 +11,6 @@ from rest_framework.utils import encoders


class SurrogateEncoder(encoders.JSONEncoder):

def encode(self, obj):
ret = super(SurrogateEncoder, self).encode(obj)
try:
@@ -28,9 +27,9 @@ class DefaultJSONRenderer(renderers.JSONRenderer):


class BrowsableAPIRenderer(renderers.BrowsableAPIRenderer):
'''
"""
Customizations to the default browsable API renderer.
'''
"""

def get_default_renderer(self, view):
renderer = super(BrowsableAPIRenderer, self).get_default_renderer(view)
@@ -48,9 +47,7 @@ class BrowsableAPIRenderer(renderers.BrowsableAPIRenderer):
# see: https://github.com/ansible/awx/issues/3108
# https://code.djangoproject.com/ticket/28121
return data
return super(BrowsableAPIRenderer, self).get_content(renderer, data,
accepted_media_type,
renderer_context)
return super(BrowsableAPIRenderer, self).get_content(renderer, data, accepted_media_type, renderer_context)

def get_context(self, data, accepted_media_type, renderer_context):
# Store the associated response status to know how to populate the raw
@@ -125,18 +122,25 @@ class AnsiDownloadRenderer(PlainTextRenderer):


class PrometheusJSONRenderer(renderers.JSONRenderer):

def render(self, data, accepted_media_type=None, renderer_context=None):
if isinstance(data, dict):
# HTTP errors are {'detail': ErrorDetail(string='...', code=...)}
return super(PrometheusJSONRenderer, self).render(
data, accepted_media_type, renderer_context
)
return super(PrometheusJSONRenderer, self).render(data, accepted_media_type, renderer_context)
parsed_metrics = text_string_to_metric_families(data)
data = {}
for family in parsed_metrics:
data[family.name] = {}
data[family.name]['help_text'] = family.documentation
data[family.name]['type'] = family.type
data[family.name]['samples'] = []
for sample in family.samples:
data[sample[0]] = {"labels": sample[1], "value": sample[2]}
return super(PrometheusJSONRenderer, self).render(
data, accepted_media_type, renderer_context
)
sample_dict = {"labels": sample[1], "value": sample[2]}
if family.type == 'histogram':
if sample[0].endswith("_sum"):
sample_dict['sample_type'] = "sum"
elif sample[0].endswith("_count"):
sample_dict['sample_type'] = "count"
elif sample[0].endswith("_bucket"):
sample_dict['sample_type'] = "bucket"
data[family.name]['samples'].append(sample_dict)
return super(PrometheusJSONRenderer, self).render(data, accepted_media_type, renderer_context)

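The render() method above relies on prometheus_client's text parser. A small self-contained sketch of the parsing it performs (the exposition text and metric name below are invented for illustration):

    from prometheus_client.parser import text_string_to_metric_families

    exposition = (
        '# HELP awx_system_info AWX system information\n'
        '# TYPE awx_system_info gauge\n'
        'awx_system_info{node="awx-1"} 1.0\n'
    )
    for family in text_string_to_metric_families(exposition):
        print(family.name, family.type, family.documentation)
        for sample in family.samples:
            # Samples unpack as (name, labels, value, ...), which is why the
            # renderer above indexes sample[0], sample[1] and sample[2].
            print(sample[0], sample[1], sample[2])
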
File diff suppressed because it is too large
@@ -14,7 +14,6 @@ from rest_framework_swagger import renderers


class SuperUserSchemaGenerator(SchemaGenerator):

def has_view_permissions(self, path, method, view):
#
# Generate the Swagger schema as if you were a superuser and
@@ -25,17 +24,17 @@ class SuperUserSchemaGenerator(SchemaGenerator):


class AutoSchema(DRFAuthSchema):

def get_link(self, path, method, base_url):
link = super(AutoSchema, self).get_link(path, method, base_url)
try:
serializer = self.view.get_serializer()
except Exception:
serializer = None
warnings.warn('{}.get_serializer() raised an exception during '
'schema generation. Serializer fields will not be '
'generated for {} {}.'
.format(self.view.__class__.__name__, method, path))
warnings.warn(
'{}.get_serializer() raised an exception during '
'schema generation. Serializer fields will not be '
'generated for {} {}.'.format(self.view.__class__.__name__, method, path)
)

link.__dict__['deprecated'] = getattr(self.view, 'deprecated', False)

@@ -43,9 +42,7 @@ class AutoSchema(DRFAuthSchema):
if hasattr(self.view, 'swagger_topic'):
link.__dict__['topic'] = str(self.view.swagger_topic).title()
elif serializer and hasattr(serializer, 'Meta'):
link.__dict__['topic'] = str(
serializer.Meta.model._meta.verbose_name_plural
).title()
link.__dict__['topic'] = str(serializer.Meta.model._meta.verbose_name_plural).title()
elif hasattr(self.view, 'model'):
link.__dict__['topic'] = str(self.view.model._meta.verbose_name_plural).title()
else:
@@ -62,18 +59,10 @@ class SwaggerSchemaView(APIView):
_ignore_model_permissions = True
exclude_from_schema = True
permission_classes = [AllowAny]
renderer_classes = [
CoreJSONRenderer,
renderers.OpenAPIRenderer,
renderers.SwaggerUIRenderer
]
renderer_classes = [CoreJSONRenderer, renderers.OpenAPIRenderer, renderers.SwaggerUIRenderer]

def get(self, request):
generator = SuperUserSchemaGenerator(
title='Ansible Tower API',
patterns=None,
urlconf=None
)
generator = SuperUserSchemaGenerator(title='Ansible Tower API', patterns=None, urlconf=None)
schema = generator.get_schema(request=request)
# python core-api doesn't support the deprecation yet, so track it
# ourselves and return it in a response header
@@ -103,11 +92,6 @@ class SwaggerSchemaView(APIView):
schema._data[topic]._data[path] = node

if not schema:
raise exceptions.ValidationError(
'The schema generator did not return a schema Document'
)
raise exceptions.ValidationError('The schema generator did not return a schema Document')

return Response(
schema,
headers={'X-Deprecated-Paths': json.dumps(_deprecated)}
)
return Response(schema, headers={'X-Deprecated-Paths': json.dumps(_deprecated)})

1 awx/api/templates/api/metrics_view.md Normal file
@@ -0,0 +1 @@
query params to filter response, e.g., ?subsystemonly=1&metric=callback_receiver_events_insert_db&node=awx-1
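A hypothetical request illustrating the query parameters documented above (the host and token are placeholders):

    import requests

    resp = requests.get(
        'https://awx.example.com/api/v2/metrics/',
        params={'subsystemonly': '1', 'metric': 'callback_receiver_events_insert_db', 'node': 'awx-1'},
        headers={'Authorization': 'Bearer <token>', 'Accept': 'application/json'},
    )
    print(resp.text)
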
@@ -3,10 +3,7 @@

from django.conf.urls import url

from awx.api.views import (
ActivityStreamList,
ActivityStreamDetail,
)
from awx.api.views import ActivityStreamList, ActivityStreamDetail


urls = [

@@ -3,10 +3,7 @@

from django.conf.urls import url

from awx.api.views import (
AdHocCommandEventList,
AdHocCommandEventDetail,
)
from awx.api.views import AdHocCommandEventList, AdHocCommandEventDetail


urls = [

@@ -3,10 +3,7 @@

from django.conf.urls import url

from awx.api.views import (
CredentialInputSourceDetail,
CredentialInputSourceList,
)
from awx.api.views import CredentialInputSourceDetail, CredentialInputSourceList


urls = [

@@ -3,13 +3,7 @@

from django.conf.urls import url

from awx.api.views import (
CredentialTypeList,
CredentialTypeDetail,
CredentialTypeCredentialList,
CredentialTypeActivityStreamList,
CredentialTypeExternalTest,
)
from awx.api.views import CredentialTypeList, CredentialTypeDetail, CredentialTypeCredentialList, CredentialTypeActivityStreamList, CredentialTypeExternalTest


urls = [

20 awx/api/urls/execution_environments.py Normal file
@@ -0,0 +1,20 @@
from django.conf.urls import url

from awx.api.views import (
ExecutionEnvironmentList,
ExecutionEnvironmentDetail,
ExecutionEnvironmentJobTemplateList,
ExecutionEnvironmentCopy,
ExecutionEnvironmentActivityStreamList,
)


urls = [
url(r'^$', ExecutionEnvironmentList.as_view(), name='execution_environment_list'),
url(r'^(?P<pk>[0-9]+)/$', ExecutionEnvironmentDetail.as_view(), name='execution_environment_detail'),
url(r'^(?P<pk>[0-9]+)/unified_job_templates/$', ExecutionEnvironmentJobTemplateList.as_view(), name='execution_environment_job_template_list'),
url(r'^(?P<pk>[0-9]+)/copy/$', ExecutionEnvironmentCopy.as_view(), name='execution_environment_copy'),
url(r'^(?P<pk>[0-9]+)/activity_stream/$', ExecutionEnvironmentActivityStreamList.as_view(), name='execution_environment_activity_stream_list'),
]

__all__ = ['urls']
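Given these patterns and the app_name = 'api' namespace seen later in this diff, the detail route could be reversed roughly like this (the pk value is made up):

    from django.urls import reverse

    # e.g. '/api/v2/execution_environments/1/' under the versioned prefix
    reverse('api:execution_environment_detail', kwargs={'version': 'v2', 'pk': 1})
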
@@ -3,20 +3,14 @@

from django.conf.urls import url

from awx.api.views import (
InstanceList,
InstanceDetail,
InstanceUnifiedJobsList,
InstanceInstanceGroupsList,
)
from awx.api.views import InstanceList, InstanceDetail, InstanceUnifiedJobsList, InstanceInstanceGroupsList


urls = [
url(r'^$', InstanceList.as_view(), name='instance_list'),
url(r'^(?P<pk>[0-9]+)/$', InstanceDetail.as_view(), name='instance_detail'),
url(r'^(?P<pk>[0-9]+)/jobs/$', InstanceUnifiedJobsList.as_view(), name='instance_unified_jobs_list'),
url(r'^(?P<pk>[0-9]+)/instance_groups/$', InstanceInstanceGroupsList.as_view(),
name='instance_instance_groups_list'),
url(r'^(?P<pk>[0-9]+)/instance_groups/$', InstanceInstanceGroupsList.as_view(), name='instance_instance_groups_list'),
]

__all__ = ['urls']

@@ -3,12 +3,7 @@

from django.conf.urls import url

from awx.api.views import (
InstanceGroupList,
InstanceGroupDetail,
InstanceGroupUnifiedJobsList,
InstanceGroupInstanceList,
)
from awx.api.views import InstanceGroupList, InstanceGroupDetail, InstanceGroupUnifiedJobsList, InstanceGroupInstanceList


urls = [

@@ -3,12 +3,7 @@

from django.conf.urls import url

from awx.api.views import (
InventoryScriptList,
InventoryScriptDetail,
InventoryScriptObjectRolesList,
InventoryScriptCopy,
)
from awx.api.views import InventoryScriptList, InventoryScriptDetail, InventoryScriptObjectRolesList, InventoryScriptCopy


urls = [

@@ -29,12 +29,21 @@ urls = [
url(r'^(?P<pk>[0-9]+)/credentials/$', InventorySourceCredentialsList.as_view(), name='inventory_source_credentials_list'),
url(r'^(?P<pk>[0-9]+)/groups/$', InventorySourceGroupsList.as_view(), name='inventory_source_groups_list'),
url(r'^(?P<pk>[0-9]+)/hosts/$', InventorySourceHostsList.as_view(), name='inventory_source_hosts_list'),
url(r'^(?P<pk>[0-9]+)/notification_templates_started/$', InventorySourceNotificationTemplatesStartedList.as_view(),
name='inventory_source_notification_templates_started_list'),
url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', InventorySourceNotificationTemplatesErrorList.as_view(),
name='inventory_source_notification_templates_error_list'),
url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', InventorySourceNotificationTemplatesSuccessList.as_view(),
name='inventory_source_notification_templates_success_list'),
url(
r'^(?P<pk>[0-9]+)/notification_templates_started/$',
InventorySourceNotificationTemplatesStartedList.as_view(),
name='inventory_source_notification_templates_started_list',
),
url(
r'^(?P<pk>[0-9]+)/notification_templates_error/$',
InventorySourceNotificationTemplatesErrorList.as_view(),
name='inventory_source_notification_templates_error_list',
),
url(
r'^(?P<pk>[0-9]+)/notification_templates_success/$',
InventorySourceNotificationTemplatesSuccessList.as_view(),
name='inventory_source_notification_templates_success_list',
),
]

__all__ = ['urls']

@@ -3,12 +3,7 @@

from django.conf.urls import url

from awx.api.views import (
JobEventList,
JobEventDetail,
JobEventChildrenList,
JobEventHostsList,
)
from awx.api.views import JobEventList, JobEventDetail, JobEventChildrenList, JobEventHostsList


urls = [

@@ -3,13 +3,9 @@

from django.conf.urls import url

from awx.api.views import (
JobHostSummaryDetail,
)
from awx.api.views import JobHostSummaryDetail


urls = [
url(r'^(?P<pk>[0-9]+)/$', JobHostSummaryDetail.as_view(), name='job_host_summary_detail'),
]
urls = [url(r'^(?P<pk>[0-9]+)/$', JobHostSummaryDetail.as_view(), name='job_host_summary_detail')]

__all__ = ['urls']

@@ -34,12 +34,21 @@ urls = [
url(r'^(?P<pk>[0-9]+)/schedules/$', JobTemplateSchedulesList.as_view(), name='job_template_schedules_list'),
url(r'^(?P<pk>[0-9]+)/survey_spec/$', JobTemplateSurveySpec.as_view(), name='job_template_survey_spec'),
url(r'^(?P<pk>[0-9]+)/activity_stream/$', JobTemplateActivityStreamList.as_view(), name='job_template_activity_stream_list'),
url(r'^(?P<pk>[0-9]+)/notification_templates_started/$', JobTemplateNotificationTemplatesStartedList.as_view(),
name='job_template_notification_templates_started_list'),
url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', JobTemplateNotificationTemplatesErrorList.as_view(),
name='job_template_notification_templates_error_list'),
url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', JobTemplateNotificationTemplatesSuccessList.as_view(),
name='job_template_notification_templates_success_list'),
url(
r'^(?P<pk>[0-9]+)/notification_templates_started/$',
JobTemplateNotificationTemplatesStartedList.as_view(),
name='job_template_notification_templates_started_list',
),
url(
r'^(?P<pk>[0-9]+)/notification_templates_error/$',
JobTemplateNotificationTemplatesErrorList.as_view(),
name='job_template_notification_templates_error_list',
),
url(
r'^(?P<pk>[0-9]+)/notification_templates_success/$',
JobTemplateNotificationTemplatesSuccessList.as_view(),
name='job_template_notification_templates_success_list',
),
url(r'^(?P<pk>[0-9]+)/instance_groups/$', JobTemplateInstanceGroupsList.as_view(), name='job_template_instance_groups_list'),
url(r'^(?P<pk>[0-9]+)/access_list/$', JobTemplateAccessList.as_view(), name='job_template_access_list'),
url(r'^(?P<pk>[0-9]+)/object_roles/$', JobTemplateObjectRolesList.as_view(), name='job_template_object_roles_list'),

@@ -3,15 +3,9 @@

from django.conf.urls import url

from awx.api.views import (
LabelList,
LabelDetail,
)
from awx.api.views import LabelList, LabelDetail


urls = [
url(r'^$', LabelList.as_view(), name='label_list'),
url(r'^(?P<pk>[0-9]+)/$', LabelDetail.as_view(), name='label_detail'),
]
urls = [url(r'^$', LabelList.as_view(), name='label_list'), url(r'^(?P<pk>[0-9]+)/$', LabelDetail.as_view(), name='label_detail')]

__all__ = ['urls']

@@ -3,15 +3,9 @@

from django.conf.urls import url

from awx.api.views import (
NotificationList,
NotificationDetail,
)
from awx.api.views import NotificationList, NotificationDetail


urls = [
url(r'^$', NotificationList.as_view(), name='notification_list'),
url(r'^(?P<pk>[0-9]+)/$', NotificationDetail.as_view(), name='notification_detail'),
]
urls = [url(r'^$', NotificationList.as_view(), name='notification_list'), url(r'^(?P<pk>[0-9]+)/$', NotificationDetail.as_view(), name='notification_detail')]

__all__ = ['urls']

@@ -16,32 +16,12 @@ from awx.api.views import (

urls = [
url(r'^applications/$', OAuth2ApplicationList.as_view(), name='o_auth2_application_list'),
url(
r'^applications/(?P<pk>[0-9]+)/$',
OAuth2ApplicationDetail.as_view(),
name='o_auth2_application_detail'
),
url(
r'^applications/(?P<pk>[0-9]+)/tokens/$',
ApplicationOAuth2TokenList.as_view(),
name='o_auth2_application_token_list'
),
url(
r'^applications/(?P<pk>[0-9]+)/activity_stream/$',
OAuth2ApplicationActivityStreamList.as_view(),
name='o_auth2_application_activity_stream_list'
),
url(r'^applications/(?P<pk>[0-9]+)/$', OAuth2ApplicationDetail.as_view(), name='o_auth2_application_detail'),
url(r'^applications/(?P<pk>[0-9]+)/tokens/$', ApplicationOAuth2TokenList.as_view(), name='o_auth2_application_token_list'),
url(r'^applications/(?P<pk>[0-9]+)/activity_stream/$', OAuth2ApplicationActivityStreamList.as_view(), name='o_auth2_application_activity_stream_list'),
url(r'^tokens/$', OAuth2TokenList.as_view(), name='o_auth2_token_list'),
url(
r'^tokens/(?P<pk>[0-9]+)/$',
OAuth2TokenDetail.as_view(),
name='o_auth2_token_detail'
),
url(
r'^tokens/(?P<pk>[0-9]+)/activity_stream/$',
OAuth2TokenActivityStreamList.as_view(),
name='o_auth2_token_activity_stream_list'
),
url(r'^tokens/(?P<pk>[0-9]+)/$', OAuth2TokenDetail.as_view(), name='o_auth2_token_detail'),
url(r'^tokens/(?P<pk>[0-9]+)/activity_stream/$', OAuth2TokenActivityStreamList.as_view(), name='o_auth2_token_activity_stream_list'),
]

__all__ = ['urls']

@@ -10,13 +10,10 @@ from oauthlib import oauth2
from oauth2_provider import views

from awx.main.models import RefreshToken
from awx.api.views import (
ApiOAuthAuthorizationRootView,
)
from awx.api.views import ApiOAuthAuthorizationRootView


class TokenView(views.TokenView):

def create_token_response(self, request):
# Django OAuth2 Toolkit has a bug whereby refresh tokens are *never*
# properly expired (ugh):
@@ -26,9 +23,7 @@ class TokenView(views.TokenView):
# This code detects and auto-expires them on refresh grant
# requests.
if request.POST.get('grant_type') == 'refresh_token' and 'refresh_token' in request.POST:
refresh_token = RefreshToken.objects.filter(
token=request.POST['refresh_token']
).first()
refresh_token = RefreshToken.objects.filter(token=request.POST['refresh_token']).first()
if refresh_token:
expire_seconds = settings.OAUTH2_PROVIDER.get('REFRESH_TOKEN_EXPIRE_SECONDS', 0)
if refresh_token.created + timedelta(seconds=expire_seconds) < now():

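A hedged illustration of the expiry rule in the hunk above: a refresh token issued at `created` is treated as expired once created + REFRESH_TOKEN_EXPIRE_SECONDS lies in the past (the timestamps and setting value below are invented):

    from datetime import datetime, timedelta, timezone

    created = datetime(2021, 1, 1, tzinfo=timezone.utc)
    expire_seconds = 2628000  # ~1 month; hypothetical REFRESH_TOKEN_EXPIRE_SECONDS
    now = datetime(2021, 3, 1, tzinfo=timezone.utc)
    print(created + timedelta(seconds=expire_seconds) < now)  # True -> token is auto-expired
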
@@ -9,6 +9,7 @@ from awx.api.views import (
OrganizationUsersList,
OrganizationAdminsList,
OrganizationInventoriesList,
OrganizationExecutionEnvironmentsList,
OrganizationProjectsList,
OrganizationJobTemplatesList,
OrganizationWorkflowJobTemplatesList,
@@ -34,6 +35,7 @@ urls = [
url(r'^(?P<pk>[0-9]+)/users/$', OrganizationUsersList.as_view(), name='organization_users_list'),
url(r'^(?P<pk>[0-9]+)/admins/$', OrganizationAdminsList.as_view(), name='organization_admins_list'),
url(r'^(?P<pk>[0-9]+)/inventories/$', OrganizationInventoriesList.as_view(), name='organization_inventories_list'),
url(r'^(?P<pk>[0-9]+)/execution_environments/$', OrganizationExecutionEnvironmentsList.as_view(), name='organization_execution_environments_list'),
url(r'^(?P<pk>[0-9]+)/projects/$', OrganizationProjectsList.as_view(), name='organization_projects_list'),
url(r'^(?P<pk>[0-9]+)/job_templates/$', OrganizationJobTemplatesList.as_view(), name='organization_job_templates_list'),
url(r'^(?P<pk>[0-9]+)/workflow_job_templates/$', OrganizationWorkflowJobTemplatesList.as_view(), name='organization_workflow_job_templates_list'),
@@ -41,14 +43,26 @@ urls = [
url(r'^(?P<pk>[0-9]+)/credentials/$', OrganizationCredentialList.as_view(), name='organization_credential_list'),
url(r'^(?P<pk>[0-9]+)/activity_stream/$', OrganizationActivityStreamList.as_view(), name='organization_activity_stream_list'),
url(r'^(?P<pk>[0-9]+)/notification_templates/$', OrganizationNotificationTemplatesList.as_view(), name='organization_notification_templates_list'),
url(r'^(?P<pk>[0-9]+)/notification_templates_started/$', OrganizationNotificationTemplatesStartedList.as_view(),
name='organization_notification_templates_started_list'),
url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', OrganizationNotificationTemplatesErrorList.as_view(),
name='organization_notification_templates_error_list'),
url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', OrganizationNotificationTemplatesSuccessList.as_view(),
name='organization_notification_templates_success_list'),
url(r'^(?P<pk>[0-9]+)/notification_templates_approvals/$', OrganizationNotificationTemplatesApprovalList.as_view(),
name='organization_notification_templates_approvals_list'),
url(
r'^(?P<pk>[0-9]+)/notification_templates_started/$',
OrganizationNotificationTemplatesStartedList.as_view(),
name='organization_notification_templates_started_list',
),
url(
r'^(?P<pk>[0-9]+)/notification_templates_error/$',
OrganizationNotificationTemplatesErrorList.as_view(),
name='organization_notification_templates_error_list',
),
url(
r'^(?P<pk>[0-9]+)/notification_templates_success/$',
OrganizationNotificationTemplatesSuccessList.as_view(),
name='organization_notification_templates_success_list',
),
url(
r'^(?P<pk>[0-9]+)/notification_templates_approvals/$',
OrganizationNotificationTemplatesApprovalList.as_view(),
name='organization_notification_templates_approvals_list',
),
url(r'^(?P<pk>[0-9]+)/instance_groups/$', OrganizationInstanceGroupsList.as_view(), name='organization_instance_groups_list'),
url(r'^(?P<pk>[0-9]+)/galaxy_credentials/$', OrganizationGalaxyCredentialsList.as_view(), name='organization_galaxy_credentials_list'),
url(r'^(?P<pk>[0-9]+)/object_roles/$', OrganizationObjectRolesList.as_view(), name='organization_object_roles_list'),

@@ -35,10 +35,16 @@ urls = [
url(r'^(?P<pk>[0-9]+)/activity_stream/$', ProjectActivityStreamList.as_view(), name='project_activity_stream_list'),
url(r'^(?P<pk>[0-9]+)/schedules/$', ProjectSchedulesList.as_view(), name='project_schedules_list'),
url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', ProjectNotificationTemplatesErrorList.as_view(), name='project_notification_templates_error_list'),
url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', ProjectNotificationTemplatesSuccessList.as_view(),
name='project_notification_templates_success_list'),
url(r'^(?P<pk>[0-9]+)/notification_templates_started/$', ProjectNotificationTemplatesStartedList.as_view(),
name='project_notification_templates_started_list'),
url(
r'^(?P<pk>[0-9]+)/notification_templates_success/$',
ProjectNotificationTemplatesSuccessList.as_view(),
name='project_notification_templates_success_list',
),
url(
r'^(?P<pk>[0-9]+)/notification_templates_started/$',
ProjectNotificationTemplatesStartedList.as_view(),
name='project_notification_templates_started_list',
),
url(r'^(?P<pk>[0-9]+)/object_roles/$', ProjectObjectRolesList.as_view(), name='project_object_roles_list'),
url(r'^(?P<pk>[0-9]+)/access_list/$', ProjectAccessList.as_view(), name='project_access_list'),
url(r'^(?P<pk>[0-9]+)/copy/$', ProjectCopy.as_view(), name='project_copy'),

@@ -3,14 +3,7 @@

from django.conf.urls import url

from awx.api.views import (
RoleList,
RoleDetail,
RoleUsersList,
RoleTeamsList,
RoleParentsList,
RoleChildrenList,
)
from awx.api.views import RoleList, RoleDetail, RoleUsersList, RoleTeamsList, RoleParentsList, RoleChildrenList


urls = [

@@ -3,12 +3,7 @@

from django.conf.urls import url

from awx.api.views import (
ScheduleList,
ScheduleDetail,
ScheduleUnifiedJobsList,
ScheduleCredentialsList,
)
from awx.api.views import ScheduleList, ScheduleDetail, ScheduleUnifiedJobsList, ScheduleCredentialsList


urls = [

@@ -3,13 +3,7 @@

from django.conf.urls import url

from awx.api.views import (
SystemJobList,
SystemJobDetail,
SystemJobCancel,
SystemJobNotificationsList,
SystemJobEventsList
)
from awx.api.views import SystemJobList, SystemJobDetail, SystemJobCancel, SystemJobNotificationsList, SystemJobEventsList


urls = [

@@ -21,12 +21,21 @@ urls = [
url(r'^(?P<pk>[0-9]+)/launch/$', SystemJobTemplateLaunch.as_view(), name='system_job_template_launch'),
url(r'^(?P<pk>[0-9]+)/jobs/$', SystemJobTemplateJobsList.as_view(), name='system_job_template_jobs_list'),
url(r'^(?P<pk>[0-9]+)/schedules/$', SystemJobTemplateSchedulesList.as_view(), name='system_job_template_schedules_list'),
url(r'^(?P<pk>[0-9]+)/notification_templates_started/$', SystemJobTemplateNotificationTemplatesStartedList.as_view(),
name='system_job_template_notification_templates_started_list'),
url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', SystemJobTemplateNotificationTemplatesErrorList.as_view(),
name='system_job_template_notification_templates_error_list'),
url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', SystemJobTemplateNotificationTemplatesSuccessList.as_view(),
name='system_job_template_notification_templates_success_list'),
url(
r'^(?P<pk>[0-9]+)/notification_templates_started/$',
SystemJobTemplateNotificationTemplatesStartedList.as_view(),
name='system_job_template_notification_templates_started_list',
),
url(
r'^(?P<pk>[0-9]+)/notification_templates_error/$',
SystemJobTemplateNotificationTemplatesErrorList.as_view(),
name='system_job_template_notification_templates_error_list',
),
url(
r'^(?P<pk>[0-9]+)/notification_templates_success/$',
SystemJobTemplateNotificationTemplatesSuccessList.as_view(),
name='system_job_template_notification_templates_success_list',
),
]

__all__ = ['urls']

@@ -5,10 +5,7 @@ from __future__ import absolute_import, unicode_literals
from django.conf import settings
from django.conf.urls import include, url

from awx.api.generics import (
LoggedLoginView,
LoggedLogoutView,
)
from awx.api.generics import LoggedLoginView, LoggedLogoutView
from awx.api.views import (
ApiRootView,
ApiV2RootView,
@@ -33,15 +30,14 @@ from awx.api.views import (
OAuth2ApplicationDetail,
)

from awx.api.views.metrics import (
MetricsView,
)
from awx.api.views.metrics import MetricsView

from .organization import urls as organization_urls
from .user import urls as user_urls
from .project import urls as project_urls
from .project_update import urls as project_update_urls
from .inventory import urls as inventory_urls
from .execution_environments import urls as execution_environment_urls
from .team import urls as team_urls
from .host import urls as host_urls
from .group import urls as group_urls
@@ -106,6 +102,7 @@ v2_urls = [
url(r'^schedules/', include(schedule_urls)),
url(r'^organizations/', include(organization_urls)),
url(r'^users/', include(user_urls)),
url(r'^execution_environments/', include(execution_environment_urls)),
url(r'^projects/', include(project_urls)),
url(r'^project_updates/', include(project_update_urls)),
url(r'^teams/', include(team_urls)),
@@ -144,17 +141,11 @@ app_name = 'api'
urlpatterns = [
url(r'^$', ApiRootView.as_view(), name='api_root_view'),
url(r'^(?P<version>(v2))/', include(v2_urls)),
url(r'^login/$', LoggedLoginView.as_view(
template_name='rest_framework/login.html',
extra_context={'inside_login_context': True}
), name='login'),
url(r'^logout/$', LoggedLogoutView.as_view(
next_page='/api/', redirect_field_name='next'
), name='logout'),
url(r'^login/$', LoggedLoginView.as_view(template_name='rest_framework/login.html', extra_context={'inside_login_context': True}), name='login'),
url(r'^logout/$', LoggedLogoutView.as_view(next_page='/api/', redirect_field_name='next'), name='logout'),
url(r'^o/', include(oauth2_root_urls)),
]
if settings.SETTINGS_MODULE == 'awx.settings.development':
from awx.api.swagger import SwaggerSchemaView
urlpatterns += [
url(r'^swagger/$', SwaggerSchemaView.as_view(), name='swagger_view'),
]

urlpatterns += [url(r'^swagger/$', SwaggerSchemaView.as_view(), name='swagger_view')]

@@ -20,7 +20,7 @@ from awx.api.views import (
UserAuthorizedTokenList,
)

urls = [
urls = [
url(r'^$', UserList.as_view(), name='user_list'),
url(r'^(?P<pk>[0-9]+)/$', UserDetail.as_view(), name='user_detail'),
url(r'^(?P<pk>[0-9]+)/teams/$', UserTeamsList.as_view(), name='user_teams_list'),
@@ -35,7 +35,6 @@ urls = [
url(r'^(?P<pk>[0-9]+)/tokens/$', OAuth2UserTokenList.as_view(), name='o_auth2_token_list'),
url(r'^(?P<pk>[0-9]+)/authorized_tokens/$', UserAuthorizedTokenList.as_view(), name='user_authorized_token_list'),
url(r'^(?P<pk>[0-9]+)/personal_tokens/$', UserPersonalTokenList.as_view(), name='user_personal_token_list'),

]
]

__all__ = ['urls']

@@ -1,10 +1,6 @@
from django.conf.urls import url

from awx.api.views import (
WebhookKeyView,
GithubWebhookReceiver,
GitlabWebhookReceiver,
)
from awx.api.views import WebhookKeyView, GithubWebhookReceiver, GitlabWebhookReceiver


urlpatterns = [

@@ -3,12 +3,7 @@

from django.conf.urls import url

from awx.api.views import (
WorkflowApprovalList,
WorkflowApprovalDetail,
WorkflowApprovalApprove,
WorkflowApprovalDeny,
)
from awx.api.views import WorkflowApprovalList, WorkflowApprovalDetail, WorkflowApprovalApprove, WorkflowApprovalDeny


urls = [

@@ -3,10 +3,7 @@

from django.conf.urls import url

from awx.api.views import (
WorkflowApprovalTemplateDetail,
WorkflowApprovalTemplateJobsList,
)
from awx.api.views import WorkflowApprovalTemplateDetail, WorkflowApprovalTemplateJobsList


urls = [

@@ -33,14 +33,26 @@ urls = [
url(r'^(?P<pk>[0-9]+)/survey_spec/$', WorkflowJobTemplateSurveySpec.as_view(), name='workflow_job_template_survey_spec'),
url(r'^(?P<pk>[0-9]+)/workflow_nodes/$', WorkflowJobTemplateWorkflowNodesList.as_view(), name='workflow_job_template_workflow_nodes_list'),
url(r'^(?P<pk>[0-9]+)/activity_stream/$', WorkflowJobTemplateActivityStreamList.as_view(), name='workflow_job_template_activity_stream_list'),
url(r'^(?P<pk>[0-9]+)/notification_templates_started/$', WorkflowJobTemplateNotificationTemplatesStartedList.as_view(),
name='workflow_job_template_notification_templates_started_list'),
url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', WorkflowJobTemplateNotificationTemplatesErrorList.as_view(),
name='workflow_job_template_notification_templates_error_list'),
url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', WorkflowJobTemplateNotificationTemplatesSuccessList.as_view(),
name='workflow_job_template_notification_templates_success_list'),
url(r'^(?P<pk>[0-9]+)/notification_templates_approvals/$', WorkflowJobTemplateNotificationTemplatesApprovalList.as_view(),
name='workflow_job_template_notification_templates_approvals_list'),
url(
r'^(?P<pk>[0-9]+)/notification_templates_started/$',
WorkflowJobTemplateNotificationTemplatesStartedList.as_view(),
name='workflow_job_template_notification_templates_started_list',
),
url(
r'^(?P<pk>[0-9]+)/notification_templates_error/$',
WorkflowJobTemplateNotificationTemplatesErrorList.as_view(),
name='workflow_job_template_notification_templates_error_list',
),
url(
r'^(?P<pk>[0-9]+)/notification_templates_success/$',
WorkflowJobTemplateNotificationTemplatesSuccessList.as_view(),
name='workflow_job_template_notification_templates_success_list',
),
url(
r'^(?P<pk>[0-9]+)/notification_templates_approvals/$',
WorkflowJobTemplateNotificationTemplatesApprovalList.as_view(),
name='workflow_job_template_notification_templates_approvals_list',
),
url(r'^(?P<pk>[0-9]+)/access_list/$', WorkflowJobTemplateAccessList.as_view(), name='workflow_job_template_access_list'),
url(r'^(?P<pk>[0-9]+)/object_roles/$', WorkflowJobTemplateObjectRolesList.as_view(), name='workflow_job_template_object_roles_list'),
url(r'^(?P<pk>[0-9]+)/labels/$', WorkflowJobTemplateLabelList.as_view(), name='workflow_job_template_label_list'),

@@ -40,13 +40,10 @@ def reverse(viewname, args=None, kwargs=None, request=None, format=None, **extra


class URLPathVersioning(BaseVersioning):

def reverse(self, viewname, args=None, kwargs=None, request=None, format=None, **extra):
if request.version is not None:
kwargs = {} if (kwargs is None) else kwargs
kwargs[self.version_param] = request.version
request = None

return super(BaseVersioning, self).reverse(
viewname, args, kwargs, request, format, **extra
)
return super(BaseVersioning, self).reverse(viewname, args, kwargs, request, format, **extra)

File diff suppressed because it is too large
@@ -28,14 +28,7 @@ from awx.main.models import (
InventorySource,
CustomInventoryScript,
)
from awx.api.generics import (
ListCreateAPIView,
RetrieveUpdateDestroyAPIView,
SubListAPIView,
SubListAttachDetachAPIView,
ResourceAccessList,
CopyAPIView,
)
from awx.api.generics import ListCreateAPIView, RetrieveUpdateDestroyAPIView, SubListAPIView, SubListAttachDetachAPIView, ResourceAccessList, CopyAPIView

from awx.api.serializers import (
InventorySerializer,
@@ -46,10 +39,7 @@ from awx.api.serializers import (
CustomInventoryScriptSerializer,
JobTemplateSerializer,
)
from awx.api.views.mixin import (
RelatedJobsPreventDeleteMixin,
ControlledByScmMixin,
)
from awx.api.views.mixin import RelatedJobsPreventDeleteMixin, ControlledByScmMixin

logger = logging.getLogger('awx.api.views.organization')

@@ -101,7 +91,7 @@ class InventoryScriptObjectRolesList(SubListAPIView):
model = Role
serializer_class = RoleSerializer
parent_model = CustomInventoryScript
search_fields = ('role_field', 'content_type__model',)
search_fields = ('role_field', 'content_type__model')

def get_queryset(self):
po = self.get_parent_object()
@@ -134,8 +124,7 @@ class InventoryDetail(RelatedJobsPreventDeleteMixin, ControlledByScmMixin, Retri

# Do not allow changes to an Inventory kind.
if kind is not None and obj.kind != kind:
return Response(dict(error=_('You cannot turn a regular inventory into a "smart" inventory.')),
status=status.HTTP_405_METHOD_NOT_ALLOWED)
return Response(dict(error=_('You cannot turn a regular inventory into a "smart" inventory.')), status=status.HTTP_405_METHOD_NOT_ALLOWED)
return super(InventoryDetail, self).update(request, *args, **kwargs)

def destroy(self, request, *args, **kwargs):
@@ -175,7 +164,7 @@ class InventoryInstanceGroupsList(SubListAttachDetachAPIView):

class InventoryAccessList(ResourceAccessList):

model = User # needs to be User for AccessLists's
model = User  # needs to be User for AccessLists's
parent_model = Inventory


@@ -184,7 +173,7 @@ class InventoryObjectRolesList(SubListAPIView):
model = Role
serializer_class = RoleSerializer
parent_model = Inventory
search_fields = ('role_field', 'content_type__model',)
search_fields = ('role_field', 'content_type__model')

def get_queryset(self):
po = self.get_parent_object()

@@ -14,12 +14,11 @@ from rest_framework.exceptions import PermissionDenied

# AWX
# from awx.main.analytics import collectors
import awx.main.analytics.subsystem_metrics as s_metrics
from awx.main.analytics.metrics import metrics
from awx.api import renderers

from awx.api.generics import (
APIView,
)
from awx.api.generics import APIView


logger = logging.getLogger('awx.analytics')
@@ -30,13 +29,15 @@ class MetricsView(APIView):
name = _('Metrics')
swagger_topic = 'Metrics'

renderer_classes = [renderers.PlainTextRenderer,
renderers.PrometheusJSONRenderer,
renderers.BrowsableAPIRenderer,]
renderer_classes = [renderers.PlainTextRenderer, renderers.PrometheusJSONRenderer, renderers.BrowsableAPIRenderer]

def get(self, request):
''' Show Metrics Details '''
if (request.user.is_superuser or request.user.is_system_auditor):
return Response(metrics().decode('UTF-8'))
if request.user.is_superuser or request.user.is_system_auditor:
metrics_to_show = ''
if not request.query_params.get('subsystemonly', "0") == "1":
metrics_to_show += metrics().decode('UTF-8')
if not request.query_params.get('dbonly', "0") == "1":
metrics_to_show += s_metrics.metrics(request)
return Response(metrics_to_show)
raise PermissionDenied()


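Reading the query-param logic above (a summary, not code from the diff): with no parameters the response concatenates the main metrics and the subsystem metrics; subsystemonly=1 skips the main metrics, and dbonly=1 skips the subsystem metrics. For example:

    # /api/v2/metrics/                 -> main metrics + subsystem metrics
    # /api/v2/metrics/?subsystemonly=1 -> subsystem metrics only
    # /api/v2/metrics/?dbonly=1        -> main metrics only
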
@@ -16,14 +16,8 @@ from rest_framework.response import Response
from rest_framework import status

from awx.main.constants import ACTIVE_STATES
from awx.main.utils import (
get_object_or_400,
parse_yaml_or_json,
)
from awx.main.models.ha import (
Instance,
InstanceGroup,
)
from awx.main.utils import get_object_or_400, parse_yaml_or_json
from awx.main.models.ha import Instance, InstanceGroup
from awx.main.models.organization import Team
from awx.main.models.projects import Project
from awx.main.models.inventory import Inventory
@@ -34,9 +28,10 @@ logger = logging.getLogger('awx.api.views.mixin')


class UnifiedJobDeletionMixin(object):
'''
"""
Special handling when deleting a running unified job object.
'''
"""

def destroy(self, request, *args, **kwargs):
obj = self.get_object()
if not request.user.can_access(self.model, 'delete', obj):
@@ -53,22 +48,21 @@ class UnifiedJobDeletionMixin(object):
# Prohibit deletion if job events are still coming in
if obj.finished and now() < obj.finished + dateutil.relativedelta.relativedelta(minutes=1):
# less than 1 minute has passed since job finished and events are not in
return Response({"error": _("Job has not finished processing events.")},
status=status.HTTP_400_BAD_REQUEST)
return Response({"error": _("Job has not finished processing events.")}, status=status.HTTP_400_BAD_REQUEST)
else:
# if it has been > 1 minute, events are probably lost
logger.warning('Allowing deletion of {} through the API without all events '
'processed.'.format(obj.log_format))
logger.warning('Allowing deletion of {} through the API without all events ' 'processed.'.format(obj.log_format))
obj.delete()
return Response(status=status.HTTP_204_NO_CONTENT)


class InstanceGroupMembershipMixin(object):
'''
"""
This mixin overloads attach/detach so that it calls InstanceGroup.save(),
triggering a background recalculation of policy-based instance group
membership.
'''
"""

def attach(self, request, *args, **kwargs):
response = super(InstanceGroupMembershipMixin, self).attach(request, *args, **kwargs)
sub_id, res = self.attach_validate(request)
@@ -84,9 +78,7 @@ class InstanceGroupMembershipMixin(object):
ig_obj = get_object_or_400(ig_qs, pk=sub_id)
else:
# similar to get_parent_object, but selected for update
parent_filter = {
self.lookup_field: self.kwargs.get(self.lookup_field, None),
}
parent_filter = {self.lookup_field: self.kwargs.get(self.lookup_field, None)}
ig_obj = get_object_or_404(ig_qs, **parent_filter)
if inst_name not in ig_obj.policy_instance_list:
ig_obj.policy_instance_list.append(inst_name)
@@ -126,9 +118,7 @@ class InstanceGroupMembershipMixin(object):
ig_obj = get_object_or_400(ig_qs, pk=sub_id)
else:
# similar to get_parent_object, but selected for update
parent_filter = {
self.lookup_field: self.kwargs.get(self.lookup_field, None),
}
parent_filter = {self.lookup_field: self.kwargs.get(self.lookup_field, None)}
ig_obj = get_object_or_404(ig_qs, **parent_filter)
if inst_name in ig_obj.policy_instance_list:
ig_obj.policy_instance_list.pop(ig_obj.policy_instance_list.index(inst_name))
@@ -146,16 +136,13 @@ class RelatedJobsPreventDeleteMixin(object):
if len(active_jobs) > 0:
raise ActiveJobConflict(active_jobs)
time_cutoff = now() - dateutil.relativedelta.relativedelta(minutes=1)
recent_jobs = obj._get_related_jobs().filter(finished__gte = time_cutoff)
recent_jobs = obj._get_related_jobs().filter(finished__gte=time_cutoff)
for unified_job in recent_jobs.get_real_instances():
if not unified_job.event_processing_finished:
raise PermissionDenied(_(
'Related job {} is still processing events.'
).format(unified_job.log_format))
raise PermissionDenied(_('Related job {} is still processing events.').format(unified_job.log_format))


class OrganizationCountsMixin(object):

def get_serializer_context(self, *args, **kwargs):
full_context = super(OrganizationCountsMixin, self).get_serializer_context(*args, **kwargs)

@@ -177,26 +164,23 @@ class OrganizationCountsMixin(object):
# Produce counts of Foreign Key relationships
db_results['inventories'] = inv_qs.values('organization').annotate(Count('organization')).order_by('organization')

db_results['teams'] = Team.accessible_objects(
self.request.user, 'read_role').values('organization').annotate(
Count('organization')).order_by('organization')
db_results['teams'] = (
Team.accessible_objects(self.request.user, 'read_role').values('organization').annotate(Count('organization')).order_by('organization')
)

db_results['job_templates'] = jt_qs.values('organization').annotate(Count('organization')).order_by('organization')

db_results['projects'] = project_qs.values('organization').annotate(Count('organization')).order_by('organization')

# Other members and admins of organization are always viewable
db_results['users'] = org_qs.annotate(
users=Count('member_role__members', distinct=True),
admins=Count('admin_role__members', distinct=True)
).values('id', 'users', 'admins')
db_results['users'] = org_qs.annotate(users=Count('member_role__members', distinct=True), admins=Count('admin_role__members', distinct=True)).values(
'id', 'users', 'admins'
)

count_context = {}
for org in org_id_list:
org_id = org['id']
count_context[org_id] = {
'inventories': 0, 'teams': 0, 'users': 0, 'job_templates': 0,
'admins': 0, 'projects': 0}
count_context[org_id] = {'inventories': 0, 'teams': 0, 'users': 0, 'job_templates': 0, 'admins': 0, 'projects': 0}

for res, count_qs in db_results.items():
if res == 'users':
@@ -218,21 +202,20 @@ class OrganizationCountsMixin(object):


class ControlledByScmMixin(object):
'''
"""
Special method to reset SCM inventory commit hash
if anything that it manages changes.
'''
"""

def _reset_inv_src_rev(self, obj):
if self.request.method in SAFE_METHODS or not obj:
return
project_following_sources = obj.inventory_sources.filter(
update_on_project_update=True, source='scm')
project_following_sources = obj.inventory_sources.filter(update_on_project_update=True, source='scm')
if project_following_sources:
# Allow inventory changes unrelated to variables
if self.model == Inventory and (
not self.request or not self.request.data or
parse_yaml_or_json(self.request.data.get('variables', '')) == parse_yaml_or_json(obj.variables)):
not self.request or not self.request.data or parse_yaml_or_json(self.request.data.get('variables', '')) == parse_yaml_or_json(obj.variables)
):
return
project_following_sources.update(scm_last_revision='')


@@ -13,7 +13,9 @@ from django.utils.translation import ugettext_lazy as _
from awx.main.models import (
ActivityStream,
Inventory,
Host,
Project,
ExecutionEnvironment,
JobTemplate,
WorkflowJobTemplate,
Organization,
@@ -22,7 +24,7 @@ from awx.main.models import (
User,
Team,
InstanceGroup,
Credential
Credential,
)
from awx.api.generics import (
ListCreateAPIView,
@@ -44,13 +46,13 @@ from awx.api.serializers import (
RoleSerializer,
NotificationTemplateSerializer,
InstanceGroupSerializer,
ProjectSerializer, JobTemplateSerializer, WorkflowJobTemplateSerializer,
CredentialSerializer
)
from awx.api.views.mixin import (
RelatedJobsPreventDeleteMixin,
OrganizationCountsMixin,
ExecutionEnvironmentSerializer,
ProjectSerializer,
JobTemplateSerializer,
WorkflowJobTemplateSerializer,
CredentialSerializer,
)
from awx.api.views.mixin import RelatedJobsPreventDeleteMixin, OrganizationCountsMixin

logger = logging.getLogger('awx.api.views.organization')

@@ -81,23 +83,21 @@ class OrganizationDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPI

org_counts = {}
access_kwargs = {'accessor': self.request.user, 'role_field': 'read_role'}
direct_counts = Organization.objects.filter(id=org_id).annotate(
users=Count('member_role__members', distinct=True),
admins=Count('admin_role__members', distinct=True)
).values('users', 'admins')
direct_counts = (
Organization.objects.filter(id=org_id)
.annotate(users=Count('member_role__members', distinct=True), admins=Count('admin_role__members', distinct=True))
.values('users', 'admins')
)

if not direct_counts:
return full_context

org_counts = direct_counts[0]
org_counts['inventories'] = Inventory.accessible_objects(**access_kwargs).filter(
organization__id=org_id).count()
org_counts['teams'] = Team.accessible_objects(**access_kwargs).filter(
organization__id=org_id).count()
org_counts['projects'] = Project.accessible_objects(**access_kwargs).filter(
organization__id=org_id).count()
org_counts['job_templates'] = JobTemplate.accessible_objects(**access_kwargs).filter(
organization__id=org_id).count()
org_counts['inventories'] = Inventory.accessible_objects(**access_kwargs).filter(organization__id=org_id).count()
org_counts['teams'] = Team.accessible_objects(**access_kwargs).filter(organization__id=org_id).count()
org_counts['projects'] = Project.accessible_objects(**access_kwargs).filter(organization__id=org_id).count()
org_counts['job_templates'] = JobTemplate.accessible_objects(**access_kwargs).filter(organization__id=org_id).count()
org_counts['hosts'] = Host.objects.org_active_count(org_id)

full_context['related_field_counts'] = {}
full_context['related_field_counts'][org_id] = org_counts
@@ -139,6 +139,16 @@ class OrganizationProjectsList(SubListCreateAPIView):
parent_key = 'organization'


class OrganizationExecutionEnvironmentsList(SubListCreateAttachDetachAPIView):

model = ExecutionEnvironment
serializer_class = ExecutionEnvironmentSerializer
parent_model = Organization
relationship = 'executionenvironments'
parent_key = 'organization'
swagger_topic = "Execution Environments"


class OrganizationJobTemplatesList(SubListCreateAPIView):

model = JobTemplate
@@ -226,14 +236,12 @@ class OrganizationGalaxyCredentialsList(SubListAttachDetachAPIView):

def is_valid_relation(self, parent, sub, created=False):
if sub.kind != 'galaxy_api_token':
return {'msg': _(
f"Credential must be a Galaxy credential, not {sub.credential_type.name}."
)}
return {'msg': _(f"Credential must be a Galaxy credential, not {sub.credential_type.name}.")}


class OrganizationAccessList(ResourceAccessList):

model = User # needs to be User for AccessLists's
model = User  # needs to be User for AccessLists's
parent_model = Organization


@@ -242,7 +250,7 @@ class OrganizationObjectRolesList(SubListAPIView):
model = Role
serializer_class = RoleSerializer
parent_model = Organization
search_fields = ('role_field', 'content_type__model',)
search_fields = ('role_field', 'content_type__model')

def get_queryset(self):
po = self.get_parent_object()

@@ -24,22 +24,11 @@ from awx.api.generics import APIView
 from awx.conf.registry import settings_registry
 from awx.main.analytics import all_collectors
 from awx.main.ha import is_ha_environment
-from awx.main.utils import (
-    get_awx_version,
-    get_ansible_version,
-    get_custom_venv_choices,
-    to_python_boolean,
-)
+from awx.main.utils import get_awx_version, get_custom_venv_choices, to_python_boolean
 from awx.main.utils.licensing import validate_entitlement_manifest
 from awx.api.versioning import reverse, drf_reverse
 from awx.main.constants import PRIVILEGE_ESCALATION_METHODS
-from awx.main.models import (
-    Project,
-    Organization,
-    Instance,
-    InstanceGroup,
-    JobTemplate,
-)
+from awx.main.models import Project, Organization, Instance, InstanceGroup, JobTemplate
 from awx.main.utils import set_environ

 logger = logging.getLogger('awx.api.views.root')

@@ -60,7 +49,7 @@ class ApiRootView(APIView):
         data = OrderedDict()
         data['description'] = _('AWX REST API')
         data['current_version'] = v2
-        data['available_versions'] = dict(v2 = v2)
+        data['available_versions'] = dict(v2=v2)
         data['oauth2'] = drf_reverse('api:oauth_authorization_root_view')
         data['custom_logo'] = settings.CUSTOM_LOGO
         data['custom_login_info'] = settings.CUSTOM_LOGIN_INFO

@@ -100,6 +89,7 @@ class ApiVersionRootView(APIView):
         data['dashboard'] = reverse('api:dashboard_view', request=request)
         data['organizations'] = reverse('api:organization_list', request=request)
         data['users'] = reverse('api:user_list', request=request)
+        data['execution_environments'] = reverse('api:execution_environment_list', request=request)
         data['projects'] = reverse('api:project_list', request=request)
         data['project_updates'] = reverse('api:project_update_list', request=request)
         data['teams'] = reverse('api:team_list', request=request)

@@ -145,6 +135,7 @@ class ApiV2PingView(APIView):
     """A simple view that reports very basic information about this
     instance, which is acceptable to be public information.
     """
+
     permission_classes = (AllowAny,)
     authentication_classes = ()
     name = _('Ping')

@@ -156,23 +147,19 @@ class ApiV2PingView(APIView):
         Everything returned here should be considered public / insecure, as
         this requires no auth and is intended for use by the installer process.
         """
-        response = {
-            'ha': is_ha_environment(),
-            'version': get_awx_version(),
-            'active_node': settings.CLUSTER_HOST_ID,
-            'install_uuid': settings.INSTALL_UUID,
-        }
+        response = {'ha': is_ha_environment(), 'version': get_awx_version(), 'active_node': settings.CLUSTER_HOST_ID, 'install_uuid': settings.INSTALL_UUID}

         response['instances'] = []
         for instance in Instance.objects.all():
-            response['instances'].append(dict(node=instance.hostname, uuid=instance.uuid, heartbeat=instance.modified,
-                                              capacity=instance.capacity, version=instance.version))
+            response['instances'].append(
+                dict(node=instance.hostname, uuid=instance.uuid, heartbeat=instance.modified, capacity=instance.capacity, version=instance.version)
+            )
         sorted(response['instances'], key=operator.itemgetter('node'))
         response['instance_groups'] = []
         for instance_group in InstanceGroup.objects.prefetch_related('instances'):
-            response['instance_groups'].append(dict(name=instance_group.name,
-                                                    capacity=instance_group.capacity,
-                                                    instances=[x.hostname for x in instance_group.instances.all()]))
+            response['instance_groups'].append(
+                dict(name=instance_group.name, capacity=instance_group.capacity, instances=[x.hostname for x in instance_group.instances.all()])
+            )
         return Response(response)

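One detail worth flagging in the ping hunk above: `sorted(...)` returns a new list, and here its return value is discarded, so the instance list in the response is not actually ordered by node. The reformatting commit preserves this behavior unchanged. A small illustration of the difference (variable contents are illustrative):

```python
import operator

instances = [{'node': 'b'}, {'node': 'a'}]
sorted(instances, key=operator.itemgetter('node'))  # result discarded; no effect
assert [i['node'] for i in instances] == ['b', 'a']

instances.sort(key=operator.itemgetter('node'))  # in-place sort, as likely intended
assert [i['node'] for i in instances] == ['a', 'b']
```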
@@ -189,6 +176,7 @@ class ApiV2SubscriptionView(APIView):

     def post(self, request):
         from awx.main.utils.common import get_licenser
+
         data = request.data.copy()
         if data.get('subscriptions_password') == '$encrypted$':
             data['subscriptions_password'] = settings.SUBSCRIPTIONS_PASSWORD

@@ -202,10 +190,7 @@ class ApiV2SubscriptionView(APIView):
                 settings.SUBSCRIPTIONS_PASSWORD = data['subscriptions_password']
         except Exception as exc:
             msg = _("Invalid Subscription")
-            if (
-                isinstance(exc, requests.exceptions.HTTPError) and
-                getattr(getattr(exc, 'response', None), 'status_code', None) == 401
-            ):
+            if isinstance(exc, requests.exceptions.HTTPError) and getattr(getattr(exc, 'response', None), 'status_code', None) == 401:
                 msg = _("The provided credentials are invalid (HTTP 401).")
             elif isinstance(exc, requests.exceptions.ProxyError):
                 msg = _("Unable to connect to proxy server.")

@@ -214,8 +199,7 @@ class ApiV2SubscriptionView(APIView):
             elif isinstance(exc, (ValueError, OSError)) and exc.args:
                 msg = exc.args[0]
             else:
-                logger.exception(smart_text(u"Invalid subscription submitted."),
-                                 extra=dict(actor=request.user.username))
+                logger.exception(smart_text(u"Invalid subscription submitted."), extra=dict(actor=request.user.username))
             return Response({"error": msg}, status=status.HTTP_400_BAD_REQUEST)

         return Response(validated)

@@ -241,16 +225,14 @@ class ApiV2AttachView(APIView):
         pw = getattr(settings, 'SUBSCRIPTIONS_PASSWORD', None)
         if pool_id and user and pw:
             from awx.main.utils.common import get_licenser
+
             data = request.data.copy()
             try:
                 with set_environ(**settings.AWX_TASK_ENV):
                     validated = get_licenser().validate_rh(user, pw)
             except Exception as exc:
                 msg = _("Invalid Subscription")
-                if (
-                    isinstance(exc, requests.exceptions.HTTPError) and
-                    getattr(getattr(exc, 'response', None), 'status_code', None) == 401
-                ):
+                if isinstance(exc, requests.exceptions.HTTPError) and getattr(getattr(exc, 'response', None), 'status_code', None) == 401:
                     msg = _("The provided credentials are invalid (HTTP 401).")
                 elif isinstance(exc, requests.exceptions.ProxyError):
                     msg = _("Unable to connect to proxy server.")

@@ -259,8 +241,7 @@ class ApiV2AttachView(APIView):
                 elif isinstance(exc, (ValueError, OSError)) and exc.args:
                     msg = exc.args[0]
                 else:
-                    logger.exception(smart_text(u"Invalid subscription submitted."),
-                                     extra=dict(actor=request.user.username))
+                    logger.exception(smart_text(u"Invalid subscription submitted."), extra=dict(actor=request.user.username))
                 return Response({"error": msg}, status=status.HTTP_400_BAD_REQUEST)
             for sub in validated:
                 if sub['pool_id'] == pool_id:

@@ -286,6 +267,7 @@ class ApiV2ConfigView(APIView):
         '''Return various sitewide configuration settings'''

         from awx.main.utils.common import get_licenser
+
         license_data = get_licenser().validate()

         if not license_data.get('valid_key', False):

@@ -297,7 +279,6 @@ class ApiV2ConfigView(APIView):
             time_zone=settings.TIME_ZONE,
             license_info=license_data,
             version=get_awx_version(),
-            ansible_version=get_ansible_version(),
             eula=render_to_string("eula.md") if license_data.get('license_type', 'UNLICENSED') != 'open' else '',
             analytics_status=pendo_state,
             analytics_collectors=all_collectors(),

@@ -313,22 +294,23 @@ class ApiV2ConfigView(APIView):
         user_ldap_fields.extend(getattr(settings, 'AUTH_LDAP_USER_FLAGS_BY_GROUP', {}).keys())
         data['user_ldap_fields'] = user_ldap_fields

-        if request.user.is_superuser \
-                or request.user.is_system_auditor \
-                or Organization.accessible_objects(request.user, 'admin_role').exists() \
-                or Organization.accessible_objects(request.user, 'auditor_role').exists() \
-                or Organization.accessible_objects(request.user, 'project_admin_role').exists():
-            data.update(dict(
-                project_base_dir = settings.PROJECTS_ROOT,
-                project_local_paths = Project.get_local_path_choices(),
-                custom_virtualenvs = get_custom_venv_choices()
-            ))
+        if (
+            request.user.is_superuser
+            or request.user.is_system_auditor
+            or Organization.accessible_objects(request.user, 'admin_role').exists()
+            or Organization.accessible_objects(request.user, 'auditor_role').exists()
+            or Organization.accessible_objects(request.user, 'project_admin_role').exists()
+        ):
+            data.update(
+                dict(
+                    project_base_dir=settings.PROJECTS_ROOT, project_local_paths=Project.get_local_path_choices(), custom_virtualenvs=get_custom_venv_choices()
+                )
+            )
         elif JobTemplate.accessible_objects(request.user, 'admin_role').exists():
             data['custom_virtualenvs'] = get_custom_venv_choices()

         return Response(data)

     def post(self, request):
         if not isinstance(request.data, dict):
             return Response({"error": _("Invalid subscription data")}, status=status.HTTP_400_BAD_REQUEST)

@@ -345,11 +327,11 @@ class ApiV2ConfigView(APIView):
         try:
             data_actual = json.dumps(request.data)
         except Exception:
-            logger.info(smart_text(u"Invalid JSON submitted for license."),
-                        extra=dict(actor=request.user.username))
+            logger.info(smart_text(u"Invalid JSON submitted for license."), extra=dict(actor=request.user.username))
             return Response({"error": _("Invalid JSON")}, status=status.HTTP_400_BAD_REQUEST)

         from awx.main.utils.common import get_licenser
+
         license_data = json.loads(data_actual)
         if 'license_key' in license_data:
             return Response({"error": _('Legacy license submitted. A subscription manifest is now required.')}, status=status.HTTP_400_BAD_REQUEST)

@@ -357,10 +339,7 @@ class ApiV2ConfigView(APIView):
             try:
                 json_actual = json.loads(base64.b64decode(license_data['manifest']))
                 if 'license_key' in json_actual:
-                    return Response(
-                        {"error": _('Legacy license submitted. A subscription manifest is now required.')},
-                        status=status.HTTP_400_BAD_REQUEST
-                    )
+                    return Response({"error": _('Legacy license submitted. A subscription manifest is now required.')}, status=status.HTTP_400_BAD_REQUEST)
             except Exception:
                 pass
         try:

@@ -374,8 +353,7 @@ class ApiV2ConfigView(APIView):
             try:
                 license_data_validated = get_licenser().license_from_manifest(license_data)
             except Exception:
-                logger.warning(smart_text(u"Invalid subscription submitted."),
-                               extra=dict(actor=request.user.username))
+                logger.warning(smart_text(u"Invalid subscription submitted."), extra=dict(actor=request.user.username))
                 return Response({"error": _("Invalid License")}, status=status.HTTP_400_BAD_REQUEST)
         else:
             license_data_validated = get_licenser().validate()

@@ -386,8 +364,7 @@ class ApiV2ConfigView(APIView):
             settings.TOWER_URL_BASE = "{}://{}".format(request.scheme, request.get_host())
             return Response(license_data_validated)

-        logger.warning(smart_text(u"Invalid subscription submitted."),
-                       extra=dict(actor=request.user.username))
+        logger.warning(smart_text(u"Invalid subscription submitted."), extra=dict(actor=request.user.username))
         return Response({"error": _("Invalid subscription")}, status=status.HTTP_400_BAD_REQUEST)

     def delete(self, request):

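The subscription views above funnel several `requests` failure modes into one user-facing message, in a fixed branch order. A hedged sketch of that classification in isolation, assuming the `requests` package is available (the function name is illustrative; the message strings mirror the hunks):

```python
import requests

def subscription_error_message(exc: Exception) -> str:
    # Mirrors the branch order in the hunks above.
    if isinstance(exc, requests.exceptions.HTTPError) and getattr(getattr(exc, 'response', None), 'status_code', None) == 401:
        return "The provided credentials are invalid (HTTP 401)."
    if isinstance(exc, requests.exceptions.ProxyError):
        return "Unable to connect to proxy server."
    if isinstance(exc, (ValueError, OSError)) and exc.args:
        return exc.args[0]  # surface the first exception argument directly
    return "Invalid Subscription"

assert subscription_error_message(ValueError('bad manifest')) == 'bad manifest'
```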
@@ -26,10 +26,7 @@ class WebhookKeyView(GenericAPIView):
     permission_classes = (WebhookKeyPermission,)

     def get_queryset(self):
-        qs_models = {
-            'job_templates': JobTemplate,
-            'workflow_job_templates': WorkflowJobTemplate,
-        }
+        qs_models = {'job_templates': JobTemplate, 'workflow_job_templates': WorkflowJobTemplate}
         self.model = qs_models.get(self.kwargs['model_kwarg'])

         return super().get_queryset()

@@ -57,10 +54,7 @@ class WebhookReceiverBase(APIView):
     ref_keys = {}

     def get_queryset(self):
-        qs_models = {
-            'job_templates': JobTemplate,
-            'workflow_job_templates': WorkflowJobTemplate,
-        }
+        qs_models = {'job_templates': JobTemplate, 'workflow_job_templates': WorkflowJobTemplate}
         model = qs_models.get(self.kwargs['model_kwarg'])
         if model is None:
             raise PermissionDenied

@@ -120,10 +114,7 @@ class WebhookReceiverBase(APIView):
         # Ensure that the full contents of the request are captured for multiple uses.
         request.body

-        logger.debug(
-            "headers: {}\n"
-            "data: {}\n".format(request.headers, request.data)
-        )
+        logger.debug("headers: {}\n" "data: {}\n".format(request.headers, request.data))
         obj = self.get_object()
         self.check_signature(obj)

@@ -132,16 +123,11 @@ class WebhookReceiverBase(APIView):
         event_ref = self.get_event_ref()
         status_api = self.get_event_status_api()

-        kwargs = {
-            'unified_job_template_id': obj.id,
-            'webhook_service': obj.webhook_service,
-            'webhook_guid': event_guid,
-        }
+        kwargs = {'unified_job_template_id': obj.id, 'webhook_service': obj.webhook_service, 'webhook_guid': event_guid}
         if WorkflowJob.objects.filter(**kwargs).exists() or Job.objects.filter(**kwargs).exists():
             # Short circuit if this webhook has already been received and acted upon.
             logger.debug("Webhook previously received, returning without action.")
-            return Response({'message': _("Webhook previously received, aborting.")},
-                            status=status.HTTP_202_ACCEPTED)
+            return Response({'message': _("Webhook previously received, aborting.")}, status=status.HTTP_202_ACCEPTED)

         kwargs = {
             '_eager_fields': {

@@ -156,7 +142,7 @@ class WebhookReceiverBase(APIView):
                 'tower_webhook_event_ref': event_ref,
                 'tower_webhook_status_api': status_api,
                 'tower_webhook_payload': request.data,
-            }
+            },
         }

         new_job = obj.create_unified_job(**kwargs)

@@ -205,11 +191,7 @@ class GithubWebhookReceiver(WebhookReceiverBase):
 class GitlabWebhookReceiver(WebhookReceiverBase):
     service = 'gitlab'

-    ref_keys = {
-        'Push Hook': 'checkout_sha',
-        'Tag Push Hook': 'checkout_sha',
-        'Merge Request Hook': 'object_attributes.last_commit.id',
-    }
+    ref_keys = {'Push Hook': 'checkout_sha', 'Tag Push Hook': 'checkout_sha', 'Merge Request Hook': 'object_attributes.last_commit.id'}

     def get_event_type(self):
         return self.request.META.get('HTTP_X_GITLAB_EVENT')

@@ -229,8 +211,7 @@ class GitlabWebhookReceiver(WebhookReceiverBase):
             return
         parsed = urllib.parse.urlparse(repo_url)

-        return "{}://{}/api/v4/projects/{}/statuses/{}".format(
-            parsed.scheme, parsed.netloc, project['id'], self.get_event_ref())
+        return "{}://{}/api/v4/projects/{}/statuses/{}".format(parsed.scheme, parsed.netloc, project['id'], self.get_event_ref())

     def get_signature(self):
         return force_bytes(self.request.META.get('HTTP_X_GITLAB_TOKEN') or '')

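The receiver above treats the webhook delivery GUID as an idempotency key: if a job with the same GUID already exists, it answers 202 and does nothing, so providers that redeliver on timeout cannot launch duplicate jobs. A hedged sketch of that pattern on its own (the in-memory set stands in for the database lookup; names are illustrative):

```python
# Minimal idempotent-receiver sketch; AWX checks Job/WorkflowJob rows instead.
seen_guids = set()

def receive(event_guid: str) -> int:
    if event_guid in seen_guids:
        return 202  # previously received; acknowledge but take no action
    seen_guids.add(event_guid)
    # ... create and launch the unified job here ...
    return 201

assert receive('abc-123') == 201
assert receive('abc-123') == 202  # redelivery is a no-op
```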
@@ -4,11 +4,12 @@ import os
 import logging
 import django
 from awx import __version__ as tower_version

 # Prepare the AWX environment.
 from awx import prepare_env, MODE
 from channels.routing import get_default_application  # noqa
-prepare_env() # NOQA
+
+prepare_env()  # NOQA


 """
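The comment in this hunk encodes an ordering constraint: `prepare_env()` must run before any import that reads Django settings. A heavily hedged sketch of the shape of such a bootstrap, not AWX's actual implementation (`prepare_env` here is a stand-in, and the settings module path is an assumption):

```python
import os

def prepare_env():  # stand-in for awx.prepare_env
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'awx.settings.production')

prepare_env()
# Only after this point is it safe to import code that touches settings, e.g.:
# from channels.routing import get_default_application
```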
@@ -10,12 +10,12 @@ from awx.conf.models import Setting


 class SettingAccess(BaseAccess):
-    '''
+    """
     - I can see settings when I am a super user or system auditor.
     - I can edit settings when I am a super user.
     - I can clear settings when I am a super user.
     - I can always see/edit/clear my own user settings.
-    '''
+    """

     model = Setting

@@ -1,5 +1,6 @@
 # Django
 from django.apps import AppConfig
+
 # from django.core import checks
 from django.utils.translation import ugettext_lazy as _

@@ -12,4 +13,5 @@ class ConfConfig(AppConfig):
     def ready(self):
         self.module.autodiscover()
         from .settings import SettingsWrapper
+
         SettingsWrapper.initialize()

@@ -10,10 +10,8 @@ from django.core.validators import URLValidator, _lazy_re_compile
 from django.utils.translation import ugettext_lazy as _

 # Django REST Framework
-from rest_framework.fields import ( # noqa
-    BooleanField, CharField, ChoiceField, DictField, DateTimeField, EmailField,
-    IntegerField, ListField, NullBooleanField
-)
+from rest_framework.fields import BooleanField, CharField, ChoiceField, DictField, DateTimeField, EmailField, IntegerField, ListField, NullBooleanField  # noqa
 from rest_framework.serializers import PrimaryKeyRelatedField  # noqa

 logger = logging.getLogger('awx.conf.fields')

@@ -26,7 +24,6 @@ logger = logging.getLogger('awx.conf.fields')


 class CharField(CharField):
-
     def to_representation(self, value):
         # django_rest_frameworks' default CharField implementation casts `None`
         # to a string `"None"`:

@@ -38,7 +35,6 @@ class CharField(CharField):


 class IntegerField(IntegerField):
-
     def get_value(self, dictionary):
         ret = super(IntegerField, self).get_value(dictionary)
         # Handle UI corner case

@@ -59,9 +55,7 @@ class StringListField(ListField):

 class StringListBooleanField(ListField):

-    default_error_messages = {
-        'type_error': _('Expected None, True, False, a string or list of strings but got {input_type} instead.'),
-    }
+    default_error_messages = {'type_error': _('Expected None, True, False, a string or list of strings but got {input_type} instead.')}
     child = CharField()

     def to_representation(self, value):

@@ -100,10 +94,7 @@ class StringListBooleanField(ListField):

 class StringListPathField(StringListField):

-    default_error_messages = {
-        'type_error': _('Expected list of strings but got {input_type} instead.'),
-        'path_error': _('{path} is not a valid path choice.'),
-    }
+    default_error_messages = {'type_error': _('Expected list of strings but got {input_type} instead.'), 'path_error': _('{path} is not a valid path choice.')}

     def to_internal_value(self, paths):
         if isinstance(paths, (list, tuple)):

@@ -122,12 +113,12 @@ class URLField(CharField):
     # these lines set up a custom regex that allow numbers in the
     # top-level domain
     tld_re = (
         r'\.'  # dot
         r'(?!-)'  # can't start with a dash
         r'(?:[a-z' + URLValidator.ul + r'0-9' + '-]{2,63}'  # domain label, this line was changed from the original URLValidator
         r'|xn--[a-z0-9]{1,59})'  # or punycode label
         r'(?<!-)'  # can't end with a dash
         r'\.?'  # may have a trailing dot
     )

     host_re = '(' + URLValidator.hostname_re + URLValidator.domain_re + tld_re + '|localhost)'

@@ -138,7 +129,9 @@ class URLField(CharField):
         r'(?:' + URLValidator.ipv4_re + '|' + URLValidator.ipv6_re + '|' + host_re + ')'
         r'(?::\d{2,5})?'  # port
         r'(?:[/?#][^\s]*)?'  # resource path
-        r'\Z', re.IGNORECASE)
+        r'\Z',
+        re.IGNORECASE,
+    )

     def __init__(self, **kwargs):
         schemes = kwargs.pop('schemes', None)

@@ -183,9 +176,7 @@ class URLField(CharField):

 class KeyValueField(DictField):
     child = CharField()
-    default_error_messages = {
-        'invalid_child': _('"{input}" is not a valid string.')
-    }
+    default_error_messages = {'invalid_child': _('"{input}" is not a valid string.')}

     def to_internal_value(self, data):
         ret = super(KeyValueField, self).to_internal_value(data)

@@ -198,9 +189,7 @@ class KeyValueField(DictField):


 class ListTuplesField(ListField):
-    default_error_messages = {
-        'type_error': _('Expected a list of tuples of max length 2 but got {input_type} instead.'),
-    }
+    default_error_messages = {'type_error': _('Expected a list of tuples of max length 2 but got {input_type} instead.')}

     def to_representation(self, value):
         if isinstance(value, (list, tuple)):

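The `URLField` hunks above customize one line of the stock validator so that top-level domain labels may contain digits. A self-contained sketch of that customization under simplified assumptions (no Django import; the host prefix pattern here is illustrative, not the full `URLValidator` machinery):

```python
import re

# The stock TLD pattern rejects digits; the variant below allows them,
# mirroring the "this line was changed" comment in the hunk.
tld_re = (
    r'\.'                    # dot
    r'(?!-)'                 # can't start with a dash
    r'(?:[a-z0-9-]{2,63}'    # domain label, digits allowed
    r'|xn--[a-z0-9]{1,59})'  # or punycode label
    r'(?<!-)'                # can't end with a dash
    r'\.?'                   # may have a trailing dot
)
host_re = re.compile(r'[a-z0-9]([a-z0-9-]*[a-z0-9])?' + tld_re + r'\Z', re.IGNORECASE)
assert host_re.match('example.com1')  # numeric TLD now accepted
```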
@@ -6,6 +6,7 @@ __all__ = ['get_license']

 def _get_validated_license_data():
     from awx.main.utils import get_licenser
+
     return get_licenser().validate()


@@ -8,9 +8,7 @@ from django.conf import settings

 class Migration(migrations.Migration):

-    dependencies = [
-        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
-    ]
+    dependencies = [migrations.swappable_dependency(settings.AUTH_USER_MODEL)]

     operations = [
         migrations.CreateModel(

@@ -21,11 +19,11 @@ class Migration(migrations.Migration):
                 ('modified', models.DateTimeField(default=None, editable=False)),
                 ('key', models.CharField(max_length=255)),
                 ('value', jsonfield.fields.JSONField(null=True)),
-                ('user', models.ForeignKey(related_name='settings', default=None, editable=False,
-                                           to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE, null=True)),
+                (
+                    'user',
+                    models.ForeignKey(related_name='settings', default=None, editable=False, to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE, null=True),
+                ),
             ],
-            options={
-                'abstract': False,
-            },
-        ),
+            options={'abstract': False},
+        )
     ]

@@ -15,11 +15,7 @@ def copy_tower_settings(apps, schema_editor):
         if tower_setting.key == 'LICENSE':
             value = json.loads(value)
         setting, created = Setting.objects.get_or_create(
-            key=tower_setting.key,
-            user=tower_setting.user,
-            created=tower_setting.created,
-            modified=tower_setting.modified,
-            defaults=dict(value=value),
+            key=tower_setting.key, user=tower_setting.user, created=tower_setting.created, modified=tower_setting.modified, defaults=dict(value=value)
         )
         if not created and setting.value != value:
             setting.value = value

@@ -36,18 +32,9 @@ def revert_tower_settings(apps, schema_editor):
         # LICENSE is stored as a JSON object; convert it back to a string.
         if setting.key == 'LICENSE':
             value = json.dumps(value)
-        defaults = dict(
-            value=value,
-            value_type='string',
-            description='',
-            category='',
-        )
+        defaults = dict(value=value, value_type='string', description='', category='')
         try:
-            tower_setting, created = TowerSettings.objects.get_or_create(
-                key=setting.key,
-                user=setting.user,
-                defaults=defaults,
-            )
+            tower_setting, created = TowerSettings.objects.get_or_create(key=setting.key, user=setting.user, defaults=defaults)
             if not created:
                 update_fields = []
                 for k, v in defaults.items():

@@ -62,15 +49,8 @@ def revert_tower_settings(apps, schema_editor):

 class Migration(migrations.Migration):

-    dependencies = [
-        ('conf', '0001_initial'),
-        ('main', '0004_squashed_v310_release'),
-    ]
+    dependencies = [('conf', '0001_initial'), ('main', '0004_squashed_v310_release')]

-    run_before = [
-        ('main', '0005_squashed_v310_v313_updates'),
-    ]
+    run_before = [('main', '0005_squashed_v310_v313_updates')]

-    operations = [
-        migrations.RunPython(copy_tower_settings, revert_tower_settings),
-    ]
+    operations = [migrations.RunPython(copy_tower_settings, revert_tower_settings)]

@@ -7,14 +7,6 @@ import awx.main.fields

 class Migration(migrations.Migration):

-    dependencies = [
-        ('conf', '0002_v310_copy_tower_settings'),
-    ]
+    dependencies = [('conf', '0002_v310_copy_tower_settings')]

-    operations = [
-        migrations.AlterField(
-            model_name='setting',
-            name='value',
-            field=awx.main.fields.JSONField(null=True),
-        ),
-    ]
+    operations = [migrations.AlterField(model_name='setting', name='value', field=awx.main.fields.JSONField(null=True))]

@@ -6,9 +6,7 @@ from django.db import migrations

 class Migration(migrations.Migration):

-    dependencies = [
-        ('conf', '0003_v310_JSONField_changes'),
-    ]
+    dependencies = [('conf', '0003_v310_JSONField_changes')]

     operations = [
         # This list is intentionally empty.

@@ -2,8 +2,8 @@
 from __future__ import unicode_literals
 from django.db import migrations
 from awx.conf.migrations import _rename_setting


 def copy_session_settings(apps, schema_editor):
     _rename_setting.rename_setting(apps, schema_editor, old_key='AUTH_TOKEN_PER_USER', new_key='SESSIONS_PER_USER')
     _rename_setting.rename_setting(apps, schema_editor, old_key='AUTH_TOKEN_EXPIRATION', new_key='SESSION_COOKIE_AGE')

@@ -16,11 +16,6 @@ def reverse_copy_session_settings(apps, schema_editor):

 class Migration(migrations.Migration):

-    dependencies = [
-        ('conf', '0004_v320_reencrypt'),
-    ]
+    dependencies = [('conf', '0004_v320_reencrypt')]

-    operations = [
-        migrations.RunPython(copy_session_settings, reverse_copy_session_settings),
-    ]
+    operations = [migrations.RunPython(copy_session_settings, reverse_copy_session_settings)]

@@ -9,10 +9,6 @@ from django.db import migrations

 class Migration(migrations.Migration):

-    dependencies = [
-        ('conf', '0005_v330_rename_two_session_settings'),
-    ]
+    dependencies = [('conf', '0005_v330_rename_two_session_settings')]

-    operations = [
-        migrations.RunPython(fill_ldap_group_type_params),
-    ]
+    operations = [migrations.RunPython(fill_ldap_group_type_params)]

@@ -10,10 +10,6 @@ def copy_allowed_ips(apps, schema_editor):

 class Migration(migrations.Migration):

-    dependencies = [
-        ('conf', '0006_v331_ldap_group_type'),
-    ]
+    dependencies = [('conf', '0006_v331_ldap_group_type')]

-    operations = [
-        migrations.RunPython(copy_allowed_ips),
-    ]
+    operations = [migrations.RunPython(copy_allowed_ips)]

@@ -15,12 +15,6 @@ def _noop(apps, schema_editor):

 class Migration(migrations.Migration):

-    dependencies = [
-        ('conf', '0007_v380_rename_more_settings'),
-    ]
+    dependencies = [('conf', '0007_v380_rename_more_settings')]

-    operations = [
-        migrations.RunPython(clear_old_license, _noop),
-        migrations.RunPython(prefill_rh_credentials, _noop)
-    ]
+    operations = [migrations.RunPython(clear_old_license, _noop), migrations.RunPython(prefill_rh_credentials, _noop)]

awx/conf/migrations/0009_rename_proot_settings.py (new file, 16 lines)
@@ -0,0 +1,16 @@
+# -*- coding: utf-8 -*-
+from __future__ import unicode_literals
+from django.db import migrations
+from awx.conf.migrations import _rename_setting
+
+
+def rename_proot_settings(apps, schema_editor):
+    _rename_setting.rename_setting(apps, schema_editor, old_key='AWX_PROOT_BASE_PATH', new_key='AWX_ISOLATION_BASE_PATH')
+    _rename_setting.rename_setting(apps, schema_editor, old_key='AWX_PROOT_SHOW_PATHS', new_key='AWX_ISOLATION_SHOW_PATHS')
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [('conf', '0008_subscriptions')]
+
+    operations = [migrations.RunPython(rename_proot_settings)]

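Note that the new migration above is one-directional: `RunPython` is given no reverse callable, so `migrate` cannot roll back past this point. A hedged sketch of the reversible variant, assuming Django is installed (the function bodies and the decision to make the reverse a no-op are illustrative):

```python
from django.db import migrations


def forwards(apps, schema_editor):
    ...  # rename old keys to new, as rename_proot_settings does above


def backwards(apps, schema_editor):
    ...  # restore the old keys, or deliberately do nothing


class Migration(migrations.Migration):

    dependencies = [('conf', '0009_rename_proot_settings')]

    # migrations.RunPython.noop is Django's built-in "nothing to undo" callable.
    operations = [migrations.RunPython(forwards, migrations.RunPython.noop)]
```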
@@ -1,4 +1,3 @@
-
 import inspect

 from django.conf import settings

@@ -16,10 +15,7 @@ def fill_ldap_group_type_params(apps, schema_editor):
         entry = qs[0]
         group_type_params = entry.value
     else:
-        entry = Setting(key='AUTH_LDAP_GROUP_TYPE_PARAMS',
-                        value=group_type_params,
-                        created=now(),
-                        modified=now())
+        entry = Setting(key='AUTH_LDAP_GROUP_TYPE_PARAMS', value=group_type_params, created=now(), modified=now())

     init_attrs = set(inspect.getargspec(group_type.__init__).args[1:])
     for k in list(group_type_params.keys()):

@@ -11,15 +11,16 @@ __all__ = ['get_encryption_key', 'decrypt_field']


 def get_encryption_key(field_name, pk=None):
-    '''
+    """
     Generate key for encrypted password based on field name,
     ``settings.SECRET_KEY``, and instance pk (if available).

     :param pk: (optional) the primary key of the ``awx.conf.model.Setting``;
                can be omitted in situations where you're encrypting a setting
                that is not database-persistent (like a read-only setting)
-    '''
+    """
     from django.conf import settings
+
     h = hashlib.sha1()
     h.update(settings.SECRET_KEY)
     if pk is not None:

@@ -29,11 +30,11 @@ def get_encryption_key(field_name, pk=None):


 def decrypt_value(encryption_key, value):
-    raw_data = value[len('$encrypted$'):]
+    raw_data = value[len('$encrypted$') :]
     # If the encrypted string contains a UTF8 marker, discard it
     utf8 = raw_data.startswith('UTF8$')
     if utf8:
-        raw_data = raw_data[len('UTF8$'):]
+        raw_data = raw_data[len('UTF8$') :]
     algo, b64data = raw_data.split('$', 1)
     if algo != 'AES':
         raise ValueError('unsupported algorithm: %s' % algo)

@@ -48,9 +49,9 @@ def decrypt_value(encryption_key, value):


 def decrypt_field(instance, field_name, subfield=None):
-    '''
+    """
     Return content of the given instance and field name decrypted.
-    '''
+    """
     value = getattr(instance, field_name)
     if isinstance(value, dict) and subfield is not None:
         value = value[subfield]

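The `decrypt_value` hunk above peels apart a stored value of the form `$encrypted$[UTF8$]AES$<base64>`. A minimal sketch of just that envelope parsing, assuming the layout shown in the hunk (no actual decryption performed; the helper name is illustrative):

```python
def parse_envelope(value: str):
    # Mirrors the prefix handling in decrypt_value() above.
    assert value.startswith('$encrypted$')
    raw = value[len('$encrypted$'):]
    utf8 = raw.startswith('UTF8$')  # optional marker for UTF-8 plaintext
    if utf8:
        raw = raw[len('UTF8$'):]
    algo, b64data = raw.split('$', 1)
    if algo != 'AES':
        raise ValueError('unsupported algorithm: %s' % algo)
    return utf8, b64data

assert parse_envelope('$encrypted$UTF8$AES$abc=') == (True, 'abc=')
```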
@@ -6,11 +6,11 @@ from django.conf import settings

 logger = logging.getLogger('awx.conf.settings')

 __all__ = ['rename_setting']


 def rename_setting(apps, schema_editor, old_key, new_key):

     old_setting = None
     Setting = apps.get_model('conf', 'Setting')
     if Setting.objects.filter(key=new_key).exists() or hasattr(settings, new_key):

@@ -24,9 +24,4 @@ def rename_setting(apps, schema_editor, old_key, new_key):
     if hasattr(settings, old_key):
         old_setting = getattr(settings, old_key)
     if old_setting is not None:
-        Setting.objects.create(key=new_key,
-                               value=old_setting,
-                               created=now(),
-                               modified=now()
-                               )
+        Setting.objects.create(key=new_key, value=old_setting, created=now(), modified=now())

@@ -6,7 +6,7 @@ from awx.main.utils.encryption import decrypt_field, encrypt_field

 logger = logging.getLogger('awx.conf.settings')

 __all__ = ['clear_old_license', 'prefill_rh_credentials']


 def clear_old_license(apps, schema_editor):
     Setting = apps.get_model('conf', 'Setting')

@@ -17,10 +17,7 @@ def _migrate_setting(apps, old_key, new_key, encrypted=False):
     Setting = apps.get_model('conf', 'Setting')
     if not Setting.objects.filter(key=old_key).exists():
         return
-    new_setting = Setting.objects.create(key=new_key,
-                                         created=now(),
-                                         modified=now()
-                                         )
+    new_setting = Setting.objects.create(key=new_key, created=now(), modified=now())
     if encrypted:
         new_setting.value = decrypt_field(Setting.objects.filter(key=old_key).first(), 'value')
         new_setting.value = encrypt_field(new_setting, 'value')

@@ -18,20 +18,9 @@ __all__ = ['Setting']

 class Setting(CreatedModifiedModel):

-    key = models.CharField(
-        max_length=255,
-    )
-    value = JSONField(
-        null=True,
-    )
-    user = prevent_search(models.ForeignKey(
-        'auth.User',
-        related_name='settings',
-        default=None,
-        null=True,
-        editable=False,
-        on_delete=models.CASCADE,
-    ))
+    key = models.CharField(max_length=255)
+    value = JSONField(null=True)
+    user = prevent_search(models.ForeignKey('auth.User', related_name='settings', default=None, null=True, editable=False, on_delete=models.CASCADE))

     def __str__(self):
         try:

@@ -66,6 +55,7 @@ class Setting(CreatedModifiedModel):
         # field and save again.
         if encrypted and new_instance:
             from awx.main.signals import disable_activity_stream
+
             with disable_activity_stream():
                 self.value = self._saved_value
                 self.save(update_fields=['value'])

@@ -82,6 +72,7 @@ class Setting(CreatedModifiedModel):
 import awx.conf.signals  # noqa

 from awx.main.registrar import activity_stream_registrar  # noqa
+
 activity_stream_registrar.connect(Setting)

 import awx.conf.access  # noqa

@@ -69,10 +69,7 @@ class SettingsRegistry(object):
         return self._dependent_settings.get(setting, set())

     def get_registered_categories(self):
-        categories = {
-            'all': _('All'),
-            'changed': _('Changed'),
-        }
+        categories = {'all': _('All'), 'changed': _('Changed')}
         for setting, kwargs in self._registry.items():
             category_slug = kwargs.get('category_slug', None)
             if category_slug is None or category_slug in categories:

@@ -95,8 +92,11 @@ class SettingsRegistry(object):
                 continue
             if kwargs.get('category_slug', None) in slugs_to_ignore:
                 continue
-            if (read_only in {True, False} and kwargs.get('read_only', False) != read_only and
-                    setting not in ('INSTALL_UUID', 'AWX_ISOLATED_PRIVATE_KEY', 'AWX_ISOLATED_PUBLIC_KEY')):
+            if (
+                read_only in {True, False}
+                and kwargs.get('read_only', False) != read_only
+                and setting not in ('INSTALL_UUID', 'AWX_ISOLATED_PRIVATE_KEY', 'AWX_ISOLATED_PUBLIC_KEY')
+            ):
                 # Note: Doesn't catch fields that set read_only via __init__;
                 # read-only field kwargs should always include read_only=True.
                 continue

@@ -117,6 +117,7 @@ class SettingsRegistry(object):

     def get_setting_field(self, setting, mixin_class=None, for_user=False, **kwargs):
         from rest_framework.fields import empty
+
         field_kwargs = {}
         field_kwargs.update(self._registry[setting])
         field_kwargs.update(kwargs)

@@ -141,11 +142,7 @@ class SettingsRegistry(object):
         field_instance.placeholder = placeholder
         field_instance.defined_in_file = defined_in_file
         if field_instance.defined_in_file:
-            field_instance.help_text = (
-                str(_('This value has been set manually in a settings file.')) +
-                '\n\n' +
-                str(field_instance.help_text)
-            )
+            field_instance.help_text = str(_('This value has been set manually in a settings file.')) + '\n\n' + str(field_instance.help_text)
         field_instance.encrypted = encrypted
         original_field_instance = field_instance
         if field_class != original_field_class:

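The reflowed condition in the registry hunk above implements a three-way filter: `read_only=None` disables filtering, `True`/`False` select one side, and a short allow-list of settings is exempt either way. A hedged sketch of that predicate in isolation (function and parameter names are illustrative):

```python
def keep(setting, registered_read_only, read_only, exempt=('INSTALL_UUID',)):
    # Mirrors the condition reconstructed in the hunk above.
    if read_only in {True, False} and registered_read_only != read_only and setting not in exempt:
        return False
    return True

assert keep('INSTALL_UUID', True, False)  # exempt settings survive the filter
assert not keep('FOO', True, False)       # read-only dropped when asking for writable
assert keep('FOO', True, None)            # None disables filtering entirely
```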
@@ -30,15 +30,9 @@ class SettingSerializer(BaseSerializer):
 class SettingCategorySerializer(serializers.Serializer):
     """Serialize setting category """

-    url = serializers.CharField(
-        read_only=True,
-    )
-    slug = serializers.CharField(
-        read_only=True,
-    )
-    name = serializers.CharField(
-        read_only=True,
-    )
+    url = serializers.CharField(read_only=True)
+    slug = serializers.CharField(read_only=True)
+    name = serializers.CharField(read_only=True)


 class SettingFieldMixin(object):

@@ -62,12 +62,12 @@ __all__ = ['SettingsWrapper', 'get_settings_to_cache', 'SETTING_CACHE_NOTSET']

 @contextlib.contextmanager
 def _ctit_db_wrapper(trans_safe=False):
-    '''
+    """
     Wrapper to avoid undesired actions by Django ORM when managing settings
     if only getting a setting, can use trans_safe=True, which will avoid
     throwing errors if the prior context was a broken transaction.
     Any database errors will be logged, but exception will be suppressed.
-    '''
+    """
     rollback_set = None
     is_atomic = None
     try:

@@ -115,7 +115,6 @@ class TransientSetting(object):


 class EncryptedCacheProxy(object):
-
     def __init__(self, cache, registry, encrypter=None, decrypter=None):
         """
         This proxy wraps a Django cache backend and overwrites the

@@ -145,19 +144,11 @@ class EncryptedCacheProxy(object):

     def set(self, key, value, log=True, **kwargs):
         if log is True:
-            logger.debug('cache set(%r, %r, %r)', key, filter_sensitive(self.registry, key, value),
-                         SETTING_CACHE_TIMEOUT)
-        self.cache.set(
-            key,
-            self._handle_encryption(self.encrypter, key, value),
-            **kwargs
-        )
+            logger.debug('cache set(%r, %r, %r)', key, filter_sensitive(self.registry, key, value), SETTING_CACHE_TIMEOUT)
+        self.cache.set(key, self._handle_encryption(self.encrypter, key, value), **kwargs)

     def set_many(self, data, **kwargs):
-        filtered_data = dict(
-            (key, filter_sensitive(self.registry, key, value))
-            for key, value in data.items()
-        )
+        filtered_data = dict((key, filter_sensitive(self.registry, key, value)) for key, value in data.items())
         logger.debug('cache set_many(%r, %r)', filtered_data, SETTING_CACHE_TIMEOUT)
         for key, value in data.items():
             self.set(key, value, log=False, **kwargs)

@@ -168,18 +159,11 @@ class EncryptedCacheProxy(object):
         # as part of the AES key when encrypting/decrypting
         obj_id = self.cache.get(Setting.get_cache_id_key(key), default=empty)
         if obj_id is empty:
-            logger.info('Efficiency notice: Corresponding id not stored in cache %s',
-                        Setting.get_cache_id_key(key))
+            logger.info('Efficiency notice: Corresponding id not stored in cache %s', Setting.get_cache_id_key(key))
             obj_id = getattr(self._get_setting_from_db(key), 'pk', None)
         elif obj_id == SETTING_CACHE_NONE:
             obj_id = None
-        return method(
-            TransientSetting(
-                pk=obj_id,
-                value=value
-            ),
-            'value'
-        )
+        return method(TransientSetting(pk=obj_id, value=value), 'value')

     # If the field in question isn't an "encrypted" field, this function is
     # a no-op; it just returns the provided value

@@ -206,9 +190,9 @@ def get_settings_to_cache(registry):


 def get_cache_value(value):
-    '''Returns the proper special cache setting for a value
+    """Returns the proper special cache setting for a value
     based on instance type.
-    '''
+    """
     if value is None:
         value = SETTING_CACHE_NONE
     elif isinstance(value, (list, tuple)) and len(value) == 0:

@@ -219,7 +203,6 @@ def get_cache_value(value):


 class SettingsWrapper(UserSettingsHolder):
-
     @classmethod
     def initialize(cls, cache=None, registry=None):
         """

@@ -231,11 +214,7 @@ class SettingsWrapper(UserSettingsHolder):
         ``awx.conf.settings_registry`` is used by default.
         """
         if not getattr(settings, '_awx_conf_settings', False):
-            settings_wrapper = cls(
-                settings._wrapped,
-                cache=cache or django_cache,
-                registry=registry or settings_registry
-            )
+            settings_wrapper = cls(settings._wrapped, cache=cache or django_cache, registry=registry or settings_registry)
             settings._wrapped = settings_wrapper

     def __init__(self, default_settings, cache, registry):

@@ -322,7 +301,7 @@ class SettingsWrapper(UserSettingsHolder):
         try:
             value = decrypt_field(setting, 'value')
         except ValueError as e:
-            #TODO: Remove in Tower 3.3
+            # TODO: Remove in Tower 3.3
             logger.debug('encountered error decrypting field: %s - attempting fallback to old', e)
             value = old_decrypt_field(setting, 'value')

@@ -345,14 +324,12 @@ class SettingsWrapper(UserSettingsHolder):
         # Generate a cache key for each setting and store them all at once.
         settings_to_cache = dict([(Setting.get_cache_key(k), v) for k, v in settings_to_cache.items()])
         for k, id_val in setting_ids.items():
-            logger.debug('Saving id in cache for encrypted setting %s, %s',
-                         Setting.get_cache_id_key(k), id_val)
+            logger.debug('Saving id in cache for encrypted setting %s, %s', Setting.get_cache_id_key(k), id_val)
             self.cache.cache.set(Setting.get_cache_id_key(k), id_val)
         settings_to_cache['_awx_conf_preload_expires'] = self._awx_conf_preload_expires
         self.cache.set_many(settings_to_cache, timeout=SETTING_CACHE_TIMEOUT)

     def _get_local(self, name, validate=True):
         self._preload_cache()
         cache_key = Setting.get_cache_key(name)
         try:

@@ -421,9 +398,7 @@ class SettingsWrapper(UserSettingsHolder):
             else:
                 return value
         except Exception:
-            logger.warning(
-                'The current value "%r" for setting "%s" is invalid.',
-                value, name, exc_info=True)
+            logger.warning('The current value "%r" for setting "%s" is invalid.', value, name, exc_info=True)
             return empty

     def _get_default(self, name):

@@ -454,8 +429,7 @@ class SettingsWrapper(UserSettingsHolder):
             setting_value = field.run_validation(data)
             db_value = field.to_representation(setting_value)
         except Exception as e:
-            logger.exception('Unable to assign value "%r" to setting "%s".',
-                             value, name, exc_info=True)
+            logger.exception('Unable to assign value "%r" to setting "%s".', value, name, exc_info=True)
             raise e

         setting = Setting.objects.filter(key=name, user__isnull=True).order_by('pk').first()

@@ -493,8 +467,7 @@ class SettingsWrapper(UserSettingsHolder):
     def __dir__(self):
         keys = []
         with _ctit_db_wrapper(trans_safe=True):
-            for setting in Setting.objects.filter(
-                    key__in=self.all_supported_settings, user__isnull=True):
+            for setting in Setting.objects.filter(key__in=self.all_supported_settings, user__isnull=True):
                 # Skip returning settings that have been overridden but are
                 # considered to be "not set".
                 if setting.value is None and SETTING_CACHE_NOTSET == SETTING_CACHE_NONE:

@@ -512,7 +485,7 @@ class SettingsWrapper(UserSettingsHolder):
         with _ctit_db_wrapper(trans_safe=True):
             set_locally = Setting.objects.filter(key=setting, user__isnull=True).exists()
             set_on_default = getattr(self.default_settings, 'is_overridden', lambda s: False)(setting)
-            return (set_locally or set_on_default)
+            return set_locally or set_on_default

     def __getattr_without_cache__(self, name):

@@ -30,12 +30,7 @@ def handle_setting_change(key, for_delete=False):

     # Send setting_changed signal with new value for each setting.
     for setting_key in setting_keys:
-        setting_changed.send(
-            sender=Setting,
-            setting=setting_key,
-            value=getattr(settings, setting_key, None),
-            enter=not bool(for_delete),
-        )
+        setting_changed.send(sender=Setting, setting=setting_key, value=getattr(settings, setting_key, None), enter=not bool(for_delete))


 @receiver(post_save, sender=Setting)

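The `EncryptedCacheProxy` hunks above describe a wrapper that encrypts sensitive values on `set` and decrypts them on `get`, so plaintext secrets never sit in the shared cache. A minimal, self-contained sketch of that idea (a dict stands in for the Django cache backend, and reversal stands in for real encryption; this is not AWX's implementation):

```python
class EncryptingCache:
    def __init__(self, backend, encrypt, decrypt):
        self.backend, self.encrypt, self.decrypt = backend, encrypt, decrypt

    def set(self, key, value, **kwargs):
        # Encrypt before the value ever reaches the shared backend.
        self.backend[key] = self.encrypt(value)

    def get(self, key, default=None):
        raw = self.backend.get(key, default)
        return self.decrypt(raw) if raw is not default else default

cache = EncryptingCache({}, encrypt=lambda v: v[::-1], decrypt=lambda v: v[::-1])
cache.set('SECRET', 'hunter2')
assert cache.get('SECRET') == 'hunter2'      # transparent round-trip
assert cache.backend['SECRET'] != 'hunter2'  # backend only sees ciphertext
```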
@@ -5,10 +5,7 @@ import pytest
 from django.urls import resolve
 from django.contrib.auth.models import User

-from rest_framework.test import (
-    APIRequestFactory,
-    force_authenticate,
-)
+from rest_framework.test import APIRequestFactory, force_authenticate


 @pytest.fixture

@@ -41,4 +38,5 @@ def api_request(admin):
         response = view(request, *view_args, **view_kwargs)
         response.render()
         return response
+
     return rf

@@ -45,44 +45,19 @@ def dummy_validate():

 @pytest.mark.django_db
 def test_non_admin_user_does_not_see_categories(api_request, dummy_setting, normal_user):
-    with dummy_setting(
-        'FOO_BAR',
-        field_class=fields.IntegerField,
-        category='FooBar',
-        category_slug='foobar'
-    ):
-        response = api_request(
-            'get',
-            reverse('api:setting_category_list',
-                    kwargs={'version': 'v2'})
-        )
+    with dummy_setting('FOO_BAR', field_class=fields.IntegerField, category='FooBar', category_slug='foobar'):
+        response = api_request('get', reverse('api:setting_category_list', kwargs={'version': 'v2'}))
         assert response.data['results']
-        response = api_request(
-            'get',
-            reverse('api:setting_category_list',
-                    kwargs={'version': 'v2'}),
-            user=normal_user
-        )
+        response = api_request('get', reverse('api:setting_category_list', kwargs={'version': 'v2'}), user=normal_user)
         assert not response.data['results']


 @pytest.mark.django_db
 def test_setting_singleton_detail_retrieve(api_request, dummy_setting):
-    with dummy_setting(
-        'FOO_BAR_1',
-        field_class=fields.IntegerField,
-        category='FooBar',
-        category_slug='foobar'
-    ), dummy_setting(
-        'FOO_BAR_2',
-        field_class=fields.IntegerField,
-        category='FooBar',
-        category_slug='foobar'
-    ):
-        response = api_request(
-            'get',
-            reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'})
-        )
+    with dummy_setting('FOO_BAR_1', field_class=fields.IntegerField, category='FooBar', category_slug='foobar'), dummy_setting(
+        'FOO_BAR_2', field_class=fields.IntegerField, category='FooBar', category_slug='foobar'
+    ):
+        response = api_request('get', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}))
         assert response.status_code == 200
         assert 'FOO_BAR_1' in response.data and response.data['FOO_BAR_1'] is None
         assert 'FOO_BAR_2' in response.data and response.data['FOO_BAR_2'] is None

@@ -90,97 +65,43 @@ def test_setting_singleton_detail_retrieve(api_request, dummy_setting):

 @pytest.mark.django_db
 def test_setting_singleton_detail_invalid_retrieve(api_request, dummy_setting, normal_user):
-    with dummy_setting(
-        'FOO_BAR_1',
-        field_class=fields.IntegerField,
-        category='FooBar',
-        category_slug='foobar'
-    ), dummy_setting(
-        'FOO_BAR_2',
-        field_class=fields.IntegerField,
-        category='FooBar',
-        category_slug='foobar'
-    ):
-        response = api_request(
-            'get',
-            reverse('api:setting_singleton_detail', kwargs={'category_slug': 'barfoo'})
-        )
+    with dummy_setting('FOO_BAR_1', field_class=fields.IntegerField, category='FooBar', category_slug='foobar'), dummy_setting(
+        'FOO_BAR_2', field_class=fields.IntegerField, category='FooBar', category_slug='foobar'
+    ):
+        response = api_request('get', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'barfoo'}))
         assert response.status_code == 404
-        response = api_request(
-            'get',
-            reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}),
-            user = normal_user
-        )
+        response = api_request('get', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}), user=normal_user)
         assert response.status_code == 403


 @pytest.mark.django_db
 def test_setting_signleton_retrieve_hierachy(api_request, dummy_setting):
-    with dummy_setting(
-        'FOO_BAR',
-        field_class=fields.IntegerField,
-        default=0,
-        category='FooBar',
-        category_slug='foobar'
-    ):
-        response = api_request(
-            'get',
-            reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'})
-        )
+    with dummy_setting('FOO_BAR', field_class=fields.IntegerField, default=0, category='FooBar', category_slug='foobar'):
+        response = api_request('get', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}))
         assert response.data['FOO_BAR'] == 0
         s = Setting(key='FOO_BAR', value=1)
         s.save()
-        response = api_request(
-            'get',
-            reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'})
-        )
+        response = api_request('get', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}))
         assert response.data['FOO_BAR'] == 1


 @pytest.mark.django_db
 def test_setting_singleton_retrieve_readonly(api_request, dummy_setting):
-    with dummy_setting(
-        'FOO_BAR',
-        field_class=fields.IntegerField,
-        read_only=True,
-        default=2,
-        category='FooBar',
-        category_slug='foobar'
-    ):
-        response = api_request(
-            'get',
-            reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'})
-        )
+    with dummy_setting('FOO_BAR', field_class=fields.IntegerField, read_only=True, default=2, category='FooBar', category_slug='foobar'):
+        response = api_request('get', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}))
         assert response.data['FOO_BAR'] == 2


 @pytest.mark.django_db
 def test_setting_singleton_update(api_request, dummy_setting):
-    with dummy_setting(
-        'FOO_BAR',
-        field_class=fields.IntegerField,
-        category='FooBar',
-        category_slug='foobar'
-    ), mock.patch('awx.conf.views.handle_setting_changes'):
-        api_request(
-            'patch',
-            reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}),
-            data={'FOO_BAR': 3}
-        )
-        response = api_request(
-            'get',
-            reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'})
-        )
+    with dummy_setting('FOO_BAR', field_class=fields.IntegerField, category='FooBar', category_slug='foobar'), mock.patch(
+        'awx.conf.views.handle_setting_changes'
+    ):
+        api_request('patch', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}), data={'FOO_BAR': 3})
+        response = api_request('get', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}))
         assert response.data['FOO_BAR'] == 3
-        api_request(
-            'patch',
-            reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}),
-            data={'FOO_BAR': 4}
-        )
-        response = api_request(
-            'get',
-            reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'})
-        )
+        api_request('patch', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}), data={'FOO_BAR': 4})
+        response = api_request('get', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}))
         assert response.data['FOO_BAR'] == 4

@@ -190,138 +111,70 @@ def test_setting_singleton_update_hybriddictfield_with_forbidden(api_request, du
     # indicating that only the defined fields can be filled in. Make
     # sure that the _Forbidden validator doesn't get used for the
     # fields. See also https://github.com/ansible/awx/issues/4099.
-    with dummy_setting(
-        'FOO_BAR',
-        field_class=sso_fields.SAMLOrgAttrField,
-        category='FooBar',
-        category_slug='foobar',
-    ), mock.patch('awx.conf.views.handle_setting_changes'):
+    with dummy_setting('FOO_BAR', field_class=sso_fields.SAMLOrgAttrField, category='FooBar', category_slug='foobar'), mock.patch(
+        'awx.conf.views.handle_setting_changes'
+    ):
         api_request(
             'patch',
             reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}),
-            data={'FOO_BAR': {'saml_admin_attr': 'Admins', 'saml_attr': 'Orgs'}}
-        )
-        response = api_request(
-            'get',
-            reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'})
+            data={'FOO_BAR': {'saml_admin_attr': 'Admins', 'saml_attr': 'Orgs'}},
         )
+        response = api_request('get', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}))
         assert response.data['FOO_BAR'] == {'saml_admin_attr': 'Admins', 'saml_attr': 'Orgs'}


 @pytest.mark.django_db
 def test_setting_singleton_update_dont_change_readonly_fields(api_request, dummy_setting):
-    with dummy_setting(
-        'FOO_BAR',
-        field_class=fields.IntegerField,
-        read_only=True,
-        default=4,
-        category='FooBar',
-        category_slug='foobar'
-    ), mock.patch('awx.conf.views.handle_setting_changes'):
-        api_request(
-            'patch',
-            reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}),
-            data={'FOO_BAR': 5}
-        )
-        response = api_request(
-            'get',
-            reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'})
-        )
+    with dummy_setting('FOO_BAR', field_class=fields.IntegerField, read_only=True, default=4, category='FooBar', category_slug='foobar'), mock.patch(
+        'awx.conf.views.handle_setting_changes'
+    ):
+        api_request('patch', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}), data={'FOO_BAR': 5})
+        response = api_request('get', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}))
         assert response.data['FOO_BAR'] == 4


 @pytest.mark.django_db
 def test_setting_singleton_update_dont_change_encrypted_mark(api_request, dummy_setting):
-    with dummy_setting(
-        'FOO_BAR',
-        field_class=fields.CharField,
-        encrypted=True,
-        category='FooBar',
-        category_slug='foobar'
-    ), mock.patch('awx.conf.views.handle_setting_changes'):
-        api_request(
-            'patch',
-            reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}),
-            data={'FOO_BAR': 'password'}
-        )
+    with dummy_setting('FOO_BAR', field_class=fields.CharField, encrypted=True, category='FooBar', category_slug='foobar'), mock.patch(
+        'awx.conf.views.handle_setting_changes'
+    ):
+        api_request('patch', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}), data={'FOO_BAR': 'password'})
         assert Setting.objects.get(key='FOO_BAR').value.startswith('$encrypted$')
-        response = api_request(
-            'get',
-            reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'})
-        )
+        response = api_request('get', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}))
         assert response.data['FOO_BAR'] == '$encrypted$'
-        api_request(
-            'patch',
-            reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}),
-            data={'FOO_BAR': '$encrypted$'}
-        )
+        api_request('patch', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}), data={'FOO_BAR': '$encrypted$'})
         assert decrypt_field(Setting.objects.get(key='FOO_BAR'), 'value') == 'password'
-        api_request(
-            'patch',
-            reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}),
-            data={'FOO_BAR': 'new_pw'}
-        )
+        api_request('patch', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}), data={'FOO_BAR': 'new_pw'})
         assert decrypt_field(Setting.objects.get(key='FOO_BAR'), 'value') == 'new_pw'


 @pytest.mark.django_db
 def test_setting_singleton_update_runs_custom_validate(api_request, dummy_setting, dummy_validate):
     def func_raising_exception(serializer, attrs):
         raise serializers.ValidationError('Error')

-    with dummy_setting(
-        'FOO_BAR',
-        field_class=fields.IntegerField,
-        category='FooBar',
-        category_slug='foobar'
-    ), dummy_validate(
+    with dummy_setting('FOO_BAR', field_class=fields.IntegerField, category='FooBar', category_slug='foobar'), dummy_validate(
         'foobar', func_raising_exception
     ), mock.patch('awx.conf.views.handle_setting_changes'):
-        response = api_request(
-            'patch',
-            reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}),
-            data={'FOO_BAR': 23}
-        )
+        response = api_request('patch', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}), data={'FOO_BAR': 23})
         assert response.status_code == 400


 @pytest.mark.django_db
 def test_setting_singleton_delete(api_request, dummy_setting):
-    with dummy_setting(
-        'FOO_BAR',
-        field_class=fields.IntegerField,
-        category='FooBar',
-        category_slug='foobar'
-    ), mock.patch('awx.conf.views.handle_setting_changes'):
-        api_request(
-            'delete',
-            reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'})
-        )
-        response = api_request(
-            'get',
-            reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'})
-        )
+    with dummy_setting('FOO_BAR', field_class=fields.IntegerField, category='FooBar', category_slug='foobar'), mock.patch(
+        'awx.conf.views.handle_setting_changes'
+    ):
+        api_request('delete', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}))
+        response = api_request('get', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}))
         assert not response.data['FOO_BAR']


 @pytest.mark.django_db
 def test_setting_singleton_delete_no_read_only_fields(api_request, dummy_setting):
-    with dummy_setting(
-        'FOO_BAR',
-        field_class=fields.IntegerField,
-        read_only=True,
-        default=23,
-        category='FooBar',
-        category_slug='foobar'
-    ), mock.patch('awx.conf.views.handle_setting_changes'):
-        api_request(
-            'delete',
-            reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'})
-        )
-        response = api_request(
-            'get',
-            reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'})
-        )
+    with dummy_setting('FOO_BAR', field_class=fields.IntegerField, read_only=True, default=23, category='FooBar', category_slug='foobar'), mock.patch(
+        'awx.conf.views.handle_setting_changes'
+    ):
+        api_request('delete', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}))
+        response = api_request('get', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}))
         assert response.data['FOO_BAR'] == 23

@@ -1,5 +1,3 @@
-
-
# Ensure that our autouse overwrites are working
def test_cache(settings):
    assert settings.CACHES['default']['BACKEND'] == 'django.core.cache.backends.locmem.LocMemCache'

@@ -4,7 +4,7 @@ from rest_framework.fields import ValidationError
from awx.conf.fields import StringListBooleanField, StringListPathField, ListTuplesField, URLField


-class TestStringListBooleanField():
+class TestStringListBooleanField:

    FIELD_VALUES = [
        ("hello", "hello"),
@@ -23,10 +23,7 @@ class TestStringListBooleanField():
        ("NULL", None),
    ]

-    FIELD_VALUES_INVALID = [
-        1.245,
-        {"a": "b"},
-    ]
+    FIELD_VALUES_INVALID = [1.245, {"a": "b"}]

    @pytest.mark.parametrize("value_in, value_known", FIELD_VALUES)
    def test_to_internal_value_valid(self, value_in, value_known):
@@ -39,8 +36,7 @@ class TestStringListBooleanField():
        field = StringListBooleanField()
        with pytest.raises(ValidationError) as e:
            field.to_internal_value(value)
-        assert e.value.detail[0] == "Expected None, True, False, a string or list " \
-                                    "of strings but got {} instead.".format(type(value))
+        assert e.value.detail[0] == "Expected None, True, False, a string or list " "of strings but got {} instead.".format(type(value))

    @pytest.mark.parametrize("value_in, value_known", FIELD_VALUES)
    def test_to_representation_valid(self, value_in, value_known):
@@ -53,22 +49,14 @@ class TestStringListBooleanField():
        field = StringListBooleanField()
        with pytest.raises(ValidationError) as e:
            field.to_representation(value)
-        assert e.value.detail[0] == "Expected None, True, False, a string or list " \
-                                    "of strings but got {} instead.".format(type(value))
+        assert e.value.detail[0] == "Expected None, True, False, a string or list " "of strings but got {} instead.".format(type(value))


-class TestListTuplesField():
+class TestListTuplesField:

-    FIELD_VALUES = [
-        ([('a', 'b'), ('abc', '123')], [("a", "b"), ("abc", "123")]),
-    ]
+    FIELD_VALUES = [([('a', 'b'), ('abc', '123')], [("a", "b"), ("abc", "123")])]

-    FIELD_VALUES_INVALID = [
-        ("abc", type("abc")),
-        ([('a', 'b', 'c'), ('abc', '123', '456')], type(('a',))),
-        (['a', 'b'], type('a')),
-        (123, type(123)),
-    ]
+    FIELD_VALUES_INVALID = [("abc", type("abc")), ([('a', 'b', 'c'), ('abc', '123', '456')], type(('a',))), (['a', 'b'], type('a')), (123, type(123))]

    @pytest.mark.parametrize("value_in, value_known", FIELD_VALUES)
    def test_to_internal_value_valid(self, value_in, value_known):
@@ -81,11 +69,10 @@ class TestListTuplesField():
        field = ListTuplesField()
        with pytest.raises(ValidationError) as e:
            field.to_internal_value(value)
-        assert e.value.detail[0] == "Expected a list of tuples of max length 2 " \
-                                    "but got {} instead.".format(t)
+        assert e.value.detail[0] == "Expected a list of tuples of max length 2 " "but got {} instead.".format(t)


-class TestStringListPathField():
+class TestStringListPathField:

    FIELD_VALUES = [
        ((".", "..", "/"), [".", "..", "/"]),
@@ -93,22 +80,12 @@ class TestStringListPathField():
        (("///home///",), ["/home"]),
        (("/home/././././",), ["/home"]),
        (("/home", "/home", "/home/"), ["/home"]),
-        (["/home/", "/home/", "/opt/", "/opt/", "/var/"], ["/home", "/opt", "/var"])
+        (["/home/", "/home/", "/opt/", "/opt/", "/var/"], ["/home", "/opt", "/var"]),
    ]

-    FIELD_VALUES_INVALID_TYPE = [
-        1.245,
-        {"a": "b"},
-        ("/home"),
-    ]
+    FIELD_VALUES_INVALID_TYPE = [1.245, {"a": "b"}, ("/home")]

-    FIELD_VALUES_INVALID_PATH = [
-        "",
-        "~/",
-        "home",
-        "/invalid_path",
-        "/home/invalid_path",
-    ]
+    FIELD_VALUES_INVALID_PATH = ["", "~/", "home", "/invalid_path", "/home/invalid_path"]

    @pytest.mark.parametrize("value_in, value_known", FIELD_VALUES)
    def test_to_internal_value_valid(self, value_in, value_known):
@@ -131,16 +108,19 @@ class TestStringListPathField():
        assert e.value.detail[0] == "{} is not a valid path choice.".format(value)


-class TestURLField():
+class TestURLField:
    regex = "^https://www.example.org$"

-    @pytest.mark.parametrize("url,schemes,regex, allow_numbers_in_top_level_domain, expect_no_error",[
-        ("ldap://www.example.org42", "ldap", None, True, True),
-        ("https://www.example.org42", "https", None, False, False),
-        ("https://www.example.org", None, regex, None, True),
-        ("https://www.example3.org", None, regex, None, False),
-        ("ftp://www.example.org", "https", None, None, False)
-    ])
+    @pytest.mark.parametrize(
+        "url,schemes,regex, allow_numbers_in_top_level_domain, expect_no_error",
+        [
+            ("ldap://www.example.org42", "ldap", None, True, True),
+            ("https://www.example.org42", "https", None, False, False),
+            ("https://www.example.org", None, regex, None, True),
+            ("https://www.example3.org", None, regex, None, False),
+            ("ftp://www.example.org", "https", None, None, False),
+        ],
+    )
    def test_urls(self, url, schemes, regex, allow_numbers_in_top_level_domain, expect_no_error):
        kwargs = {}
        kwargs.setdefault("allow_numbers_in_top_level_domain", allow_numbers_in_top_level_domain)

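Note: one detail worth calling out in the reformatted assertions above is that the backslash continuations were replaced with adjacent string literals, which Python concatenates at compile time, so the expected error message is byte-for-byte unchanged. A quick sketch:

# Adjacent string literals concatenate at compile time, so both
# spellings build the identical message string.
old = "Expected None, True, False, a string or list " \
      "of strings but got {} instead."
new = "Expected None, True, False, a string or list " "of strings but got {} instead."
assert old == new
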
@@ -33,30 +33,18 @@ def reg(request):
        if marker.name == 'defined_in_file':
            settings.configure(**marker.kwargs)

-    settings._wrapped = SettingsWrapper(settings._wrapped,
-                                        cache,
-                                        registry)
+    settings._wrapped = SettingsWrapper(settings._wrapped, cache, registry)
    return registry


def test_simple_setting_registration(reg):
    assert reg.get_registered_settings() == []
-    reg.register(
-        'AWX_SOME_SETTING_ENABLED',
-        field_class=fields.BooleanField,
-        category=_('System'),
-        category_slug='system',
-    )
+    reg.register('AWX_SOME_SETTING_ENABLED', field_class=fields.BooleanField, category=_('System'), category_slug='system')
    assert reg.get_registered_settings() == ['AWX_SOME_SETTING_ENABLED']


def test_simple_setting_unregistration(reg):
-    reg.register(
-        'AWX_SOME_SETTING_ENABLED',
-        field_class=fields.BooleanField,
-        category=_('System'),
-        category_slug='system',
-    )
+    reg.register('AWX_SOME_SETTING_ENABLED', field_class=fields.BooleanField, category=_('System'), category_slug='system')
    assert reg.get_registered_settings() == ['AWX_SOME_SETTING_ENABLED']

    reg.unregister('AWX_SOME_SETTING_ENABLED')
@@ -67,12 +55,7 @@ def test_duplicate_setting_registration(reg):
    "ensure that settings cannot be registered twice."
    with pytest.raises(ImproperlyConfigured):
        for i in range(2):
-            reg.register(
-                'AWX_SOME_SETTING_ENABLED',
-                field_class=fields.BooleanField,
-                category=_('System'),
-                category_slug='system',
-            )
+            reg.register('AWX_SOME_SETTING_ENABLED', field_class=fields.BooleanField, category=_('System'), category_slug='system')


def test_field_class_required_for_registration(reg):
@@ -82,110 +65,42 @@ def test_field_class_required_for_registration(reg):


def test_get_registered_settings_by_slug(reg):
-    reg.register(
-        'AWX_SOME_SETTING_ENABLED',
-        field_class=fields.BooleanField,
-        category=_('System'),
-        category_slug='system',
-    )
-    assert reg.get_registered_settings(category_slug='system') == [
-        'AWX_SOME_SETTING_ENABLED'
-    ]
+    reg.register('AWX_SOME_SETTING_ENABLED', field_class=fields.BooleanField, category=_('System'), category_slug='system')
+    assert reg.get_registered_settings(category_slug='system') == ['AWX_SOME_SETTING_ENABLED']
    assert reg.get_registered_settings(category_slug='other') == []


def test_get_registered_read_only_settings(reg):
-    reg.register(
-        'AWX_SOME_SETTING_ENABLED',
-        field_class=fields.BooleanField,
-        category=_('System'),
-        category_slug='system'
-    )
-    reg.register(
-        'AWX_SOME_READ_ONLY',
-        field_class=fields.BooleanField,
-        category=_('System'),
-        category_slug='system',
-        read_only=True
-    )
-    assert reg.get_registered_settings(read_only=True) ==[
-        'AWX_SOME_READ_ONLY'
-    ]
-    assert reg.get_registered_settings(read_only=False) == [
-        'AWX_SOME_SETTING_ENABLED'
-    ]
-    assert reg.get_registered_settings() == [
-        'AWX_SOME_SETTING_ENABLED',
-        'AWX_SOME_READ_ONLY'
-    ]
+    reg.register('AWX_SOME_SETTING_ENABLED', field_class=fields.BooleanField, category=_('System'), category_slug='system')
+    reg.register('AWX_SOME_READ_ONLY', field_class=fields.BooleanField, category=_('System'), category_slug='system', read_only=True)
+    assert reg.get_registered_settings(read_only=True) == ['AWX_SOME_READ_ONLY']
+    assert reg.get_registered_settings(read_only=False) == ['AWX_SOME_SETTING_ENABLED']
+    assert reg.get_registered_settings() == ['AWX_SOME_SETTING_ENABLED', 'AWX_SOME_READ_ONLY']


def test_get_dependent_settings(reg):
-    reg.register(
-        'AWX_SOME_SETTING_ENABLED',
-        field_class=fields.BooleanField,
-        category=_('System'),
-        category_slug='system'
-    )
-    reg.register(
-        'AWX_SOME_DEPENDENT_SETTING',
-        field_class=fields.BooleanField,
-        category=_('System'),
-        category_slug='system',
-        depends_on=['AWX_SOME_SETTING_ENABLED']
-    )
-    assert reg.get_dependent_settings('AWX_SOME_SETTING_ENABLED') == set([
-        'AWX_SOME_DEPENDENT_SETTING'
-    ])
+    reg.register('AWX_SOME_SETTING_ENABLED', field_class=fields.BooleanField, category=_('System'), category_slug='system')
+    reg.register(
+        'AWX_SOME_DEPENDENT_SETTING', field_class=fields.BooleanField, category=_('System'), category_slug='system', depends_on=['AWX_SOME_SETTING_ENABLED']
+    )
+    assert reg.get_dependent_settings('AWX_SOME_SETTING_ENABLED') == set(['AWX_SOME_DEPENDENT_SETTING'])


def test_get_registered_categories(reg):
-    reg.register(
-        'AWX_SOME_SETTING_ENABLED',
-        field_class=fields.BooleanField,
-        category=_('System'),
-        category_slug='system'
-    )
-    reg.register(
-        'AWX_SOME_OTHER_SETTING_ENABLED',
-        field_class=fields.BooleanField,
-        category=_('OtherSystem'),
-        category_slug='other-system'
-    )
-    assert reg.get_registered_categories() == {
-        'all': _('All'),
-        'changed': _('Changed'),
-        'system': _('System'),
-        'other-system': _('OtherSystem'),
-    }
+    reg.register('AWX_SOME_SETTING_ENABLED', field_class=fields.BooleanField, category=_('System'), category_slug='system')
+    reg.register('AWX_SOME_OTHER_SETTING_ENABLED', field_class=fields.BooleanField, category=_('OtherSystem'), category_slug='other-system')
+    assert reg.get_registered_categories() == {'all': _('All'), 'changed': _('Changed'), 'system': _('System'), 'other-system': _('OtherSystem')}


def test_is_setting_encrypted(reg):
-    reg.register(
-        'AWX_SOME_SETTING_ENABLED',
-        field_class=fields.CharField,
-        category=_('System'),
-        category_slug='system'
-    )
-    reg.register(
-        'AWX_SOME_ENCRYPTED_SETTING',
-        field_class=fields.CharField,
-        category=_('System'),
-        category_slug='system',
-        encrypted=True
-    )
+    reg.register('AWX_SOME_SETTING_ENABLED', field_class=fields.CharField, category=_('System'), category_slug='system')
+    reg.register('AWX_SOME_ENCRYPTED_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system', encrypted=True)
    assert reg.is_setting_encrypted('AWX_SOME_SETTING_ENABLED') is False
    assert reg.is_setting_encrypted('AWX_SOME_ENCRYPTED_SETTING') is True


def test_simple_field(reg):
-    reg.register(
-        'AWX_SOME_SETTING',
-        field_class=fields.CharField,
-        category=_('System'),
-        category_slug='system',
-        placeholder='Example Value',
-    )
+    reg.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system', placeholder='Example Value')

    field = reg.get_setting_field('AWX_SOME_SETTING')
    assert isinstance(field, fields.CharField)
@@ -196,31 +111,20 @@ def test_simple_field(reg):


def test_field_with_custom_attribute(reg):
-    reg.register(
-        'AWX_SOME_SETTING_ENABLED',
-        field_class=fields.BooleanField,
-        category_slug='system',
-    )
+    reg.register('AWX_SOME_SETTING_ENABLED', field_class=fields.BooleanField, category_slug='system')

-    field = reg.get_setting_field('AWX_SOME_SETTING_ENABLED',
-                                  category_slug='other-system')
+    field = reg.get_setting_field('AWX_SOME_SETTING_ENABLED', category_slug='other-system')
    assert field.category_slug == 'other-system'


def test_field_with_custom_mixin(reg):
    class GreatMixin(object):

        def is_great(self):
            return True

-    reg.register(
-        'AWX_SOME_SETTING_ENABLED',
-        field_class=fields.BooleanField,
-        category_slug='system',
-    )
+    reg.register('AWX_SOME_SETTING_ENABLED', field_class=fields.BooleanField, category_slug='system')

-    field = reg.get_setting_field('AWX_SOME_SETTING_ENABLED',
-                                  mixin_class=GreatMixin)
+    field = reg.get_setting_field('AWX_SOME_SETTING_ENABLED', mixin_class=GreatMixin)
    assert isinstance(field, fields.BooleanField)
    assert isinstance(field, GreatMixin)
    assert field.is_great() is True
@@ -228,12 +132,7 @@ def test_field_with_custom_mixin(reg):

@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
def test_default_value_from_settings(reg):
-    reg.register(
-        'AWX_SOME_SETTING',
-        field_class=fields.CharField,
-        category=_('System'),
-        category_slug='system',
-    )
+    reg.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system')

    field = reg.get_setting_field('AWX_SOME_SETTING')
    assert field.default == 'DEFAULT'
@@ -242,16 +141,10 @@ def test_default_value_from_settings(reg):

@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
def test_default_value_from_settings_with_custom_representation(reg):
    class LowercaseCharField(fields.CharField):

        def to_representation(self, value):
            return value.lower()

-    reg.register(
-        'AWX_SOME_SETTING',
-        field_class=LowercaseCharField,
-        category=_('System'),
-        category_slug='system',
-    )
+    reg.register('AWX_SOME_SETTING', field_class=LowercaseCharField, category=_('System'), category_slug='system')

    field = reg.get_setting_field('AWX_SOME_SETTING')
    assert field.default == 'default'

@@ -53,9 +53,7 @@ def settings(request):

    defaults['DEFAULTS_SNAPSHOT'] = {}
    settings.configure(**defaults)
-    settings._wrapped = SettingsWrapper(settings._wrapped,
-                                        cache,
-                                        registry)
+    settings._wrapped = SettingsWrapper(settings._wrapped, cache, registry)
    return settings


@@ -67,14 +65,7 @@ def test_unregistered_setting(settings):


def test_read_only_setting(settings):
-    settings.registry.register(
-        'AWX_READ_ONLY',
-        field_class=fields.CharField,
-        category=_('System'),
-        category_slug='system',
-        default='NO-EDITS',
-        read_only=True
-    )
+    settings.registry.register('AWX_READ_ONLY', field_class=fields.CharField, category=_('System'), category_slug='system', default='NO-EDITS', read_only=True)
    assert settings.AWX_READ_ONLY == 'NO-EDITS'
    assert len(settings.registry.get_registered_settings(read_only=False)) == 0
    settings = settings.registry.get_registered_settings(read_only=True)
@@ -85,13 +76,7 @@ def test_read_only_setting(settings):

@pytest.mark.parametrize('read_only', [True, False])
def test_setting_defined_in_file(settings, read_only):
    kwargs = {'read_only': True} if read_only else {}
-    settings.registry.register(
-        'AWX_SOME_SETTING',
-        field_class=fields.CharField,
-        category=_('System'),
-        category_slug='system',
-        **kwargs
-    )
+    settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system', **kwargs)
    assert settings.AWX_SOME_SETTING == 'DEFAULT'
    assert len(settings.registry.get_registered_settings(read_only=False)) == 0
    settings = settings.registry.get_registered_settings(read_only=True)
@@ -100,13 +85,7 @@ def test_setting_defined_in_file(settings, read_only):

@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
def test_setting_defined_in_file_with_empty_default(settings):
-    settings.registry.register(
-        'AWX_SOME_SETTING',
-        field_class=fields.CharField,
-        category=_('System'),
-        category_slug='system',
-        default='',
-    )
+    settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system', default='')
    assert settings.AWX_SOME_SETTING == 'DEFAULT'
    assert len(settings.registry.get_registered_settings(read_only=False)) == 0
    settings = settings.registry.get_registered_settings(read_only=True)
@@ -115,13 +94,7 @@ def test_setting_defined_in_file_with_empty_default(settings):

@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
def test_setting_defined_in_file_with_specific_default(settings):
-    settings.registry.register(
-        'AWX_SOME_SETTING',
-        field_class=fields.CharField,
-        category=_('System'),
-        category_slug='system',
-        default=123
-    )
+    settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system', default=123)
    assert settings.AWX_SOME_SETTING == 'DEFAULT'
    assert len(settings.registry.get_registered_settings(read_only=False)) == 0
    settings = settings.registry.get_registered_settings(read_only=True)
@@ -131,12 +104,7 @@ def test_setting_defined_in_file_with_specific_default(settings):

@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
def test_read_only_defaults_are_cached(settings):
    "read-only settings are stored in the cache"
-    settings.registry.register(
-        'AWX_SOME_SETTING',
-        field_class=fields.CharField,
-        category=_('System'),
-        category_slug='system'
-    )
+    settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system')
    assert settings.AWX_SOME_SETTING == 'DEFAULT'
    assert settings.cache.get('AWX_SOME_SETTING') == 'DEFAULT'

@@ -144,12 +112,7 @@ def test_read_only_defaults_are_cached(settings):
@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
def test_cache_respects_timeout(settings):
    "only preload the cache every SETTING_CACHE_TIMEOUT settings"
-    settings.registry.register(
-        'AWX_SOME_SETTING',
-        field_class=fields.CharField,
-        category=_('System'),
-        category_slug='system'
-    )
+    settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system')

    assert settings.AWX_SOME_SETTING == 'DEFAULT'
    cache_expiration = settings.cache.get('_awx_conf_preload_expires')
@@ -161,13 +124,7 @@ def test_cache_respects_timeout(settings):

def test_default_setting(settings, mocker):
    "settings that specify a default are inserted into the cache"
-    settings.registry.register(
-        'AWX_SOME_SETTING',
-        field_class=fields.CharField,
-        category=_('System'),
-        category_slug='system',
-        default='DEFAULT'
-    )
+    settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system', default='DEFAULT')

    settings_to_cache = mocker.Mock(**{'order_by.return_value': []})
    with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=settings_to_cache):
@@ -177,24 +134,13 @@ def test_default_setting(settings, mocker):

@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
def test_setting_is_from_setting_file(settings, mocker):
-    settings.registry.register(
-        'AWX_SOME_SETTING',
-        field_class=fields.CharField,
-        category=_('System'),
-        category_slug='system'
-    )
+    settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system')
    assert settings.AWX_SOME_SETTING == 'DEFAULT'
    assert settings.registry.get_setting_field('AWX_SOME_SETTING').defined_in_file is True


def test_setting_is_not_from_setting_file(settings, mocker):
-    settings.registry.register(
-        'AWX_SOME_SETTING',
-        field_class=fields.CharField,
-        category=_('System'),
-        category_slug='system',
-        default='DEFAULT'
-    )
+    settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system', default='DEFAULT')

    settings_to_cache = mocker.Mock(**{'order_by.return_value': []})
    with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=settings_to_cache):
@@ -204,19 +150,9 @@ def test_setting_is_not_from_setting_file(settings, mocker):

def test_empty_setting(settings, mocker):
    "settings with no default and no defined value are not valid"
-    settings.registry.register(
-        'AWX_SOME_SETTING',
-        field_class=fields.CharField,
-        category=_('System'),
-        category_slug='system'
-    )
+    settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system')

-    mocks = mocker.Mock(**{
-        'order_by.return_value': mocker.Mock(**{
-            '__iter__': lambda self: iter([]),
-            'first.return_value': None
-        }),
-    })
+    mocks = mocker.Mock(**{'order_by.return_value': mocker.Mock(**{'__iter__': lambda self: iter([]), 'first.return_value': None})})
    with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks):
        with pytest.raises(AttributeError):
            settings.AWX_SOME_SETTING
@@ -225,21 +161,10 @@ def test_empty_setting(settings, mocker):

def test_setting_from_db(settings, mocker):
    "settings can be loaded from the database"
-    settings.registry.register(
-        'AWX_SOME_SETTING',
-        field_class=fields.CharField,
-        category=_('System'),
-        category_slug='system',
-        default='DEFAULT'
-    )
+    settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system', default='DEFAULT')

    setting_from_db = mocker.Mock(key='AWX_SOME_SETTING', value='FROM_DB')
-    mocks = mocker.Mock(**{
-        'order_by.return_value': mocker.Mock(**{
-            '__iter__': lambda self: iter([setting_from_db]),
-            'first.return_value': setting_from_db
-        }),
-    })
+    mocks = mocker.Mock(**{'order_by.return_value': mocker.Mock(**{'__iter__': lambda self: iter([setting_from_db]), 'first.return_value': setting_from_db})})
    with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks):
        assert settings.AWX_SOME_SETTING == 'FROM_DB'
        assert settings.cache.get('AWX_SOME_SETTING') == 'FROM_DB'
@@ -248,12 +173,7 @@ def test_setting_from_db(settings, mocker):

@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
def test_read_only_setting_assignment(settings):
    "read-only settings cannot be overwritten"
-    settings.registry.register(
-        'AWX_SOME_SETTING',
-        field_class=fields.CharField,
-        category=_('System'),
-        category_slug='system'
-    )
+    settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system')
    assert settings.AWX_SOME_SETTING == 'DEFAULT'
    with pytest.raises(ImproperlyConfigured):
        settings.AWX_SOME_SETTING = 'CHANGED'
@@ -262,41 +182,26 @@ def test_read_only_setting_assignment(settings):

def test_db_setting_create(settings, mocker):
    "settings are stored in the database when set for the first time"
-    settings.registry.register(
-        'AWX_SOME_SETTING',
-        field_class=fields.CharField,
-        category=_('System'),
-        category_slug='system'
-    )
+    settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system')

    setting_list = mocker.Mock(**{'order_by.return_value.first.return_value': None})
-    with apply_patches([
-        mocker.patch('awx.conf.models.Setting.objects.filter',
-                     return_value=setting_list),
-        mocker.patch('awx.conf.models.Setting.objects.create', mocker.Mock())
-    ]):
+    with apply_patches(
+        [
+            mocker.patch('awx.conf.models.Setting.objects.filter', return_value=setting_list),
+            mocker.patch('awx.conf.models.Setting.objects.create', mocker.Mock()),
+        ]
+    ):
        settings.AWX_SOME_SETTING = 'NEW-VALUE'

-    models.Setting.objects.create.assert_called_with(
-        key='AWX_SOME_SETTING',
-        user=None,
-        value='NEW-VALUE'
-    )
+    models.Setting.objects.create.assert_called_with(key='AWX_SOME_SETTING', user=None, value='NEW-VALUE')


def test_db_setting_update(settings, mocker):
    "settings are updated in the database when their value changes"
-    settings.registry.register(
-        'AWX_SOME_SETTING',
-        field_class=fields.CharField,
-        category=_('System'),
-        category_slug='system'
-    )
+    settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system')

    existing_setting = mocker.Mock(key='AWX_SOME_SETTING', value='FROM_DB')
-    setting_list = mocker.Mock(**{
-        'order_by.return_value.first.return_value': existing_setting
-    })
+    setting_list = mocker.Mock(**{'order_by.return_value.first.return_value': existing_setting})
    with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=setting_list):
        settings.AWX_SOME_SETTING = 'NEW-VALUE'

@@ -306,12 +211,7 @@ def test_db_setting_update(settings, mocker):

def test_db_setting_deletion(settings, mocker):
    "settings are auto-deleted from the database"
-    settings.registry.register(
-        'AWX_SOME_SETTING',
-        field_class=fields.CharField,
-        category=_('System'),
-        category_slug='system'
-    )
+    settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system')

    existing_setting = mocker.Mock(key='AWX_SOME_SETTING', value='FROM_DB')
    with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=[existing_setting]):
@@ -323,12 +223,7 @@ def test_db_setting_deletion(settings, mocker):

@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
def test_read_only_setting_deletion(settings):
    "read-only settings cannot be deleted"
-    settings.registry.register(
-        'AWX_SOME_SETTING',
-        field_class=fields.CharField,
-        category=_('System'),
-        category_slug='system'
-    )
+    settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system')
    assert settings.AWX_SOME_SETTING == 'DEFAULT'
    with pytest.raises(ImproperlyConfigured):
        del settings.AWX_SOME_SETTING
@@ -337,36 +232,22 @@ def test_read_only_setting_deletion(settings):

def test_charfield_properly_sets_none(settings, mocker):
    "see: https://github.com/ansible/ansible-tower/issues/5322"
-    settings.registry.register(
-        'AWX_SOME_SETTING',
-        field_class=fields.CharField,
-        category=_('System'),
-        category_slug='system',
-        allow_null=True
-    )
+    settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system', allow_null=True)

    setting_list = mocker.Mock(**{'order_by.return_value.first.return_value': None})
-    with apply_patches([
-        mocker.patch('awx.conf.models.Setting.objects.filter',
-                     return_value=setting_list),
-        mocker.patch('awx.conf.models.Setting.objects.create', mocker.Mock())
-    ]):
+    with apply_patches(
+        [
+            mocker.patch('awx.conf.models.Setting.objects.filter', return_value=setting_list),
+            mocker.patch('awx.conf.models.Setting.objects.create', mocker.Mock()),
+        ]
+    ):
        settings.AWX_SOME_SETTING = None

-    models.Setting.objects.create.assert_called_with(
-        key='AWX_SOME_SETTING',
-        user=None,
-        value=None
-    )
+    models.Setting.objects.create.assert_called_with(key='AWX_SOME_SETTING', user=None, value=None)

def test_settings_use_cache(settings, mocker):
-    settings.registry.register(
-        'AWX_VAR',
-        field_class=fields.CharField,
-        category=_('System'),
-        category_slug='system'
-    )
+    settings.registry.register('AWX_VAR', field_class=fields.CharField, category=_('System'), category_slug='system')
    settings.cache.set('AWX_VAR', 'foobar')
    settings.cache.set('_awx_conf_preload_expires', 100)
    # Will fail test if database is used
@@ -374,13 +255,7 @@ def test_settings_use_cache(settings, mocker):


def test_settings_use_an_encrypted_cache(settings, mocker):
-    settings.registry.register(
-        'AWX_ENCRYPTED',
-        field_class=fields.CharField,
-        category=_('System'),
-        category_slug='system',
-        encrypted=True
-    )
+    settings.registry.register('AWX_ENCRYPTED', field_class=fields.CharField, category=_('System'), category_slug='system', encrypted=True)
    assert isinstance(settings.cache, EncryptedCacheProxy)
    assert settings.cache.__dict__['encrypter'] == encrypt_field
    assert settings.cache.__dict__['decrypter'] == decrypt_field
@@ -393,34 +268,18 @@ def test_settings_use_an_encrypted_cache(settings, mocker):

def test_sensitive_cache_data_is_encrypted(settings, mocker):
    "fields marked as `encrypted` are stored in the cache with encryption"
-    settings.registry.register(
-        'AWX_ENCRYPTED',
-        field_class=fields.CharField,
-        category=_('System'),
-        category_slug='system',
-        encrypted=True
-    )
+    settings.registry.register('AWX_ENCRYPTED', field_class=fields.CharField, category=_('System'), category_slug='system', encrypted=True)

    def rot13(obj, attribute):
        assert obj.pk == 123
        return codecs.encode(getattr(obj, attribute), 'rot_13')

    native_cache = LocMemCache(str(uuid4()), {})
-    cache = EncryptedCacheProxy(
-        native_cache,
-        settings.registry,
-        encrypter=rot13,
-        decrypter=rot13
-    )
+    cache = EncryptedCacheProxy(native_cache, settings.registry, encrypter=rot13, decrypter=rot13)
    # Insert the setting value into the database; the encryption process will
    # use its primary key as part of the encryption key
    setting_from_db = mocker.Mock(pk=123, key='AWX_ENCRYPTED', value='SECRET!')
-    mocks = mocker.Mock(**{
-        'order_by.return_value': mocker.Mock(**{
-            '__iter__': lambda self: iter([setting_from_db]),
-            'first.return_value': setting_from_db
-        }),
-    })
+    mocks = mocker.Mock(**{'order_by.return_value': mocker.Mock(**{'__iter__': lambda self: iter([setting_from_db]), 'first.return_value': setting_from_db})})
    with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks):
        cache.set('AWX_ENCRYPTED', 'SECRET!')
        assert cache.get('AWX_ENCRYPTED') == 'SECRET!'
@@ -429,26 +288,14 @@ def test_sensitive_cache_data_is_encrypted(settings, mocker):

def test_readonly_sensitive_cache_data_is_encrypted(settings):
    "readonly fields marked as `encrypted` are stored in the cache with encryption"
-    settings.registry.register(
-        'AWX_ENCRYPTED',
-        field_class=fields.CharField,
-        category=_('System'),
-        category_slug='system',
-        read_only=True,
-        encrypted=True
-    )
+    settings.registry.register('AWX_ENCRYPTED', field_class=fields.CharField, category=_('System'), category_slug='system', read_only=True, encrypted=True)

    def rot13(obj, attribute):
        assert obj.pk is None
        return codecs.encode(getattr(obj, attribute), 'rot_13')

    native_cache = LocMemCache(str(uuid4()), {})
-    cache = EncryptedCacheProxy(
-        native_cache,
-        settings.registry,
-        encrypter=rot13,
-        decrypter=rot13
-    )
+    cache = EncryptedCacheProxy(native_cache, settings.registry, encrypter=rot13, decrypter=rot13)
    cache.set('AWX_ENCRYPTED', 'SECRET!')
    assert cache.get('AWX_ENCRYPTED') == 'SECRET!'
    assert native_cache.get('AWX_ENCRYPTED') == 'FRPERG!'

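Note: an aside on the final assertion above. The tests swap the real encrypt_field/decrypt_field pair for a rot13 codec, which is its own inverse, so the proxy round-trips plaintext while the backing LocMemCache only ever sees ciphertext. The expected 'FRPERG!' follows directly from the codec:

import codecs

assert codecs.encode('SECRET!', 'rot_13') == 'FRPERG!'  # what the native cache stores
assert codecs.encode('FRPERG!', 'rot_13') == 'SECRET!'  # rot13 is self-inverse
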
@@ -3,14 +3,10 @@

from django.conf.urls import url
-from awx.conf.views import (
-    SettingCategoryList,
-    SettingSingletonDetail,
-    SettingLoggingTest,
-)
+from awx.conf.views import SettingCategoryList, SettingSingletonDetail, SettingLoggingTest


urlpatterns = [
    url(r'^$', SettingCategoryList.as_view(), name='setting_category_list'),
    url(r'^(?P<category_slug>[a-z0-9-]+)/$', SettingSingletonDetail.as_view(), name='setting_singleton_detail'),
    url(r'^logging/test/$', SettingLoggingTest.as_view(), name='setting_logging_test'),

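Note: these three named routes are exactly what the conf test suite resolves with reverse(); for instance (the rendered /api/v2/settings/... prefixes depend on where this urlconf is mounted, so treat the example paths as illustrative):

from awx.api.versioning import reverse

reverse('api:setting_category_list')                                         # e.g. /api/v2/settings/
reverse('api:setting_singleton_detail', kwargs={'category_slug': 'system'})  # e.g. /api/v2/settings/system/
reverse('api:setting_logging_test')                                          # e.g. /api/v2/settings/logging/test/
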
@@ -7,7 +7,4 @@ __all__ = ['conf_to_dict']


def conf_to_dict(obj):
-    return {
-        'category': settings_registry.get_setting_category(obj.key),
-        'name': obj.key,
-    }
+    return {'category': settings_registry.get_setting_category(obj.key), 'name': obj.key}

@@ -22,12 +22,7 @@ from rest_framework import serializers
from rest_framework import status

# Tower
-from awx.api.generics import (
-    APIView,
-    GenericAPIView,
-    ListAPIView,
-    RetrieveUpdateDestroyAPIView,
-)
+from awx.api.generics import APIView, GenericAPIView, ListAPIView, RetrieveUpdateDestroyAPIView
from awx.api.permissions import IsSuperUser
from awx.api.versioning import reverse
from awx.main.utils import camelcase_to_underscore
@@ -81,9 +76,7 @@ class SettingSingletonDetail(RetrieveUpdateDestroyAPIView):
        if self.category_slug not in category_slugs:
            raise PermissionDenied()

-        registered_settings = settings_registry.get_registered_settings(
-            category_slug=self.category_slug, read_only=False,
-        )
+        registered_settings = settings_registry.get_registered_settings(category_slug=self.category_slug, read_only=False)
        if self.category_slug == 'user':
            return Setting.objects.filter(key__in=registered_settings, user=self.request.user)
        else:
@@ -91,9 +84,7 @@ class SettingSingletonDetail(RetrieveUpdateDestroyAPIView):

    def get_object(self):
        settings_qs = self.get_queryset()
-        registered_settings = settings_registry.get_registered_settings(
-            category_slug=self.category_slug,
-        )
+        registered_settings = settings_registry.get_registered_settings(category_slug=self.category_slug)
        all_settings = {}
        for setting in settings_qs:
            all_settings[setting.key] = setting.value
@@ -117,9 +108,7 @@ class SettingSingletonDetail(RetrieveUpdateDestroyAPIView):
        for key, value in serializer.validated_data.items():
            if key == 'LICENSE' or settings_registry.is_setting_read_only(key):
                continue
-            if settings_registry.is_setting_encrypted(key) and \
-                    isinstance(value, str) and \
-                    value.startswith('$encrypted$'):
+            if settings_registry.is_setting_encrypted(key) and isinstance(value, str) and value.startswith('$encrypted$'):
                continue
            setattr(serializer.instance, key, value)
            setting = settings_qs.filter(key=key).order_by('pk').first()
@@ -133,7 +122,6 @@ class SettingSingletonDetail(RetrieveUpdateDestroyAPIView):
        if settings_change_list:
            connection.on_commit(lambda: handle_setting_changes.delay(settings_change_list))

-
    def destroy(self, request, *args, **kwargs):
        instance = self.get_object()
        self.perform_destroy(instance)
@@ -170,7 +158,7 @@ class SettingLoggingTest(GenericAPIView):
        enabled = getattr(settings, 'LOG_AGGREGATOR_ENABLED', False)
        if not enabled:
            return Response({'error': 'Logging not enabled'}, status=status.HTTP_409_CONFLICT)

        # Send test message to configured logger based on db settings
        try:
            default_logger = settings.LOG_AGGREGATOR_LOGGERS[0]
@@ -179,18 +167,15 @@ class SettingLoggingTest(GenericAPIView):
        except IndexError:
            default_logger = 'awx'
        logging.getLogger(default_logger).error('AWX Connection Test Message')

        hostname = getattr(settings, 'LOG_AGGREGATOR_HOST', None)
        protocol = getattr(settings, 'LOG_AGGREGATOR_PROTOCOL', None)

        try:
-            subprocess.check_output(
-                ['rsyslogd', '-N1', '-f', '/var/lib/awx/rsyslog/rsyslog.conf'],
-                stderr=subprocess.STDOUT
-            )
+            subprocess.check_output(['rsyslogd', '-N1', '-f', '/var/lib/awx/rsyslog/rsyslog.conf'], stderr=subprocess.STDOUT)
        except subprocess.CalledProcessError as exc:
            return Response({'error': exc.output}, status=status.HTTP_400_BAD_REQUEST)

        # Check to ensure port is open at host
        if protocol in ['udp', 'tcp']:
            port = getattr(settings, 'LOG_AGGREGATOR_PORT', None)
@@ -206,7 +191,7 @@ class SettingLoggingTest(GenericAPIView):
        else:
            s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            try:
-                s.settimeout(.5)
+                s.settimeout(0.5)
                s.connect((hostname, int(port)))
                s.shutdown(SHUT_RDWR)
                s.close()

awx/main/access.py: 1075 changes (file diff suppressed because it is too large)
@@ -1 +1 @@
-from .core import all_collectors, expensive_collectors, register, gather, ship # noqa
+from .core import all_collectors, register, gather, ship # noqa

awx/main/analytics/analytics_tasks.py: new file, 14 lines
@@ -0,0 +1,14 @@
+# Python
+import logging
+
+# AWX
+from awx.main.analytics.subsystem_metrics import Metrics
+from awx.main.dispatch.publish import task
+from awx.main.dispatch import get_local_queuename
+
+logger = logging.getLogger('awx.main.scheduler')
+
+
+@task(queue=get_local_queuename)
+def send_subsystem_metrics():
+    Metrics().send_metrics()

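Note on the new file: functions decorated with @task(queue=...) are published to the dispatcher rather than called inline. Assuming the publish API matches how this changeset's settings view fires handle_setting_changes.delay(...), invocation would look like:

from awx.main.analytics.analytics_tasks import send_subsystem_metrics

# Enqueue on the local dispatcher queue instead of running in-process,
# mirroring handle_setting_changes.delay(...) in awx/conf/views.py above.
send_subsystem_metrics.delay()
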
@@ -24,7 +24,7 @@ logger = logging.getLogger('awx.analytics.broadcast_websocket')


def dt_to_seconds(dt):
-    return int((dt - datetime.datetime(1970,1,1)).total_seconds())
+    return int((dt - datetime.datetime(1970, 1, 1)).total_seconds())


def now_seconds():
@@ -37,7 +37,7 @@ def safe_name(s):


# Second granularity; Per-minute
-class FixedSlidingWindow():
+class FixedSlidingWindow:
    def __init__(self, start_time=None):
        self.buckets = dict()
        self.start_time = start_time or now_seconds()
@@ -65,7 +65,7 @@ class FixedSlidingWindow():
        return sum(self.buckets.values()) or 0


-class BroadcastWebsocketStatsManager():
+class BroadcastWebsocketStatsManager:
    def __init__(self, event_loop, local_hostname):
        self._local_hostname = local_hostname

@@ -74,8 +74,7 @@ class BroadcastWebsocketStatsManager():
        self._redis_key = BROADCAST_WEBSOCKET_REDIS_KEY_NAME

    def new_remote_host_stats(self, remote_hostname):
-        self._stats[remote_hostname] = BroadcastWebsocketStats(self._local_hostname,
-                                                               remote_hostname)
+        self._stats[remote_hostname] = BroadcastWebsocketStats(self._local_hostname, remote_hostname)
        return self._stats[remote_hostname]

    def delete_remote_host_stats(self, remote_hostname):
@@ -100,15 +99,15 @@ class BroadcastWebsocketStatsManager():

    @classmethod
    def get_stats_sync(cls):
-        '''
+        """
        Stringified verion of all the stats
-        '''
+        """
        redis_conn = redis.Redis.from_url(settings.BROKER_URL)
        stats_str = redis_conn.get(BROADCAST_WEBSOCKET_REDIS_KEY_NAME) or b''
        return parser.text_string_to_metric_families(stats_str.decode('UTF-8'))


-class BroadcastWebsocketStats():
+class BroadcastWebsocketStats:
    def __init__(self, local_hostname, remote_hostname):
        self._local_hostname = local_hostname
        self._remote_hostname = remote_hostname
@@ -118,24 +117,25 @@ class BroadcastWebsocketStats():
        self.name = safe_name(self._local_hostname)
        self.remote_name = safe_name(self._remote_hostname)

-        self._messages_received_total = Counter(f'awx_{self.remote_name}_messages_received_total',
-                                                'Number of messages received, to be forwarded, by the broadcast websocket system',
-                                                registry=self._registry)
-        self._messages_received = Gauge(f'awx_{self.remote_name}_messages_received',
-                                        'Number forwarded messages received by the broadcast websocket system, for the duration of the current connection',
-                                        registry=self._registry)
-        self._connection = Enum(f'awx_{self.remote_name}_connection',
-                                'Websocket broadcast connection',
-                                states=['disconnected', 'connected'],
-                                registry=self._registry)
+        self._messages_received_total = Counter(
+            f'awx_{self.remote_name}_messages_received_total',
+            'Number of messages received, to be forwarded, by the broadcast websocket system',
+            registry=self._registry,
+        )
+        self._messages_received = Gauge(
+            f'awx_{self.remote_name}_messages_received',
+            'Number forwarded messages received by the broadcast websocket system, for the duration of the current connection',
+            registry=self._registry,
+        )
+        self._connection = Enum(
+            f'awx_{self.remote_name}_connection', 'Websocket broadcast connection', states=['disconnected', 'connected'], registry=self._registry
+        )
        self._connection.state('disconnected')
-        self._connection_start = Gauge(f'awx_{self.remote_name}_connection_start',
-                                       'Time the connection was established',
-                                       registry=self._registry)
+        self._connection_start = Gauge(f'awx_{self.remote_name}_connection_start', 'Time the connection was established', registry=self._registry)

-        self._messages_received_per_minute = Gauge(f'awx_{self.remote_name}_messages_received_per_minute',
-                                                   'Messages received per minute',
-                                                   registry=self._registry)
+        self._messages_received_per_minute = Gauge(
+            f'awx_{self.remote_name}_messages_received_per_minute', 'Messages received per minute', registry=self._registry
+        )
        self._internal_messages_received_per_minute = FixedSlidingWindow()

    def unregister(self):

@@ -1,22 +1,25 @@
import io
import json
import os
import os.path
import platform
+import distro

from django.db import connection
-from django.db.models import Count
+from django.db.models import Count, Max, Min
from django.conf import settings
+from django.contrib.sessions.models import Session
-from django.utils.timezone import now
+from django.utils.timezone import now, timedelta
from django.utils.translation import ugettext_lazy as _

+from psycopg2.errors import UntranslatableCharacter

from awx.conf.license import get_license
-from awx.main.utils import (get_awx_version, get_ansible_version,
-                            get_custom_venv_choices, camelcase_to_underscore)
+from awx.main.utils import get_awx_version, get_custom_venv_choices, camelcase_to_underscore, datetime_hook
from awx.main import models
-from django.contrib.sessions.models import Session
from awx.main.analytics import register

-'''
+"""
This module is used to define metrics collected by awx.main.analytics.gather()
Each function is decorated with a key name, and should return a data
structure that can be serialized to JSON
@@ -30,10 +33,64 @@ All functions - when called - will be passed a datetime.datetime object,
`since`, which represents the last time analytics were gathered (some metrics
functions - like those that return metadata about playbook runs, may return
data _since_ the last report date - i.e., new data in the last 24 hours)
-'''
+"""


-@register('config', '1.2', description=_('General platform configuration.'))
+def trivial_slicing(key, since, until):
+    if since is not None:
+        return [(since, until)]
+
+    from awx.conf.models import Setting
+
+    horizon = until - timedelta(weeks=4)
+    last_entries = Setting.objects.filter(key='AUTOMATION_ANALYTICS_LAST_ENTRIES').first()
+    last_entries = json.loads((last_entries.value if last_entries is not None else '') or '{}', object_hook=datetime_hook)
+    last_entry = max(last_entries.get(key) or settings.AUTOMATION_ANALYTICS_LAST_GATHER or horizon, horizon)
+    return [(last_entry, until)]
+
+
+def four_hour_slicing(key, since, until):
+    if since is not None:
+        last_entry = since
+    else:
+        from awx.conf.models import Setting
+
+        horizon = until - timedelta(weeks=4)
+        last_entries = Setting.objects.filter(key='AUTOMATION_ANALYTICS_LAST_ENTRIES').first()
+        last_entries = json.loads((last_entries.value if last_entries is not None else '') or '{}', object_hook=datetime_hook)
+        last_entry = max(last_entries.get(key) or settings.AUTOMATION_ANALYTICS_LAST_GATHER or horizon, horizon)
+
+    start, end = last_entry, None
+    while start < until:
+        end = min(start + timedelta(hours=4), until)
+        yield (start, end)
+        start = end
+
+
+def events_slicing(key, since, until):
+    from awx.conf.models import Setting
+
+    last_gather = settings.AUTOMATION_ANALYTICS_LAST_GATHER
+    last_entries = Setting.objects.filter(key='AUTOMATION_ANALYTICS_LAST_ENTRIES').first()
+    last_entries = json.loads((last_entries.value if last_entries is not None else '') or '{}', object_hook=datetime_hook)
+    horizon = until - timedelta(weeks=4)
+
+    lower = since or last_gather or horizon
+    if not since and last_entries.get(key):
+        lower = horizon
+    pk_values = models.JobEvent.objects.filter(created__gte=lower, created__lte=until).aggregate(Min('pk'), Max('pk'))
+
+    previous_pk = pk_values['pk__min'] - 1 if pk_values['pk__min'] is not None else 0
+    if not since and last_entries.get(key):
+        previous_pk = max(last_entries[key], previous_pk)
+    final_pk = pk_values['pk__max'] or 0
+
+    step = 100000
+    for start in range(previous_pk, final_pk + 1, step):
+        yield (start, min(start + step, final_pk))

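Note: a worked example of the pk batching in events_slicing above. With previous_pk = 0, final_pk = 250000, and step = 100000, the loop yields (0, 100000), (100000, 200000), (200000, 250000), capping each collection query at 100k job events:

# Worked example of the slicing generator's output:
step = 100000
previous_pk, final_pk = 0, 250000
slices = [(start, min(start + step, final_pk)) for start in range(previous_pk, final_pk + 1, step)]
assert slices == [(0, 100000), (100000, 200000), (200000, 250000)]
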
+@register('config', '1.3', description=_('General platform configuration.'))
def config(since, **kwargs):
    license_info = get_license()
    install_type = 'traditional'
@@ -44,7 +101,7 @@ def config(since, **kwargs):
    return {
        'platform': {
            'system': platform.system(),
-            'dist': platform.dist(),
+            'dist': distro.linux_distribution(),
            'release': platform.release(),
            'type': install_type,
        },
@@ -52,7 +109,6 @@ def config(since, **kwargs):
        'instance_uuid': settings.SYSTEM_UUID,
        'tower_url_base': settings.TOWER_URL_BASE,
        'tower_version': get_awx_version(),
-        'ansible_version': get_ansible_version(),
        'license_type': license_info.get('license_type', 'UNLICENSED'),
        'free_instances': license_info.get('free_instances', 0),
        'total_licensed_instances': license_info.get('instance_count', 0),
@@ -68,96 +124,99 @@ def config(since, **kwargs):
@register('counts', '1.0', description=_('Counts of objects such as organizations, inventories, and projects'))
def counts(since, **kwargs):
    counts = {}
-    for cls in (models.Organization, models.Team, models.User,
-                models.Inventory, models.Credential, models.Project,
-                models.JobTemplate, models.WorkflowJobTemplate,
-                models.Host, models.Schedule, models.CustomInventoryScript,
-                models.NotificationTemplate):
+    for cls in (
+        models.Organization,
+        models.Team,
+        models.User,
+        models.Inventory,
+        models.Credential,
+        models.Project,
+        models.JobTemplate,
+        models.WorkflowJobTemplate,
+        models.Host,
+        models.Schedule,
+        models.CustomInventoryScript,
+        models.NotificationTemplate,
+    ):
        counts[camelcase_to_underscore(cls.__name__)] = cls.objects.count()

    venvs = get_custom_venv_choices()
-    counts['custom_virtualenvs'] = len([
-        v for v in venvs
-        if os.path.basename(v.rstrip('/')) != 'ansible'
-    ])
+    counts['custom_virtualenvs'] = len([v for v in venvs if os.path.basename(v.rstrip('/')) != 'ansible'])

    inv_counts = dict(models.Inventory.objects.order_by().values_list('kind').annotate(Count('kind')))
    inv_counts['normal'] = inv_counts.get('', 0)
    inv_counts.pop('', None)
    inv_counts['smart'] = inv_counts.get('smart', 0)
    counts['inventories'] = inv_counts

-    counts['unified_job'] = models.UnifiedJob.objects.exclude(launch_type='sync').count() # excludes implicit project_updates
-    counts['active_host_count'] = models.Host.objects.active_count()
-
+    counts['unified_job'] = models.UnifiedJob.objects.exclude(launch_type='sync').count()  # excludes implicit project_updates
+    counts['active_host_count'] = models.Host.objects.active_count()
    active_sessions = Session.objects.filter(expire_date__gte=now()).count()
    active_user_sessions = models.UserSessionMembership.objects.select_related('session').filter(session__expire_date__gte=now()).count()
    active_anonymous_sessions = active_sessions - active_user_sessions
    counts['active_sessions'] = active_sessions
    counts['active_user_sessions'] = active_user_sessions
    counts['active_anonymous_sessions'] = active_anonymous_sessions
-    counts['running_jobs'] = models.UnifiedJob.objects.exclude(launch_type='sync').filter(status__in=('running', 'waiting',)).count()
+    counts['running_jobs'] = (
+        models.UnifiedJob.objects.exclude(launch_type='sync')
+        .filter(
+            status__in=(
+                'running',
+                'waiting',
+            )
+        )
+        .count()
+    )
    counts['pending_jobs'] = models.UnifiedJob.objects.exclude(launch_type='sync').filter(status__in=('pending',)).count()
    return counts

@register('org_counts', '1.0', description=_('Counts of users and teams by organization'))
|
||||
def org_counts(since, **kwargs):
|
||||
counts = {}
|
||||
for org in models.Organization.objects.annotate(num_users=Count('member_role__members', distinct=True),
|
||||
num_teams=Count('teams', distinct=True)).values('name', 'id', 'num_users', 'num_teams'):
|
||||
counts[org['id']] = {'name': org['name'],
|
||||
'users': org['num_users'],
|
||||
'teams': org['num_teams']
|
||||
}
|
||||
for org in models.Organization.objects.annotate(num_users=Count('member_role__members', distinct=True), num_teams=Count('teams', distinct=True)).values(
|
||||
'name', 'id', 'num_users', 'num_teams'
|
||||
):
|
||||
counts[org['id']] = {'name': org['name'], 'users': org['num_users'], 'teams': org['num_teams']}
|
||||
return counts
|
||||
|
||||
|
||||
|
||||
|
||||
@register('cred_type_counts', '1.0', description=_('Counts of credentials by credential type'))
|
||||
def cred_type_counts(since, **kwargs):
|
||||
counts = {}
|
||||
for cred_type in models.CredentialType.objects.annotate(num_credentials=Count(
|
||||
'credentials', distinct=True)).values('name', 'id', 'managed_by_tower', 'num_credentials'):
|
||||
counts[cred_type['id']] = {'name': cred_type['name'],
|
||||
'credential_count': cred_type['num_credentials'],
|
||||
'managed_by_tower': cred_type['managed_by_tower']
|
||||
}
|
||||
for cred_type in models.CredentialType.objects.annotate(num_credentials=Count('credentials', distinct=True)).values(
|
||||
'name', 'id', 'managed_by_tower', 'num_credentials'
|
||||
):
|
||||
counts[cred_type['id']] = {
|
||||
'name': cred_type['name'],
|
||||
'credential_count': cred_type['num_credentials'],
|
||||
'managed_by_tower': cred_type['managed_by_tower'],
|
||||
}
|
||||
return counts
|
||||
|
||||
|
||||
|
||||
|
||||
@register('inventory_counts', '1.2', description=_('Inventories, their inventory sources, and host counts'))
|
||||
def inventory_counts(since, **kwargs):
|
||||
counts = {}
|
||||
for inv in models.Inventory.objects.filter(kind='').annotate(num_sources=Count('inventory_sources', distinct=True),
|
||||
num_hosts=Count('hosts', distinct=True)).only('id', 'name', 'kind'):
|
||||
for inv in (
|
||||
models.Inventory.objects.filter(kind='')
|
||||
.annotate(num_sources=Count('inventory_sources', distinct=True), num_hosts=Count('hosts', distinct=True))
|
||||
.only('id', 'name', 'kind')
|
||||
):
|
||||
source_list = []
|
||||
for source in inv.inventory_sources.filter().annotate(num_hosts=Count('hosts', distinct=True)).values('name','source', 'num_hosts'):
|
||||
for source in inv.inventory_sources.filter().annotate(num_hosts=Count('hosts', distinct=True)).values('name', 'source', 'num_hosts'):
|
||||
source_list.append(source)
|
||||
counts[inv.id] = {'name': inv.name,
|
||||
'kind': inv.kind,
|
||||
'hosts': inv.num_hosts,
|
||||
'sources': inv.num_sources,
|
||||
'source_list': source_list
|
||||
}
|
||||
counts[inv.id] = {'name': inv.name, 'kind': inv.kind, 'hosts': inv.num_hosts, 'sources': inv.num_sources, 'source_list': source_list}
|
||||
|
||||
for smart_inv in models.Inventory.objects.filter(kind='smart'):
|
||||
counts[smart_inv.id] = {'name': smart_inv.name,
|
||||
'kind': smart_inv.kind,
|
||||
'hosts': smart_inv.hosts.count(),
|
||||
'sources': 0,
|
||||
'source_list': []
|
||||
}
|
||||
counts[smart_inv.id] = {'name': smart_inv.name, 'kind': smart_inv.kind, 'hosts': smart_inv.hosts.count(), 'sources': 0, 'source_list': []}
|
||||
return counts
|
||||
|
||||
|
||||
@register('projects_by_scm_type', '1.0', description=_('Counts of projects by source control type'))
|
||||
def projects_by_scm_type(since, **kwargs):
|
||||
counts = dict(
|
||||
(t[0] or 'manual', 0)
|
||||
for t in models.Project.SCM_TYPE_CHOICES
|
||||
)
|
||||
for result in models.Project.objects.values('scm_type').annotate(
|
||||
count=Count('scm_type')
|
||||
).order_by('scm_type'):
|
||||
counts = dict((t[0] or 'manual', 0) for t in models.Project.SCM_TYPE_CHOICES)
|
||||
for result in models.Project.objects.values('scm_type').annotate(count=Count('scm_type')).order_by('scm_type'):
|
||||
counts[result['scm_type'] or 'manual'] = result['count']
|
||||
return counts
|
||||
|
||||
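Note: projects_by_scm_type() seeds a zero for every SCM choice (mapping the empty string to 'manual') before overwriting from the aggregate, so types with no projects still report 0. A sketch of a possible result; the type names and numbers are illustrative only:

# {'manual': 2, 'git': 40, 'svn': 0, 'hg': 1}
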
@@ -172,10 +231,10 @@ def _get_isolated_datetime(last_check):
def instance_info(since, include_hostnames=False, **kwargs):
    info = {}
    instances = models.Instance.objects.values_list('hostname').values(
        'uuid', 'version', 'capacity', 'cpu', 'memory', 'managed_by_policy', 'hostname', 'last_isolated_check', 'enabled'
    )
    for instance in instances:
        consumed_capacity = sum(x.task_impact for x in models.UnifiedJob.objects.filter(execution_node=instance['hostname'], status__in=('running', 'waiting')))
        instance_info = {
            'uuid': instance['uuid'],
            'version': instance['version'],
@@ -186,7 +245,7 @@ def instance_info(since, include_hostnames=False, **kwargs):
            'last_isolated_check': _get_isolated_datetime(instance['last_isolated_check']),
            'enabled': instance['enabled'],
            'consumed_capacity': consumed_capacity,
            'remaining_capacity': instance['capacity'] - consumed_capacity,
        }
        if include_hostnames is True:
            instance_info['hostname'] = instance['hostname']
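Note: the per-instance capacity math above is plain subtraction: consumed_capacity sums task_impact across that node's running or waiting jobs, and remaining_capacity is whatever is left. A toy example with illustrative numbers:

# capacity = 100, two active jobs with task_impact 20 and 15
# consumed_capacity  = 20 + 15    # 35
# remaining_capacity = 100 - 35   # 65
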
@@ -198,20 +257,22 @@ def job_counts(since, **kwargs):
    counts = {}
    counts['total_jobs'] = models.UnifiedJob.objects.exclude(launch_type='sync').count()
    counts['status'] = dict(models.UnifiedJob.objects.exclude(launch_type='sync').values_list('status').annotate(Count('status')).order_by())
    counts['launch_type'] = dict(models.UnifiedJob.objects.exclude(launch_type='sync').values_list('launch_type').annotate(Count('launch_type')).order_by())
    return counts


def job_instance_counts(since, **kwargs):
    counts = {}
    job_types = (
        models.UnifiedJob.objects.exclude(launch_type='sync')
        .values_list('execution_node', 'launch_type')
        .annotate(job_launch_type=Count('launch_type'))
        .order_by()
    )
    for job in job_types:
        counts.setdefault(job[0], {}).setdefault('launch_type', {})[job[1]] = job[2]

    job_statuses = models.UnifiedJob.objects.exclude(launch_type='sync').values_list('execution_node', 'status').annotate(job_status=Count('status')).order_by()
    for job in job_statuses:
        counts.setdefault(job[0], {}).setdefault('status', {})[job[1]] = job[2]
    return counts
@@ -261,12 +322,15 @@ class FileSplitter(io.StringIO):
        self.files = self.files[:-1]
        # If we only have one file, remove the suffix
        if len(self.files) == 1:
            filename = self.files.pop()
            new_filename = filename.replace('_split0', '')
            os.rename(filename, new_filename)
            self.files.append(new_filename)
        return self.files

    def write(self, s):
        if not self.header:
            self.header = s[: s.index('\n')]
        self.counter += self.currentfile.write(s)
        if self.counter >= MAX_TABLE_SIZE:
            self.cycle_file()
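Note: FileSplitter is what shards oversized CSV exports: write() captures the header line once, counts bytes written, and cycles to a new '_splitN' file whenever MAX_TABLE_SIZE is exceeded; the rename above collapses the suffix when only one shard was needed. A rough sketch of the contract; the constructor argument name is an assumption, since it is not shown in this hunk:

# splitter = FileSplitter(filespec='/tmp/events_table')  # hypothetical argument name
# splitter.write(csv_text)   # cycles files once MAX_TABLE_SIZE bytes are written
# splitter.file_list()       # ['/tmp/events_table'] or ['..._split0', '..._split1', ...]
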
@@ -280,38 +344,42 @@ def _copy_table(table, query, path):
    return file.file_list()


@register('events_table', '1.2', format='csv', description=_('Automation task records'), expensive=events_slicing)
def events_table(since, full_path, until, **kwargs):
    def query(event_data):
        return f'''COPY (SELECT main_jobevent.id,
                         main_jobevent.created,
                         main_jobevent.modified,
                         main_jobevent.uuid,
                         main_jobevent.parent_uuid,
                         main_jobevent.event,
                         {event_data}->'task_action' AS task_action,
                         (CASE WHEN event = 'playbook_on_stats' THEN event_data END) as playbook_on_stats,
                         main_jobevent.failed,
                         main_jobevent.changed,
                         main_jobevent.playbook,
                         main_jobevent.play,
                         main_jobevent.task,
                         main_jobevent.role,
                         main_jobevent.job_id,
                         main_jobevent.host_id,
                         main_jobevent.host_name,
                         CAST({event_data}->>'start' AS TIMESTAMP WITH TIME ZONE) AS start,
                         CAST({event_data}->>'end' AS TIMESTAMP WITH TIME ZONE) AS end,
                         {event_data}->'duration' AS duration,
                         {event_data}->'res'->'warnings' AS warnings,
                         {event_data}->'res'->'deprecations' AS deprecations
                         FROM main_jobevent
                         WHERE (main_jobevent.id > {since} AND main_jobevent.id <= {until})
                         ORDER BY main_jobevent.id ASC) TO STDOUT WITH CSV HEADER'''

    try:
        return _copy_table(table='events', query=query("main_jobevent.event_data::json"), path=full_path)
    except UntranslatableCharacter:
        return _copy_table(table='events', query=query("replace(main_jobevent.event_data::text, '\\u0000', '')::json"), path=full_path)
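Note: the except branch exists because Postgres cannot cast JSON text containing the escaped NUL sequence; on UntranslatableCharacter the COPY is retried with that sequence stripped before the ::json cast. A minimal Python illustration of what the SQL replace() accomplishes; the sample data is illustrative:

# raw = '{"stdout": "x\\u0000y"}'       # stored event_data text with an escaped NUL
# cleaned = raw.replace('\\u0000', '')  # mirrors replace(main_jobevent.event_data::text, ...)
# json.loads(cleaned)                   # now parses/casts cleanly
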

@register('unified_jobs_table', '1.2', format='csv', description=_('Data on jobs run'), expensive=four_hour_slicing)
def unified_jobs_table(since, full_path, until, **kwargs):
    unified_job_query = '''COPY (SELECT main_unifiedjob.id,
                   main_unifiedjob.polymorphic_ctype_id,
@@ -334,7 +402,9 @@ def unified_jobs_table(since, full_path, until, **kwargs):
                   main_unifiedjob.finished,
                   main_unifiedjob.elapsed,
                   main_unifiedjob.job_explanation,
                   main_unifiedjob.instance_group_id,
                   main_unifiedjob.installed_collections,
                   main_unifiedjob.ansible_version
                   FROM main_unifiedjob
                   JOIN django_content_type ON main_unifiedjob.polymorphic_ctype_id = django_content_type.id
                   LEFT JOIN main_job ON main_unifiedjob.id = main_job.unifiedjob_ptr_id
@@ -344,7 +414,9 @@ def unified_jobs_table(since, full_path, until, **kwargs):
                   OR (main_unifiedjob.finished > '{0}' AND main_unifiedjob.finished <= '{1}'))
                   AND main_unifiedjob.launch_type != 'sync'
                   ORDER BY main_unifiedjob.id ASC) TO STDOUT WITH CSV HEADER
                   '''.format(
        since.isoformat(), until.isoformat()
    )
    return _copy_table(table='unified_jobs', query=unified_job_query, path=full_path)
@@ -367,11 +439,11 @@ def unified_job_template_table(since, full_path, **kwargs):
                   main_unifiedjobtemplate.status
                   FROM main_unifiedjobtemplate, django_content_type
                   WHERE main_unifiedjobtemplate.polymorphic_ctype_id = django_content_type.id
                   ORDER BY main_unifiedjobtemplate.id ASC) TO STDOUT WITH CSV HEADER'''
    return _copy_table(table='unified_job_template', query=unified_job_template_query, path=full_path)


@register('workflow_job_node_table', '1.0', format='csv', description=_('Data on workflow runs'), expensive=four_hour_slicing)
def workflow_job_node_table(since, full_path, until, **kwargs):
    workflow_job_node_query = '''COPY (SELECT main_workflowjobnode.id,
                   main_workflowjobnode.created,
@@ -403,7 +475,9 @@ def workflow_job_node_table(since, full_path, until, **kwargs):
                   ) always_nodes ON main_workflowjobnode.id = always_nodes.from_workflowjobnode_id
                   WHERE (main_workflowjobnode.modified > '{}' AND main_workflowjobnode.modified <= '{}')
                   ORDER BY main_workflowjobnode.id ASC) TO STDOUT WITH CSV HEADER
                   '''.format(
        since.isoformat(), until.isoformat()
    )
    return _copy_table(table='workflow_job_node', query=workflow_job_node_query, path=full_path)
@@ -435,5 +509,5 @@ def workflow_job_template_node_table(since, full_path, **kwargs):
                   FROM main_workflowjobtemplatenode_always_nodes
                   GROUP BY from_workflowjobtemplatenode_id
                   ) always_nodes ON main_workflowjobtemplatenode.id = always_nodes.from_workflowjobtemplatenode_id
                   ORDER BY main_workflowjobtemplatenode.id ASC) TO STDOUT WITH CSV HEADER'''
    return _copy_table(table='workflow_job_template_node', query=workflow_job_template_node_query, path=full_path)
@@ -1,20 +1,25 @@
import inspect
import io
import json
import logging
import os
import os.path
import pathlib
import shutil
import tarfile
import tempfile

from django.conf import settings
from django.core.serializers.json import DjangoJSONEncoder
from django.utils.timezone import now, timedelta
from rest_framework.exceptions import PermissionDenied
import requests

from awx.conf.license import get_license
from awx.main.models import Job
from awx.main.access import access_registry
from awx.main.utils import get_awx_http_client_headers, set_environ, datetime_hook
from awx.main.utils.pglock import advisory_lock

__all__ = ['register', 'gather', 'ship']
@@ -36,29 +41,18 @@ def _valid_license():
def all_collectors():
    from awx.main.analytics import collectors

    return {
        func.__awx_analytics_key__: {
            'name': func.__awx_analytics_key__,
            'version': func.__awx_analytics_version__,
            'description': func.__awx_analytics_description__ or '',
        }
        for name, func in inspect.getmembers(collectors)
        if inspect.isfunction(func) and hasattr(func, '__awx_analytics_key__')
    }


def expensive_collectors():
    from awx.main.analytics import collectors

    ret = []
    module = collectors
    for name, func in inspect.getmembers(module):
        if inspect.isfunction(func) and hasattr(func, '__awx_analytics_key__') and func.__awx_expensive__:
            ret.append(func.__awx_analytics_key__)
    return ret
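Note: all_collectors() produces the metadata recorded alongside each archive; a sketch of the returned mapping, using register() values that appear elsewhere in this diff:

# {
#     'events_table': {'name': 'events_table', 'version': '1.2', 'description': 'Automation task records'},
#     'org_counts': {'name': 'org_counts', 'version': '1.0', 'description': 'Counts of users and teams by organization'},
# }
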
def register(key, version, description=None, format='json', expensive=None):
    """
    A decorator used to register a function as a metric collector.
@@ -82,145 +76,217 @@ def register(key, version, description=None, format='json', expensive=False):
    return decorate
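Note: registering a new collector only takes the decorator; a minimal sketch, where the my_counts name and body are hypothetical:

# @register('my_counts', '1.0', description=_('Example counts'))
# def my_counts(since, **kwargs):
#     # a 'json'-format collector returns any json-serializable object
#     return {'example': 1}
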

def package(target, data, timestamp):
    try:
        tarname_base = f'{settings.SYSTEM_UUID}-{timestamp.strftime("%Y-%m-%d-%H%M%S%z")}'
        path = pathlib.Path(target)
        index = len(list(path.glob(f'{tarname_base}-*.*')))
        tarname = f'{tarname_base}-{index}.tar.gz'

        manifest = {}
        with tarfile.open(target.joinpath(tarname), 'w:gz') as f:
            for name, (item, version) in data.items():
                try:
                    if isinstance(item, str):
                        f.add(item, arcname=f'./{name}')
                    else:
                        buf = json.dumps(item).encode('utf-8')
                        info = tarfile.TarInfo(f'./{name}')
                        info.size = len(buf)
                        info.mtime = timestamp.timestamp()
                        f.addfile(info, fileobj=io.BytesIO(buf))
                    manifest[name] = version
                except Exception:
                    logger.exception(f"Could not generate metric {name}")
                    return None

            try:
                buf = json.dumps(manifest).encode('utf-8')
                info = tarfile.TarInfo('./manifest.json')
                info.size = len(buf)
                info.mtime = timestamp.timestamp()
                f.addfile(info, fileobj=io.BytesIO(buf))
            except Exception:
                logger.exception("Could not generate manifest.json")
                return None

        return f.name
    except Exception:
        logger.exception("Failed to write analytics archive file")
        return None
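Note: package() expects a mapping of archive member name to an (item, version) pair: string items are treated as paths and added as files, anything else is JSON-encoded in memory, and the versions become manifest.json. A sketch of the payload shape; the paths and versions are illustrative:

# data = {
#     'config.json': ({'platform': '...'}, '1.1'),            # dict, serialized into the tar
#     'events_table.csv': ('/tmp/stage/events.csv', '1.2'),   # str, added as a file
# }
# tarball = package(pathlib.Path('/tmp'), data, now())
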

def gather(dest=None, module=None, subset=None, since=None, until=None, collection_type='scheduled'):
    """
    Gather all defined metrics and write them as JSON files in a .tgz

    :param dest:   the (optional) absolute path to write a compressed tarball
    :param module: the module to search for registered analytic collector
                   functions; defaults to awx.main.analytics.collectors
    """
    log_level = logging.ERROR if collection_type != 'scheduled' else logging.DEBUG

    if not _valid_license():
        logger.log(log_level, "Invalid License provided, or No License Provided")
        return None

    if collection_type != 'dry-run':
        if not settings.INSIGHTS_TRACKING_STATE:
            logger.log(log_level, "Automation Analytics not enabled. Use --dry-run to gather locally without sending.")
            return None

        if not (settings.AUTOMATION_ANALYTICS_URL and settings.REDHAT_USERNAME and settings.REDHAT_PASSWORD):
            logger.log(log_level, "Not gathering analytics, configuration is invalid. Use --dry-run to gather locally without sending.")
            return None

    with advisory_lock('gather_analytics_lock', wait=False) as acquired:
        if not acquired:
            logger.log(log_level, "Not gathering analytics, another task holds lock")
            return None

        from awx.conf.models import Setting
        from awx.main.analytics import collectors
        from awx.main.signals import disable_activity_stream

        _now = now()

        # Make sure that the endpoints are not in the future.
        until = None if until is None else min(until, _now)
        since = None if since is None else min(since, _now)

        if since and not until:
            # If `since` is explicit but not `until`, `since` should be used to calculate the 4-week limit
            until = min(since + timedelta(weeks=4), _now)
        else:
            until = _now if until is None else until

        horizon = until - timedelta(weeks=4)
        if since is not None:
            # Make sure the start isn't more than 4 weeks prior to `until`.
            since = max(since, horizon)

        if since and since >= until:
            logger.warning("Start of the collection interval is later than the end, ignoring request.")
            return None

        logger.debug("Last analytics run was: {}".format(settings.AUTOMATION_ANALYTICS_LAST_GATHER))
        # LAST_GATHER time should always get truncated to less than 4 weeks back.
        last_gather = max(settings.AUTOMATION_ANALYTICS_LAST_GATHER or horizon, horizon)

        last_entries = Setting.objects.filter(key='AUTOMATION_ANALYTICS_LAST_ENTRIES').first()
        last_entries = json.loads((last_entries.value if last_entries is not None else '') or '{}', object_hook=datetime_hook)

        collector_module = module if module else collectors
        collector_list = [
            func
            for name, func in inspect.getmembers(collector_module)
            if inspect.isfunction(func) and hasattr(func, '__awx_analytics_key__') and (not subset or name in subset)
        ]
        if not any(c.__awx_analytics_key__ == 'config' for c in collector_list):
            # In order to ship to analytics, we must include the output of the built-in 'config' collector.
            collector_list.append(collectors.config)

        json_collectors = [func for func in collector_list if func.__awx_analytics_type__ == 'json']
        csv_collectors = [func for func in collector_list if func.__awx_analytics_type__ == 'csv']

        dest = pathlib.Path(dest or tempfile.mkdtemp(prefix='awx_analytics'))
        gather_dir = dest.joinpath('stage')
        gather_dir.mkdir(mode=0o700)
        tarfiles = []
        succeeded = True

        # These json collectors are pretty compact, so collect all of them before shipping to analytics.
        data = {}
        for func in json_collectors:
            key = func.__awx_analytics_key__
            filename = f'{key}.json'
            try:
                last_entry = max(last_entries.get(key) or last_gather, horizon)
                results = (func(since or last_entry, collection_type=collection_type, until=until), func.__awx_analytics_version__)
                json.dumps(results)  # throwaway check to see if the data is json-serializable
                data[filename] = results
            except Exception:
                logger.exception("Could not generate metric {}".format(filename))

        if data:
            if data.get('config.json') is None:
                logger.error("'config' collector data is missing.")
                return None

            tgzfile = package(dest.parent, data, until)
            if tgzfile is not None:
                tarfiles.append(tgzfile)
                if collection_type != 'dry-run':
                    if ship(tgzfile):
                        with disable_activity_stream():
                            for filename in data:
                                key = filename.replace('.json', '')
                                last_entries[key] = max(last_entries[key], until) if last_entries.get(key) else until
                            settings.AUTOMATION_ANALYTICS_LAST_ENTRIES = json.dumps(last_entries, cls=DjangoJSONEncoder)
                    else:
                        succeeded = False

        for func in csv_collectors:
            key = func.__awx_analytics_key__
            filename = f'{key}.csv'
            try:
                # These slicer functions may return a generator. The `since` parameter is
                # allowed to be None, and will fall back to LAST_ENTRIES[key] or to
                # LAST_GATHER (truncated appropriately to match the 4-week limit).
                if func.__awx_expensive__:
                    slices = func.__awx_expensive__(key, since, until)
                else:
                    slices = collectors.trivial_slicing(key, since, until)

                for start, end in slices:
                    files = func(start, full_path=gather_dir, until=end)

                    if not files:
                        if collection_type != 'dry-run':
                            with disable_activity_stream():
                                last_entries[key] = max(last_entries[key], end) if last_entries.get(key) else end
                                settings.AUTOMATION_ANALYTICS_LAST_ENTRIES = json.dumps(last_entries, cls=DjangoJSONEncoder)
                        continue

                    slice_succeeded = True
                    for fpath in files:
                        payload = {filename: (fpath, func.__awx_analytics_version__)}

                        payload['config.json'] = data.get('config.json')
                        if payload['config.json'] is None:
                            logger.error("'config' collector data is missing, and is required to ship.")
                            return None

                        tgzfile = package(dest.parent, payload, until)
                        if tgzfile is not None:
                            tarfiles.append(tgzfile)
                            if collection_type != 'dry-run':
                                if not ship(tgzfile):
                                    slice_succeeded, succeeded = False, False
                                    break

                    if slice_succeeded and collection_type != 'dry-run':
                        with disable_activity_stream():
                            last_entries[key] = max(last_entries[key], end) if last_entries.get(key) else end
                            settings.AUTOMATION_ANALYTICS_LAST_ENTRIES = json.dumps(last_entries, cls=DjangoJSONEncoder)
            except Exception:
                succeeded = False
                logger.exception("Could not generate metric {}".format(filename))

        if collection_type != 'dry-run':
            if succeeded:
                for fpath in tarfiles:
                    if os.path.exists(fpath):
                        os.remove(fpath)
            with disable_activity_stream():
                if not settings.AUTOMATION_ANALYTICS_LAST_GATHER or until > settings.AUTOMATION_ANALYTICS_LAST_GATHER:
                    settings.AUTOMATION_ANALYTICS_LAST_GATHER = until

        shutil.rmtree(dest, ignore_errors=True)  # clean up individual artifact files
        if not tarfiles:
            # No data was collected
            logger.warning("No data from {} to {}".format(since or last_gather, until))
            return None

        return tarfiles
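Note: a dry run exercises the whole collection path without shipping; a usage sketch, with an illustrative destination:

# tarballs = gather(dest='/tmp/awx_analytics_demo', collection_type='dry-run')
# returns a list of .tar.gz paths, or None when licensing, configuration,
# or the advisory lock stops the run
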

def ship(path):
@@ -229,41 +295,38 @@ def ship(path):
    """
    if not path:
        logger.error('Automation Analytics TAR not found')
        return False
    if not os.path.exists(path):
        logger.error('Automation Analytics TAR {} not found'.format(path))
        return False
    if "Error:" in str(path):
        return False

    logger.debug('shipping analytics file: {}'.format(path))
    url = getattr(settings, 'AUTOMATION_ANALYTICS_URL', None)
    if not url:
        logger.error('AUTOMATION_ANALYTICS_URL is not set')
        return False
    rh_user = getattr(settings, 'REDHAT_USERNAME', None)
    rh_password = getattr(settings, 'REDHAT_PASSWORD', None)
    if not rh_user:
        logger.error('REDHAT_USERNAME is not set')
        return False
    if not rh_password:
        logger.error('REDHAT_PASSWORD is not set')
        return False
    with open(path, 'rb') as f:
        files = {'file': (os.path.basename(path), f, settings.INSIGHTS_AGENT_MIME)}
        s = requests.Session()
        s.headers = get_awx_http_client_headers()
        s.headers.pop('Content-Type')
        with set_environ(**settings.AWX_TASK_ENV):
            response = s.post(
                url, files=files, verify="/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem", auth=(rh_user, rh_password), headers=s.headers, timeout=(31, 31)
            )
        # Accept 2XX status_codes
        if response.status_code >= 300:
            logger.error('Upload failed with status {}, {}'.format(response.status_code, response.text))
            return False

    return True
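Note: ship() is the upload half of the pipeline: it POSTs one tarball produced by package(), and its boolean result is what gather() uses to decide whether to advance the LAST_ENTRIES/LAST_GATHER bookmarks. A usage sketch with an illustrative path:

# ok = ship('/tmp/<uuid>-2021-01-01-000000+0000-0.tar.gz')
# True only on a 2XX response from AUTOMATION_ANALYTICS_URL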