Mirror of https://github.com/ansible/awx.git (synced 2026-02-05 19:44:43 -03:30)

Compare commits: 720 commits
(Commit listing: 720 commits in this range, from f2f42c2c8a through 8505783350. Only the abbreviated SHA values survived extraction; the author, date, and message columns were empty in the source and are omitted here.)
.travis.yml (31 lines, file deleted)
@@ -1,31 +0,0 @@
-sudo: false
-language: python
-python:
-- '2.7'
-env:
-- TOXENV=api-lint
-- TOXENV=api
-- TOXENV=ui-lint
-- TOXENV=ui
-install:
-- pip install tox
-script:
-- tox
-# after_success:
-#   - TOXENV=coveralls tox
-addons:
-  apt:
-    packages:
-    - swig
-    - libxmlsec1-dev
-    - postgresql-9.5
-    - libssl-dev
-cache:
-  pip: true
-  directories:
-  - node_modules
-  - .tox
-services:
-- mongodb
-# Enable when we stop using sqlite for API tests
-#   - postgresql
CONTRIBUTING.md

@@ -24,6 +24,7 @@ Have questions about this document or anything not covered here? Come chat with
   * [Start a shell](#start-the-shell)
   * [Create a superuser](#create-a-superuser)
   * [Load the data](#load-the-data)
+  * [Building API Documentation](#build-documentation)
   * [Accessing the AWX web interface](#accessing-the-awx-web-interface)
   * [Purging containers and images](#purging-containers-and-images)
 * [What should I work on?](#what-should-i-work-on)

@@ -57,7 +58,7 @@ For Linux platforms, refer to the following from Docker:

 > https://docs.docker.com/engine/installation/linux/docker-ce/fedora/

-**Centos**
+**CentOS**

 > https://docs.docker.com/engine/installation/linux/docker-ce/centos/

@@ -217,7 +218,7 @@ If you want to start and use the development environment, you'll first need to b
 (container)# /bootstrap_development.sh
 ```

-The above will do all the setup tasks, including running database migrations, so it amy take a couple minutes.
+The above will do all the setup tasks, including running database migrations, so it may take a couple minutes.

 Now you can start each service individually, or start all services in a pre-configured tmux session like so:

@@ -261,6 +262,20 @@ You can optionally load some demo data. This will create a demo project, invento
 > This information will persist in the database running in the `tools_postgres_1` container, until the container is removed. You may periodically need to recreate
 this container, and thus the database, if the database schema changes in an upstream commit.

+##### Building API Documentation
+
+AWX includes support for building [Swagger/OpenAPI
+documentation](https://swagger.io). To build the documentation locally, run:
+
+```bash
+(container)/awx_devel$ make swagger
+```
+
+This will write a file named `swagger.json` that contains the API specification
+in OpenAPI format. A variety of online tools are available for translating
+this data into more consumable formats (such as HTML). http://editor.swagger.io
+is an example of one such service.
+
 ### Accessing the AWX web interface

 You can now log into the AWX web interface at [https://localhost:8043](https://localhost:8043), and access the API directly at [https://localhost:8043/api/](https://localhost:8043/api/).
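As a quick illustration of what can be done with the generated file (a hedged sketch, not part of the diff; it assumes only that `swagger.json` is a standard OpenAPI document with a top-level `paths` mapping):

```python
# List every endpoint and its HTTP verbs from the swagger.json produced by `make swagger`.
import json

with open('swagger.json') as f:
    spec = json.load(f)

for path, methods in sorted(spec.get('paths', {}).items()):
    # A path item may carry a shared "parameters" key alongside its verbs; skip it.
    verbs = ', '.join(m.upper() for m in methods if m != 'parameters')
    print('{}: {}'.format(path, verbs))
```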
@@ -281,7 +296,7 @@ For feature work, take a look at the current [Enhancements](https://github.com/a

 If it has someone assigned to it then that person is the person responsible for working the enhancement. If you feel like you could contribute then reach out to that person.

-Fixing bugs, adding translations, and updating the documentation are always appreciated, so reviewing the backlog of issues is always a good place to start.
+Fixing bugs, adding translations, and updating the documentation are always appreciated, so reviewing the backlog of issues is always a good place to start. For extra information on debugging tools, see [Debugging](https://github.com/ansible/awx/blob/devel/docs/debugging.md).

 **NOTE**

@@ -293,7 +308,7 @@ Fixing bugs, adding translations, and updating the documentation are always appr

 ## Submitting Pull Requests

-Fixes and Features for AWX will go through the Github pull request process. Submit your pull request (PR) agains the `devel` branch.
+Fixes and Features for AWX will go through the Github pull request process. Submit your pull request (PR) against the `devel` branch.

 Here are a few things you can do to help the visibility of your change, and increase the likelihood that it will be accepted:

@@ -312,7 +327,7 @@ It's generally a good idea to discuss features with us first by engaging us in t
 We like to keep our commit history clean, and will require resubmission of pull requests that contain merge commits. Use `git pull --rebase`, rather than
 `git pull`, and `git rebase`, rather than `git merge`.

-Sometimes it might take us a while to fully review your PR. We try to keep the `devel` branch in good working order, and so we review requests carefuly. Please be patient.
+Sometimes it might take us a while to fully review your PR. We try to keep the `devel` branch in good working order, and so we review requests carefully. Please be patient.

 All submitted PRs will have the linter and unit tests run against them, and the status reported in the PR.
INSTALL.md (133 changed lines)
@@ -13,24 +13,30 @@ This document provides a guide for installing AWX.
 - [Choose a deployment platform](#choose-a-deployment-platform)
 - [Official vs Building Images](#official-vs-building-images)
 - [OpenShift](#openshift)
-  - [Prerequisites](#prerequisites)
+  - [Prerequisites](#prerequisites-1)
   - [Deploying to Minishift](#deploying-to-minishift)
   - [Pre-build steps](#pre-build-steps)
   - [PostgreSQL](#postgresql)
   - [Start the build](#start-the-build)
   - [Post build](#post-build)
   - [Accessing AWX](#accessing-awx)
-- [Docker](#docker)
+- [Kubernetes](#kubernetes)
+  - [Prerequisites](#prerequisites-2)
+  - [Pre-build steps](#pre-build-steps-1)
+  - [Start the build](#start-the-build-1)
+  - [Accessing AWX](#accessing-awx-1)
+  - [SSL Termination](#ssl-termination)
+- [Docker or Docker Compose](#docker-or-docker-compose)
+  - [Prerequisites](#prerequisites-3)
+  - [Pre-build steps](#pre-build-steps-2)
   - [Deploying to a remote host](#deploying-to-a-remote-host)
   - [Inventory variables](#inventory-variables)
   - [Docker registry](#docker-registry)
   - [PostgreSQL](#postgresql-1)
   - [Proxy settings](#proxy-settings)
-  - [Start the build](#start-the-build-1)
+  - [Start the build](#start-the-build-2)
   - [Post build](#post-build-1)
-  - [Accessing AWX](#accessing-awx-1)
+  - [Accessing AWX](#accessing-awx-2)

@@ -54,7 +60,7 @@ Before you can run a deployment, you'll need the following installed in your loc
 - [Docker](https://docs.docker.com/engine/installation/)
 - [docker-py](https://github.com/docker/docker-py) Python module
 - [GNU Make](https://www.gnu.org/software/make/)
-- [Git](https://git-scm.com/)
+- [Git](https://git-scm.com/) Requires Version 1.8.4+

 ### System Requirements

@@ -63,7 +69,7 @@ The system that runs the AWX service will need to satisfy the following requirem
 - At least 4GB of memory
 - At least 2 cpu cores
 - At least 20GB of space
-- Running Docker or Openshift
+- Running Docker, Openshift, or Kubernetes

 ### AWX Tunables

@@ -71,11 +77,14 @@ The system that runs the AWX service will need to satisfy the following requirem

 ### Choose a deployment platform

-We currently support running AWX as a containerized application using Docker images deployed to either an OpenShift cluster, or a standalone Docker daemon. The remainder of this document will walk you through the process of building the images, and deploying them to either platform.
+We currently support running AWX as a containerized application using Docker images deployed to either an OpenShift cluster, docker-compose, or a standalone Docker daemon. The remainder of this document will walk you through the process of building the images, and deploying them to either platform.

 The [installer](./installer) directory contains an [inventory](./installer/inventory) file, and a playbook, [install.yml](./installer/install.yml). You'll begin by setting variables in the inventory file according to the platform you wish to use, and then you'll start the image build and deployment process by running the playbook.

-In the sections below, you'll find deployment details and instructions for each platform. To deploy to Docker, view the [Docker section](#docker), and for OpenShift, view the [OpenShift section](#openshift).
+In the sections below, you'll find deployment details and instructions for each platform:
+- [OpenShift](#openshift)
+- [Kubernetes](#kubernetes)
+- [Docker or Docker Compose](#docker-or-docker-compose)

 ### Official vs Building Images
@@ -133,10 +142,6 @@ Before starting the build process, review the [inventory](./installer/inventory)

 > Name of the OpenShift project that will be created, and used as the namespace for the AWX app. Defaults to *awx*.

-*awx_node_port*
-
-> The web server port running inside the AWX pod. Defaults to *30083*.
-
 *openshift_user*

 > Username of the OpenShift user that will create the project, and deploy the application. Defaults to *developer*.

@@ -144,7 +149,7 @@
 *docker_registry*

 > IP address and port, or URL, for accessing a registry that the OpenShift cluster can access. Defaults to *172.30.1.1:5000*, the internal registry delivered with Minishift. This is not needed if you are using official hosted images.

 *docker_registry_repository*

 > Namespace to use when pushing and pulling images to and from the registry. Generally this will match the project name. It defaults to *awx*. This is not needed if you are using official hosted images.
@@ -271,16 +276,88 @@ The above example is taken from a Minishift instance. From a web browser, use `h

 Once you access the AWX server, you will be prompted with a login dialog. The default administrator username is `admin`, and the password is `password`.

-## Docker
+## Kubernetes

 ### Prerequisites

-You will need the following installed on the host where AWX will be deployed:
+A Kubernetes deployment will require you to have access to a Kubernetes cluster as well as the following tools:

-- [Docker](https://docs.docker.com/engine/installation/)
-- [docker-py](https://github.com/docker/docker-py) Python module
+- [kubectl](https://kubernetes.io/docs/tasks/tools/install-kubectl/)
+- [helm](https://docs.helm.sh/using_helm/#quickstart-guide)

-Note: After installing Docker, the Docker service must be started.
+The installation program will reference `kubectl` directly. `helm` is only necessary if you are letting the installer configure PostgreSQL for you.

+### Pre-build steps
+
+Before starting the build process, review the [inventory](./installer/inventory) file, and uncomment and provide values for the following variables found in the `[all:vars]` section, uncommenting when necessary. Make sure the openshift and standalone docker sections are commented out:
+
+*kubernetes_context*
+
+> Prior to running the installer, make sure you've configured the context for the cluster you'll be installing to. This is how the installer knows which cluster to connect to and what authentication to use.
+
+*awx_kubernetes_namespace*
+
+> Name of the Kubernetes namespace where the AWX resources will be installed. This will be created if it doesn't exist.
+
+*docker_registry_*
+
+> These settings should be used if building your own base images. You'll need access to an external registry and are responsible for making sure your kube cluster can talk to it and use it. If these are undefined and the dockerhub_ configuration settings are uncommented then the images will be pulled from dockerhub instead.
+
+### Start the build
+
+After making changes to the `inventory` file, use `ansible-playbook` to begin the install:
+
+```bash
+$ ansible-playbook -i inventory install.yml
+```
+
+### Post build
+
+After the playbook run completes, check the status of the deployment by running `kubectl get pods --namespace awx` (replace awx with the namespace you used):
+
+```bash
+# View the running pods; it may take a few minutes for everything to be marked in the Running state
+$ kubectl get pods --namespace awx
+NAME                             READY     STATUS    RESTARTS   AGE
+awx-2558692395-2r8ss             4/4       Running   0          29s
+awx-postgresql-355348841-kltkn   1/1       Running   0          1m
+```
+
+### Accessing AWX
+
+The AWX web interface is running in the AWX pod behind the `awx-web-svc` service:
+
+```bash
+# View available services
+$ kubectl get svc --namespace awx
+NAME             TYPE        CLUSTER-IP     EXTERNAL-IP   PORT(S)        AGE
+awx-postgresql   ClusterIP   10.7.250.208   <none>        5432/TCP       2m
+awx-web-svc      NodePort    10.7.241.35    <none>        80:30177/TCP   1m
+```
+
+The deployment process also creates an `Ingress` named `awx-web-svc`. Some Kubernetes cloud providers will automatically handle routing configuration when an Ingress is created; others may require that you configure it more explicitly. You can see what Kubernetes knows about it with:
+
+```bash
+kubectl get ing --namespace awx
+NAME          HOSTS     ADDRESS      PORTS     AGE
+awx-web-svc   *         35.227.x.y   80        3m
+```
+
+If your provider is able to allocate an IP address from the Ingress controller then you can navigate to the address and access the AWX interface. For some providers it can take a few minutes to allocate and make this accessible. Other providers may require you to intervene manually.
+
+### SSL Termination
+
+Unlike OpenShift's `Route`, the Kubernetes `Ingress` doesn't yet handle SSL termination. As such, the default configuration will only expose AWX through HTTP on port 80. You are responsible for configuring SSL support until support is added (either to Kubernetes or AWX itself).
+
+## Docker or Docker-Compose
+
+### Prerequisites
+
+- [Docker](https://docs.docker.com/engine/installation/) on the host where AWX will be deployed. After installing Docker, the Docker service must be started (depending on your OS, you may have to add the local user that uses Docker to the ``docker`` group; refer to the documentation for details).
+- [docker-py](https://github.com/docker/docker-py) Python module.
+
+If you're installing using Docker Compose, you'll need [Docker Compose](https://docs.docker.com/compose/install/).

 ### Pre-build steps
@@ -323,6 +400,13 @@ Before starting the build process, review the [inventory](./installer/inventory)

 > Provide a port number that can be mapped from the Docker daemon host to the web server running inside the AWX container. Defaults to *80*.

+*use_docker_compose*
+
+> Switch to ``true`` to use Docker Compose instead of the standalone Docker install.
+
+*docker_compose_dir*
+
+> When using docker-compose, the `docker-compose.yml` file will be created there (default `/var/lib/awx`).

 #### Docker registry

@@ -404,6 +488,8 @@ e240ed8209cd awx_task:1.0.0.8 "/tini -- /bin/sh ..." 2 minutes ago
 97e196120ab3        postgres:9.6        "docker-entrypoint..."   2 minutes ago       Up 2 minutes        5432/tcp            postgres
 ```

+If you're deploying using Docker Compose, container names will be prefixed by the name of the folder where the docker-compose.yml file is created (by default, `awx`).
+
 Immediately after the containers start, the *awx_task* container will perform required setup tasks, including database migrations. These tasks need to complete before the web interface can be accessed. To monitor the progress, you can follow the container's STDOUT by running the following:

 ```bash

@@ -466,3 +552,14 @@ Added instance awx to tower
 The AWX web server is accessible on the deployment host, using the *host_port* value set in the *inventory* file. The default URL is [http://localhost](http://localhost).

 You will be prompted with a login dialog. The default administrator username is `admin`, and the password is `password`.

+### Maintenance using docker-compose
+
+After the installation, maintenance operations with docker-compose can be done by using the `docker-compose.yml` file created at the location pointed to by `docker_compose_dir`.
+
+Among the possible operations, you may:
+
+- Stop AWX: `docker-compose stop`
+- Upgrade AWX: `docker-compose pull && docker-compose up --force-recreate`
+
+See the [docker-compose documentation](https://docs.docker.com/compose/) for details.
Makefile (27 changed lines)
@@ -12,10 +12,10 @@ MANAGEMENT_COMMAND ?= awx-manage
 IMAGE_REPOSITORY_AUTH ?=
 IMAGE_REPOSITORY_BASE ?= https://gcr.io

-VERSION=$(shell git describe --long)
-VERSION3=$(shell git describe --long | sed 's/\-g.*//')
-VERSION3DOT=$(shell git describe --long | sed 's/\-g.*//' | sed 's/\-/\./')
-RELEASE_VERSION=$(shell git describe --long | sed 's@\([0-9.]\{1,\}\).*@\1@')
+VERSION=$(shell git describe --long --first-parent)
+VERSION3=$(shell git describe --long --first-parent | sed 's/\-g.*//')
+VERSION3DOT=$(shell git describe --long --first-parent | sed 's/\-g.*//' | sed 's/\-/\./')
+RELEASE_VERSION=$(shell git describe --long --first-parent | sed 's@\([0-9.]\{1,\}\).*@\1@')

 # NOTE: This defaults the container image version to the branch that's active
 COMPOSE_TAG ?= $(GIT_BRANCH)
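To make the sed pipelines above concrete, here is a small sketch (illustrative only; the sample `git describe` string is made up) of what each derived variable evaluates to:

```python
# Mirror the Makefile's sed transformations on a sample `git describe --long` string.
import re

describe = '1.0.1-47-g2c71a27'                      # sample VERSION

version3 = re.sub(r'-g.*', '', describe)            # VERSION3        -> '1.0.1-47'
version3dot = version3.replace('-', '.', 1)         # VERSION3DOT     -> '1.0.1.47' (sed replaces only the first '-')
release = re.match(r'[0-9.]+', describe).group()    # RELEASE_VERSION -> '1.0.1'
print(version3, version3dot, release)
```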
@@ -23,7 +23,7 @@ COMPOSE_HOST ?= $(shell hostname)

 VENV_BASE ?= /venv
 SCL_PREFIX ?=
-CELERY_SCHEDULE_FILE ?= /celerybeat-schedule
+CELERY_SCHEDULE_FILE ?= /var/lib/awx/beat.db

 DEV_DOCKER_TAG_BASE ?= gcr.io/ansible-tower-engineering
 # Python packages to install only from source (not from binary wheels)

@@ -216,13 +216,11 @@ init:
 	. $(VENV_BASE)/awx/bin/activate; \
 	fi; \
 	$(MANAGEMENT_COMMAND) provision_instance --hostname=$(COMPOSE_HOST); \
-	$(MANAGEMENT_COMMAND) register_queue --queuename=tower --hostnames=$(COMPOSE_HOST);\
+	$(MANAGEMENT_COMMAND) register_queue --queuename=tower --instance_percent=100;\
 	if [ "$(AWX_GROUP_QUEUES)" == "tower,thepentagon" ]; then \
 		$(MANAGEMENT_COMMAND) provision_instance --hostname=isolated; \
 		$(MANAGEMENT_COMMAND) register_queue --queuename='thepentagon' --hostnames=isolated --controller=tower; \
 		$(MANAGEMENT_COMMAND) generate_isolated_key | ssh -o "StrictHostKeyChecking no" root@isolated 'cat > /root/.ssh/authorized_keys'; \
-	elif [ "$(AWX_GROUP_QUEUES)" != "tower" ]; then \
-		$(MANAGEMENT_COMMAND) register_queue --queuename=$(firstword $(subst $(comma), ,$(AWX_GROUP_QUEUES))) --hostnames=$(COMPOSE_HOST); \
 	fi;

 # Refresh development environment after pulling new code.

@@ -299,7 +297,7 @@ uwsgi: collectstatic
 	@if [ "$(VENV_BASE)" ]; then \
 	. $(VENV_BASE)/awx/bin/activate; \
 	fi; \
-	uwsgi -b 32768 --socket 127.0.0.1:8050 --module=awx.wsgi:application --home=/venv/awx --chdir=/awx_devel/ --vacuum --processes=5 --harakiri=120 --master --no-orphans --py-autoreload 1 --max-requests=1000 --stats /tmp/stats.socket --master-fifo=/awxfifo --lazy-apps --logformat "%(addr) %(method) %(uri) - %(proto) %(status)"
+	uwsgi -b 32768 --socket 127.0.0.1:8050 --module=awx.wsgi:application --home=/venv/awx --chdir=/awx_devel/ --vacuum --processes=5 --harakiri=120 --master --no-orphans --py-autoreload 1 --max-requests=1000 --stats /tmp/stats.socket --master-fifo=/awxfifo --lazy-apps --logformat "%(addr) %(method) %(uri) - %(proto) %(status)" --hook-accepting1-once="exec:/bin/sh -c '[ -f /tmp/celery_pid ] && kill -1 `cat /tmp/celery_pid` || true'"

 daphne:
 	@if [ "$(VENV_BASE)" ]; then \

@@ -322,10 +320,11 @@ runserver:

 # Run to start the background celery worker for development.
 celeryd:
+	rm -f /tmp/celery_pid
 	@if [ "$(VENV_BASE)" ]; then \
 	. $(VENV_BASE)/awx/bin/activate; \
 	fi; \
-	celery worker -A awx -l DEBUG -B -Ofair --autoscale=100,4 --schedule=$(CELERY_SCHEDULE_FILE) -Q tower_scheduler,tower_broadcast_all,$(COMPOSE_HOST),$(AWX_GROUP_QUEUES) -n celery@$(COMPOSE_HOST)
+	celery worker -A awx -l DEBUG -B -Ofair --autoscale=100,4 --schedule=$(CELERY_SCHEDULE_FILE) -Q tower_broadcast_all -n celery@$(COMPOSE_HOST) --pidfile /tmp/celery_pid

 # Run to start the zeromq callback receiver
 receiver:
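The `--hook-accepting1-once` flag above and the new celery `--pidfile` work as a pair: once uwsgi starts accepting requests, it sends SIGHUP (`kill -1`) to the celery worker whose pid was written to `/tmp/celery_pid`. A minimal Python sketch of the same nudge (illustrative only; the pidfile path is the one from the Makefile):

```python
# Equivalent of the shell command embedded in --hook-accepting1-once:
# if a celery pidfile exists, send that worker SIGHUP.
import os
import signal

PIDFILE = '/tmp/celery_pid'

if os.path.exists(PIDFILE):
    with open(PIDFILE) as f:
        pid = int(f.read().strip())
    os.kill(pid, signal.SIGHUP)  # SIGHUP is signal 1, same as `kill -1`
```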
@@ -364,6 +363,12 @@ pyflakes: reports

 pylint: reports
 	@(set -o pipefail && $@ | tee reports/$@.report)

+swagger: reports
+	@if [ "$(VENV_BASE)" ]; then \
+		. $(VENV_BASE)/awx/bin/activate; \
+	fi; \
+	(set -o pipefail && py.test awx/conf/tests/functional awx/main/tests/functional/api awx/main/tests/docs --release=$(VERSION_TARGET) | tee reports/$@.report)
+
 check: flake8 pep8 # pyflakes pylint

 TEST_DIRS ?= awx/main/tests/unit awx/main/tests/functional awx/conf/tests awx/sso/tests

@@ -607,7 +612,7 @@ clean-elk:
 	docker rm tools_kibana_1

 psql-container:
-	docker run -it --net tools_default --rm postgres:9.4.1 sh -c 'exec psql -h "postgres" -p "5432" -U postgres'
+	docker run -it --net tools_default --rm postgres:9.6 sh -c 'exec psql -h "postgres" -p "5432" -U postgres'

 VERSION:
 	@echo $(VERSION_TARGET) > $@
awx/api/filters.py

@@ -166,7 +166,13 @@ class FieldLookupBackend(BaseFilterBackend):
         elif isinstance(field, models.BooleanField):
             return to_python_boolean(value)
         elif isinstance(field, (ForeignObjectRel, ManyToManyField, GenericForeignKey, ForeignKey)):
-            return self.to_python_related(value)
+            try:
+                return self.to_python_related(value)
+            except ValueError:
+                raise ParseError(_('Invalid {field_name} id: {field_id}').format(
+                    field_name=getattr(field, 'name', 'related field'),
+                    field_id=value)
+                )
         else:
             return field.to_python(value)

@@ -243,11 +249,10 @@ class FieldLookupBackend(BaseFilterBackend):
         # Search across related objects.
         if key.endswith('__search'):
             for value in values:
-                for search_term in force_text(value).replace(',', ' ').split():
-                    search_value, new_keys = self.value_to_python(queryset.model, key, search_term)
-                    assert isinstance(new_keys, list)
-                    for new_key in new_keys:
-                        search_filters.append((new_key, search_value))
+                search_value, new_keys = self.value_to_python(queryset.model, key, force_text(value))
+                assert isinstance(new_keys, list)
+                for new_key in new_keys:
+                    search_filters.append((new_key, search_value))
             continue

         # Custom chain__ and or__ filters, mutually exclusive (both can
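One visible effect of the FieldLookupBackend change (a hedged illustration, not part of the diff; the endpoint, credentials, and exact error text are assumptions): a non-numeric id passed to a related-field filter now comes back as a clean HTTP 400 rather than an unhandled ValueError.

```python
# Query a list endpoint with a bogus related-field id and inspect the response.
import requests

r = requests.get(
    'https://localhost:8043/api/v2/job_templates/',
    params={'inventory': 'not-a-number'},
    auth=('admin', 'password'),
    verify=False,
)
print(r.status_code)  # expected: 400 (DRF ParseError), not a 500 traceback
print(r.json())       # e.g. {"detail": "Invalid inventory id: not-a-number"}
```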
awx/api/generics.py

@@ -5,6 +5,7 @@
 import inspect
 import logging
 import time
+import six

 # Django
 from django.conf import settings

@@ -21,31 +22,38 @@ from django.utils.translation import ugettext_lazy as _

 # Django REST Framework
 from rest_framework.authentication import get_authorization_header
-from rest_framework.exceptions import PermissionDenied
+from rest_framework.exceptions import PermissionDenied, AuthenticationFailed
 from rest_framework import generics
 from rest_framework.response import Response
 from rest_framework import status
 from rest_framework import views
+from rest_framework.permissions import AllowAny
+
+# cryptography
+from cryptography.fernet import InvalidToken

 # AWX
 from awx.api.filters import FieldLookupBackend
 from awx.main.models import * # noqa
 from awx.main.access import access_registry
 from awx.main.utils import * # noqa
 from awx.main.utils.db import get_all_field_names
-from awx.api.serializers import ResourceAccessListElementSerializer
+from awx.api.serializers import ResourceAccessListElementSerializer, CopySerializer
 from awx.api.versioning import URLPathVersioning, get_request_version
-from awx.api.metadata import SublistAttachDetatchMetadata
+from awx.api.metadata import SublistAttachDetatchMetadata, Metadata

 __all__ = ['APIView', 'GenericAPIView', 'ListAPIView', 'SimpleListAPIView',
            'ListCreateAPIView', 'SubListAPIView', 'SubListCreateAPIView',
+           'SubListDestroyAPIView',
            'SubListCreateAttachDetachAPIView', 'RetrieveAPIView',
            'RetrieveUpdateAPIView', 'RetrieveDestroyAPIView',
-           'RetrieveUpdateDestroyAPIView', 'DestroyAPIView',
+           'RetrieveUpdateDestroyAPIView',
            'SubDetailAPIView',
            'ResourceAccessList',
            'ParentMixin',
            'DeleteLastUnattachLabelMixin',
-           'SubListAttachDetachAPIView',]
+           'SubListAttachDetachAPIView',
+           'CopyAPIView']

 logger = logging.getLogger('awx.api.generics')
 analytics_logger = logging.getLogger('awx.analytics.performance')
@@ -89,8 +97,17 @@ def get_view_description(cls, request, html=False):
     return mark_safe(desc)


+def get_default_schema():
+    if settings.SETTINGS_MODULE == 'awx.settings.development':
+        from awx.api.swagger import AutoSchema
+        return AutoSchema()
+    else:
+        return views.APIView.schema
+
+
 class APIView(views.APIView):

+    schema = get_default_schema()
     versioning_class = URLPathVersioning

     def initialize_request(self, request, *args, **kwargs):

@@ -115,6 +132,10 @@ class APIView(views.APIView):

         drf_request = super(APIView, self).initialize_request(request, *args, **kwargs)
         request.drf_request = drf_request
-        request.drf_request_user = getattr(drf_request, 'user', False)
+        try:
+            request.drf_request_user = getattr(drf_request, 'user', False)
+        except AuthenticationFailed:
+            request.drf_request_user = None
         return drf_request

     def finalize_response(self, request, response, *args, **kwargs):

@@ -140,7 +161,6 @@ class APIView(views.APIView):
             response['X-API-Query-Count'] = len(q_times)
             response['X-API-Query-Time'] = '%0.3fs' % sum(q_times)

-        analytics_logger.info("api response", extra=dict(python_objects=dict(request=request, response=response)))
         return response

     def get_authenticate_header(self, request):

@@ -171,27 +191,14 @@ class APIView(views.APIView):
         and in the browsable API.
         """
         func = self.settings.VIEW_DESCRIPTION_FUNCTION
-        return func(self.__class__, self._request, html)
+        return func(self.__class__, getattr(self, '_request', None), html)

     def get_description_context(self):
         return {
             'view': self,
             'docstring': type(self).__doc__ or '',
-            'new_in_13': getattr(self, 'new_in_13', False),
-            'new_in_14': getattr(self, 'new_in_14', False),
-            'new_in_145': getattr(self, 'new_in_145', False),
-            'new_in_148': getattr(self, 'new_in_148', False),
-            'new_in_200': getattr(self, 'new_in_200', False),
-            'new_in_210': getattr(self, 'new_in_210', False),
-            'new_in_220': getattr(self, 'new_in_220', False),
-            'new_in_230': getattr(self, 'new_in_230', False),
-            'new_in_240': getattr(self, 'new_in_240', False),
-            'new_in_300': getattr(self, 'new_in_300', False),
-            'new_in_310': getattr(self, 'new_in_310', False),
-            'new_in_320': getattr(self, 'new_in_320', False),
-            'new_in_330': getattr(self, 'new_in_330', False),
-            'new_in_api_v2': getattr(self, 'new_in_api_v2', False),
             'deprecated': getattr(self, 'deprecated', False),
+            'swagger_method': getattr(self.request, 'swagger_method', None),
         }

     def get_description(self, request, html=False):

@@ -209,7 +216,7 @@ class APIView(views.APIView):
             context['deprecated'] = True

         description = render_to_string(template_list, context)
-        if context.get('deprecated'):
+        if context.get('deprecated') and context.get('swagger_method') is None:
             # render deprecation messages at the very top
             description = '\n'.join([render_to_string('api/_deprecated.md', context), description])
         return description

@@ -269,12 +276,17 @@ class GenericAPIView(generics.GenericAPIView, APIView):
         return serializer

     def get_queryset(self):
-        #if hasattr(self.request.user, 'get_queryset'):
-        #    return self.request.user.get_queryset(self.model)
         if self.queryset is not None:
             return self.queryset._clone()
         elif self.model is not None:
-            return self.model._default_manager.all()
+            qs = self.model._default_manager
+            if self.model in access_registry:
+                access_class = access_registry[self.model]
+                if access_class.select_related:
+                    qs = qs.select_related(*access_class.select_related)
+                if access_class.prefetch_related:
+                    qs = qs.prefetch_related(*access_class.prefetch_related)
+            return qs
         else:
             return super(GenericAPIView, self).get_queryset()
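For context on what `access_registry` supplies here, a hypothetical sketch follows: the attribute names `select_related`/`prefetch_related` come from the diff, but the model and field choices below are invented. An access class declares related fields to pre-join, and the new `get_queryset()` applies them automatically.

```python
# Hypothetical access class illustrating the attributes the new get_queryset() consults.
from awx.main.access import BaseAccess, register_access
from awx.main.models import JobTemplate


class JobTemplateAccess(BaseAccess):

    model = JobTemplate
    # Joined into the base query via qs.select_related(...)
    select_related = ('project', 'inventory',)
    # Fetched in batched follow-up queries via qs.prefetch_related(...)
    prefetch_related = ('labels', 'instance_groups',)


register_access(JobTemplate, JobTemplateAccess)
```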
@@ -442,6 +454,41 @@ class SubListAPIView(ParentMixin, ListAPIView):
         return qs & sublist_qs


+class DestroyAPIView(generics.DestroyAPIView):
+
+    def has_delete_permission(self, obj):
+        return self.request.user.can_access(self.model, 'delete', obj)
+
+    def perform_destroy(self, instance, check_permission=True):
+        if check_permission and not self.has_delete_permission(instance):
+            raise PermissionDenied()
+        super(DestroyAPIView, self).perform_destroy(instance)
+
+
+class SubListDestroyAPIView(DestroyAPIView, SubListAPIView):
+    """
+    Concrete view for deleting everything related by `relationship`.
+    """
+    check_sub_obj_permission = True
+
+    def destroy(self, request, *args, **kwargs):
+        instance_list = self.get_queryset()
+        if (not self.check_sub_obj_permission and
+                not request.user.can_access(self.parent_model, 'delete', self.get_parent_object())):
+            raise PermissionDenied()
+        self.perform_list_destroy(instance_list)
+        return Response(status=status.HTTP_204_NO_CONTENT)
+
+    def perform_list_destroy(self, instance_list):
+        if self.check_sub_obj_permission:
+            # Check permissions for all before deleting, avoiding half-deleted lists
+            for instance in instance_list:
+                if not self.has_delete_permission(instance):
+                    raise PermissionDenied()
+        for instance in instance_list:
+            self.perform_destroy(instance, check_permission=False)
+
+
 class SubListCreateAPIView(SubListAPIView, ListCreateAPIView):
     # Base class for a sublist view that allows for creating subobjects
     # associated with the parent object.
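A hypothetical concrete subclass (not in the diff; the model names and `relationship` value are invented for illustration) shows how a view would opt into the new bulk-delete behavior: DELETE on the sublist URL removes every related object, permission-checking each one before anything is deleted.

```python
# Hypothetical view built on the new SubListDestroyAPIView.
from awx.api.generics import SubListDestroyAPIView
from awx.api.serializers import HostSerializer
from awx.main.models import Host, Inventory


class InventoryHostsList(SubListDestroyAPIView):

    model = Host                      # objects reached through `relationship`
    serializer_class = HostSerializer
    parent_model = Inventory          # object addressed by the URL
    relationship = 'hosts'
    check_sub_obj_permission = True   # verify delete access on each host first
```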
@@ -680,22 +727,11 @@ class RetrieveUpdateAPIView(RetrieveAPIView, generics.RetrieveUpdateAPIView):
     pass


-class RetrieveDestroyAPIView(RetrieveAPIView, generics.RetrieveDestroyAPIView):
-
-    def destroy(self, request, *args, **kwargs):
-        # somewhat lame that delete has to call it's own permissions check
-        obj = self.get_object()
-        if not request.user.can_access(self.model, 'delete', obj):
-            raise PermissionDenied()
-        obj.delete()
-        return Response(status=status.HTTP_204_NO_CONTENT)
-
-
-class RetrieveUpdateDestroyAPIView(RetrieveUpdateAPIView, RetrieveDestroyAPIView):
+class RetrieveDestroyAPIView(RetrieveAPIView, DestroyAPIView):
     pass


-class DestroyAPIView(GenericAPIView, generics.DestroyAPIView):
+class RetrieveUpdateDestroyAPIView(RetrieveUpdateAPIView, DestroyAPIView):
     pass
@@ -713,3 +749,152 @@ class ResourceAccessList(ParentMixin, ListAPIView):
         for r in roles:
             ancestors.update(set(r.ancestors.all()))
         return User.objects.filter(roles__in=list(ancestors)).distinct()
+
+
+def trigger_delayed_deep_copy(*args, **kwargs):
+    from awx.main.tasks import deep_copy_model_obj
+    connection.on_commit(lambda: deep_copy_model_obj.delay(*args, **kwargs))
+
+
+class CopyAPIView(GenericAPIView):
+
+    serializer_class = CopySerializer
+    permission_classes = (AllowAny,)
+    copy_return_serializer_class = None
+    new_in_330 = True
+    new_in_api_v2 = True
+
+    def _get_copy_return_serializer(self, *args, **kwargs):
+        if not self.copy_return_serializer_class:
+            return self.get_serializer(*args, **kwargs)
+        serializer_class_store = self.serializer_class
+        self.serializer_class = self.copy_return_serializer_class
+        ret = self.get_serializer(*args, **kwargs)
+        self.serializer_class = serializer_class_store
+        return ret
+
+    @staticmethod
+    def _decrypt_model_field_if_needed(obj, field_name, field_val):
+        if field_name in getattr(type(obj), 'REENCRYPTION_BLACKLIST_AT_COPY', []):
+            return field_val
+        if isinstance(field_val, dict):
+            for sub_field in field_val:
+                if isinstance(sub_field, six.string_types) \
+                        and isinstance(field_val[sub_field], six.string_types):
+                    try:
+                        field_val[sub_field] = decrypt_field(obj, field_name, sub_field)
+                    except InvalidToken:
+                        # Catching the corner case with v1 credential fields
+                        field_val[sub_field] = decrypt_field(obj, sub_field)
+        elif isinstance(field_val, six.string_types):
+            field_val = decrypt_field(obj, field_name)
+        return field_val
+
+    def _build_create_dict(self, obj):
+        ret = {}
+        if self.copy_return_serializer_class:
+            all_fields = Metadata().get_serializer_info(
+                self._get_copy_return_serializer(), method='POST'
+            )
+            for field_name, field_info in all_fields.items():
+                if not hasattr(obj, field_name) or field_info.get('read_only', True):
+                    continue
+                ret[field_name] = CopyAPIView._decrypt_model_field_if_needed(
+                    obj, field_name, getattr(obj, field_name)
+                )
+        return ret
+
+    @staticmethod
+    def copy_model_obj(old_parent, new_parent, model, obj, creater, copy_name='', create_kwargs=None):
+        fields_to_preserve = set(getattr(model, 'FIELDS_TO_PRESERVE_AT_COPY', []))
+        fields_to_discard = set(getattr(model, 'FIELDS_TO_DISCARD_AT_COPY', []))
+        m2m_to_preserve = {}
+        o2m_to_preserve = {}
+        create_kwargs = create_kwargs or {}
+        for field_name in fields_to_discard:
+            create_kwargs.pop(field_name, None)
+        for field in model._meta.get_fields():
+            try:
+                field_val = getattr(obj, field.name)
+            except AttributeError:
+                continue
+            # Adjust copy blacklist fields here.
+            if field.name in fields_to_discard or field.name in [
+                'id', 'pk', 'polymorphic_ctype', 'unifiedjobtemplate_ptr', 'created_by', 'modified_by'
+            ] or field.name.endswith('_role'):
+                create_kwargs.pop(field.name, None)
+                continue
+            if field.one_to_many:
+                if field.name in fields_to_preserve:
+                    o2m_to_preserve[field.name] = field_val
+            elif field.many_to_many:
+                if field.name in fields_to_preserve and not old_parent:
+                    m2m_to_preserve[field.name] = field_val
+            elif field.many_to_one and not field_val:
+                create_kwargs.pop(field.name, None)
+            elif field.many_to_one and field_val == old_parent:
+                create_kwargs[field.name] = new_parent
+            elif field.name == 'name' and not old_parent:
+                create_kwargs[field.name] = copy_name or field_val + ' copy'
+            elif field.name in fields_to_preserve:
+                create_kwargs[field.name] = CopyAPIView._decrypt_model_field_if_needed(
+                    obj, field.name, field_val
+                )
+        new_obj = model.objects.create(**create_kwargs)
+        # Need to save separately because django-crum get_current_user would
+        # not work properly in a non-request-response-cycle context.
+        new_obj.created_by = creater
+        new_obj.save()
+        for m2m in m2m_to_preserve:
+            for related_obj in m2m_to_preserve[m2m].all():
+                getattr(new_obj, m2m).add(related_obj)
+        if not old_parent:
+            sub_objects = []
+            for o2m in o2m_to_preserve:
+                for sub_obj in o2m_to_preserve[o2m].all():
+                    sub_model = type(sub_obj)
+                    sub_objects.append((sub_model.__module__, sub_model.__name__, sub_obj.pk))
+            return new_obj, sub_objects
+        ret = {obj: new_obj}
+        for o2m in o2m_to_preserve:
+            for sub_obj in o2m_to_preserve[o2m].all():
+                ret.update(CopyAPIView.copy_model_obj(obj, new_obj, type(sub_obj), sub_obj, creater))
+        return ret
+
+    def get(self, request, *args, **kwargs):
+        obj = self.get_object()
+        create_kwargs = self._build_create_dict(obj)
+        for key in create_kwargs:
+            create_kwargs[key] = getattr(create_kwargs[key], 'pk', None) or create_kwargs[key]
+        return Response({'can_copy': request.user.can_access(self.model, 'add', create_kwargs)})
+
+    def post(self, request, *args, **kwargs):
+        obj = self.get_object()
+        create_kwargs = self._build_create_dict(obj)
+        create_kwargs_check = {}
+        for key in create_kwargs:
+            create_kwargs_check[key] = getattr(create_kwargs[key], 'pk', None) or create_kwargs[key]
+        if not request.user.can_access(self.model, 'add', create_kwargs_check):
+            raise PermissionDenied()
+        serializer = self.get_serializer(data=request.data)
+        if not serializer.is_valid():
+            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+        new_obj, sub_objs = CopyAPIView.copy_model_obj(
+            None, None, self.model, obj, request.user, create_kwargs=create_kwargs,
+            copy_name=serializer.validated_data.get('name', '')
+        )
+        if hasattr(new_obj, 'admin_role') and request.user not in new_obj.admin_role:
+            new_obj.admin_role.members.add(request.user)
+        if sub_objs:
+            permission_check_func = None
+            if hasattr(type(self), 'deep_copy_permission_check_func'):
+                permission_check_func = (
+                    type(self).__module__, type(self).__name__, 'deep_copy_permission_check_func'
+                )
+            trigger_delayed_deep_copy(
+                self.model.__module__, self.model.__name__,
+                obj.pk, new_obj.pk, request.user.pk, sub_objs,
+                permission_check_func=permission_check_func
+            )
+        serializer = self._get_copy_return_serializer(new_obj)
+        return Response(serializer.data, status=status.HTTP_201_CREATED)
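Tying CopyAPIView together with the CopySerializer added further below, here is a hedged usage sketch (the exact URL, auth, and payload are assumptions inferred from the code above, not something stated in the diff): GET on a copy endpoint reports `can_copy`, and POST with a new `name` creates the copy, returning 201.

```python
# Probe and then invoke a copy endpoint exposed by a CopyAPIView subclass (illustrative only).
import requests

url = 'https://localhost:8043/api/v2/job_templates/5/copy/'
auth = ('admin', 'password')

print(requests.get(url, auth=auth, verify=False).json())  # e.g. {'can_copy': True}

r = requests.post(url, json={'name': 'Demo Template copy'}, auth=auth, verify=False)
print(r.status_code)  # 201 on success; 400 if `name` matches the original object's name
```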
awx/api/metadata.py

@@ -190,23 +190,6 @@ class Metadata(metadata.SimpleMetadata):
         finally:
             delattr(view, '_request')

-        # Add version number in which view was added to Tower.
-        added_in_version = '1.2'
-        for version in ('3.2.0', '3.1.0', '3.0.0', '2.4.0', '2.3.0', '2.2.0',
-                        '2.1.0', '2.0.0', '1.4.8', '1.4.5', '1.4', '1.3'):
-            if getattr(view, 'new_in_%s' % version.replace('.', ''), False):
-                added_in_version = version
-                break
-        metadata['added_in_version'] = added_in_version
-
-        # Add API version number in which view was added to Tower.
-        added_in_api_version = 'v1'
-        for version in ('v2',):
-            if getattr(view, 'new_in_api_%s' % version, False):
-                added_in_api_version = version
-                break
-        metadata['added_in_api_version'] = added_in_api_version
-
         # Add type(s) handled by this view/serializer.
         if hasattr(view, 'get_serializer'):
             serializer = view.get_serializer()
@@ -33,7 +33,7 @@ class OrderedDictLoader(yaml.SafeLoader):
             key = self.construct_object(key_node, deep=deep)
             try:
                 hash(key)
-            except TypeError, exc:
+            except TypeError as exc:
                 raise yaml.constructor.ConstructorError(
                     "while constructing a mapping", node.start_mark,
                     "found unacceptable key (%s)" % exc, key_node.start_mark
awx/api/renderers.py

@@ -5,6 +5,8 @@
 from rest_framework import renderers
 from rest_framework.request import override_method

+import six
+

 class BrowsableAPIRenderer(renderers.BrowsableAPIRenderer):
     '''

@@ -69,8 +71,8 @@ class PlainTextRenderer(renderers.BaseRenderer):
     format = 'txt'

     def render(self, data, media_type=None, renderer_context=None):
-        if not isinstance(data, basestring):
-            data = unicode(data)
+        if not isinstance(data, six.string_types):
+            data = six.text_type(data)
         return data.encode(self.charset)
@@ -9,7 +9,6 @@ import re
 import six
-import urllib
 from collections import OrderedDict
 from dateutil import rrule

 # Django
 from django.conf import settings

@@ -44,7 +43,7 @@ from awx.main.fields import ImplicitRoleField
 from awx.main.utils import (
     get_type_for_model, get_model_for_type, timestamp_apiformat,
     camelcase_to_underscore, getattrd, parse_yaml_or_json,
-    has_model_field_prefetched, extract_ansible_vars)
+    has_model_field_prefetched, extract_ansible_vars, encrypt_dict)
 from awx.main.utils.filters import SmartFilter
 from awx.main.redact import REPLACE_STR

@@ -131,6 +130,22 @@ def reverse_gfk(content_object, request):
    }


class CopySerializer(serializers.Serializer):

    name = serializers.CharField()

    def validate(self, attrs):
        name = attrs.get('name')
        view = self.context.get('view', None)
        obj = view.get_object()
        if name == obj.name:
            raise serializers.ValidationError(_(
                'The original object is already named {}, a copy from'
                ' it cannot have the same name.'.format(name)
            ))
        return attrs

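A minimal sketch of the contract the new CopySerializer enforces — the import path is an assumption, and the view object below is a stand-in supplying the object the serializer compares against:

```python
# Sketch: a copy must carry a 'name', and that name must differ from the
# original's. StubView stands in for the DRF view normally found in context.
from awx.api.serializers import CopySerializer  # assumption: import path


class StubView(object):
    def get_object(self):
        class Obj(object):
            name = 'demo'
        return Obj()


ser = CopySerializer(data={'name': 'demo'}, context={'view': StubView()})
print(ser.is_valid())  # False: the copy may not reuse the original name
print(ser.errors)
```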

class BaseSerializerMetaclass(serializers.SerializerMetaclass):
    '''
    Custom metaclass to enable attribute inheritance from Meta objects on

@@ -345,7 +360,9 @@ class BaseSerializer(serializers.ModelSerializer):
                 continue
             summary_fields[fk] = OrderedDict()
             for field in related_fields:
-                if field == 'credential_type_id' and fk == 'credential' and self.version < 2:  # TODO: remove version check in 3.3
+                if (
+                        self.version < 2 and field == 'credential_type_id' and
+                        fk in ['credential', 'vault_credential']):  # TODO: remove version check in 3.3
                     continue

                 fval = getattr(fkval, field, None)

@@ -612,14 +629,12 @@ class UnifiedJobTemplateSerializer(BaseSerializer):
 class UnifiedJobSerializer(BaseSerializer):
     show_capabilities = ['start', 'delete']

-    result_stdout = serializers.SerializerMethodField()
-
     class Meta:
         model = UnifiedJob
         fields = ('*', 'unified_job_template', 'launch_type', 'status',
                   'failed', 'started', 'finished', 'elapsed', 'job_args',
-                  'job_cwd', 'job_env', 'job_explanation', 'result_stdout',
-                  'execution_node', 'result_traceback')
+                  'job_cwd', 'job_env', 'job_explanation', 'execution_node',
+                  'result_traceback')
         extra_kwargs = {
             'unified_job_template': {
                 'source': 'unified_job_template_id',

@@ -700,25 +715,17 @@ class UnifiedJobSerializer(BaseSerializer):

         return ret

-    def get_result_stdout(self, obj):
-        obj_size = obj.result_stdout_size
-        if obj_size > settings.STDOUT_MAX_BYTES_DISPLAY:
-            return _("Standard Output too large to display (%(text_size)d bytes), "
-                     "only download supported for sizes over %(supported_size)d bytes") % {
-                'text_size': obj_size, 'supported_size': settings.STDOUT_MAX_BYTES_DISPLAY}
-        return obj.result_stdout
-

 class UnifiedJobListSerializer(UnifiedJobSerializer):

     class Meta:
-        fields = ('*', '-job_args', '-job_cwd', '-job_env', '-result_traceback', '-result_stdout')
+        fields = ('*', '-job_args', '-job_cwd', '-job_env', '-result_traceback')

     def get_field_names(self, declared_fields, info):
         field_names = super(UnifiedJobListSerializer, self).get_field_names(declared_fields, info)
         # Meta multiple inheritance and -field_name options don't seem to be
         # taking effect above, so remove the undesired fields here.
-        return tuple(x for x in field_names if x not in ('job_args', 'job_cwd', 'job_env', 'result_traceback', 'result_stdout'))
+        return tuple(x for x in field_names if x not in ('job_args', 'job_cwd', 'job_env', 'result_traceback'))

     def get_types(self):
         if type(self) is UnifiedJobListSerializer:

@@ -758,14 +765,6 @@ class UnifiedJobStdoutSerializer(UnifiedJobSerializer):
     class Meta:
         fields = ('result_stdout',)

-    def get_result_stdout(self, obj):
-        obj_size = obj.result_stdout_size
-        if obj_size > settings.STDOUT_MAX_BYTES_DISPLAY:
-            return _("Standard Output too large to display (%(text_size)d bytes), "
-                     "only download supported for sizes over %(supported_size)d bytes") % {
-                'text_size': obj_size, 'supported_size': settings.STDOUT_MAX_BYTES_DISPLAY}
-        return obj.result_stdout
-
     def get_types(self):
         if type(self) is UnifiedJobStdoutSerializer:
             return ['project_update', 'inventory_update', 'job', 'ad_hoc_command', 'system_job']

@@ -912,7 +911,7 @@ class OrganizationSerializer(BaseSerializer):

     class Meta:
         model = Organization
-        fields = ('*',)
+        fields = ('*', 'custom_virtualenv',)

     def get_related(self, obj):
         res = super(OrganizationSerializer, self).get_related(obj)

@@ -1000,7 +999,7 @@ class ProjectSerializer(UnifiedJobTemplateSerializer, ProjectOptionsSerializer):
     class Meta:
         model = Project
         fields = ('*', 'organization', 'scm_delete_on_next_update', 'scm_update_on_launch',
-                  'scm_update_cache_timeout', 'scm_revision',) + \
+                  'scm_update_cache_timeout', 'scm_revision', 'custom_virtualenv',) + \
                  ('last_update_failed', 'last_updated')  # Backwards compatibility
         read_only_fields = ('scm_delete_on_next_update',)

@@ -1020,6 +1019,7 @@ class ProjectSerializer(UnifiedJobTemplateSerializer, ProjectOptionsSerializer):
             notification_templates_error = self.reverse('api:project_notification_templates_error_list', kwargs={'pk': obj.pk}),
             access_list = self.reverse('api:project_access_list', kwargs={'pk': obj.pk}),
             object_roles = self.reverse('api:project_object_roles_list', kwargs={'pk': obj.pk}),
+            copy = self.reverse('api:project_copy', kwargs={'pk': obj.pk}),
         ))
         if obj.organization:
             res['organization'] = self.reverse('api:organization_detail',

@@ -1111,11 +1111,17 @@ class ProjectUpdateSerializer(UnifiedJobSerializer, ProjectOptionsSerializer):

     def get_related(self, obj):
         res = super(ProjectUpdateSerializer, self).get_related(obj)
+        try:
+            res.update(dict(
+                project = self.reverse('api:project_detail', kwargs={'pk': obj.project.pk}),
+            ))
+        except ObjectDoesNotExist:
+            pass
         res.update(dict(
-            project = self.reverse('api:project_detail', kwargs={'pk': obj.project.pk}),
             cancel = self.reverse('api:project_update_cancel', kwargs={'pk': obj.pk}),
             scm_inventory_updates = self.reverse('api:project_update_scm_inventory_updates', kwargs={'pk': obj.pk}),
             notifications = self.reverse('api:project_update_notifications_list', kwargs={'pk': obj.pk}),
+            events = self.reverse('api:project_update_events_list', kwargs={'pk': obj.pk}),
         ))
         return res

@@ -1167,6 +1173,7 @@ class InventorySerializer(BaseSerializerWithVariables):
             access_list = self.reverse('api:inventory_access_list', kwargs={'pk': obj.pk}),
             object_roles = self.reverse('api:inventory_object_roles_list', kwargs={'pk': obj.pk}),
             instance_groups = self.reverse('api:inventory_instance_groups_list', kwargs={'pk': obj.pk}),
+            copy = self.reverse('api:inventory_copy', kwargs={'pk': obj.pk}),
         ))
         if obj.insights_credential:
             res['insights_credential'] = self.reverse('api:credential_detail', kwargs={'pk': obj.insights_credential.pk})

@@ -1184,7 +1191,7 @@ class InventorySerializer(BaseSerializerWithVariables):
         if host_filter:
             try:
                 SmartFilter().query_from_string(host_filter)
-            except RuntimeError, e:
+            except RuntimeError as e:
                 raise models.base.ValidationError(e)
         return host_filter

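The host_filter validation above defers entirely to SmartFilter; a sketch of exercising that same path directly — the filter strings are illustrative, and the accepted grammar is whatever SmartFilter itself defines:

```python
# Illustrative host_filter strings run through the same validation used above.
from awx.main.utils.filters import SmartFilter

for hf in ('name__icontains=web', 'groups__name=dbservers'):
    try:
        SmartFilter().query_from_string(hf)
        print('accepted: %s' % hf)
    except RuntimeError as e:
        # This is the error the serializer converts into a ValidationError.
        print('rejected: %s (%s)' % (hf, e))
```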
@@ -1234,8 +1241,9 @@ class HostSerializer(BaseSerializerWithVariables):
         model = Host
         fields = ('*', 'inventory', 'enabled', 'instance_id', 'variables',
                   'has_active_failures', 'has_inventory_sources', 'last_job',
-                  'last_job_host_summary', 'insights_system_id')
-        read_only_fields = ('last_job', 'last_job_host_summary', 'insights_system_id',)
+                  'last_job_host_summary', 'insights_system_id', 'ansible_facts_modified',)
+        read_only_fields = ('last_job', 'last_job_host_summary', 'insights_system_id',
+                            'ansible_facts_modified',)

     def build_relational_field(self, field_name, relation_info):
         field_class, field_kwargs = super(HostSerializer, self).build_relational_field(field_name, relation_info)

@@ -1523,6 +1531,7 @@ class CustomInventoryScriptSerializer(BaseSerializer):
         res = super(CustomInventoryScriptSerializer, self).get_related(obj)
         res.update(dict(
             object_roles = self.reverse('api:inventory_script_object_roles_list', kwargs={'pk': obj.pk}),
+            copy = self.reverse('api:inventory_script_copy', kwargs={'pk': obj.pk}),
         ))

         if obj.organization:

@@ -1726,10 +1735,18 @@ class InventoryUpdateSerializer(UnifiedJobSerializer, InventorySourceOptionsSeri

     def get_related(self, obj):
         res = super(InventoryUpdateSerializer, self).get_related(obj)
+        try:
+            res.update(dict(
+                inventory_source = self.reverse(
+                    'api:inventory_source_detail', kwargs={'pk': obj.inventory_source.pk}
+                ),
+            ))
+        except ObjectDoesNotExist:
+            pass
         res.update(dict(
-            inventory_source = self.reverse('api:inventory_source_detail', kwargs={'pk': obj.inventory_source.pk}),
             cancel = self.reverse('api:inventory_update_cancel', kwargs={'pk': obj.pk}),
             notifications = self.reverse('api:inventory_update_notifications_list', kwargs={'pk': obj.pk}),
+            events = self.reverse('api:inventory_update_events_list', kwargs={'pk': obj.pk}),
         ))
         if obj.source_project_update_id:
             res['source_project_update'] = self.reverse('api:project_update_detail',

@@ -2072,6 +2089,7 @@ class CredentialSerializer(BaseSerializer):
             object_roles = self.reverse('api:credential_object_roles_list', kwargs={'pk': obj.pk}),
             owner_users = self.reverse('api:credential_owner_users_list', kwargs={'pk': obj.pk}),
             owner_teams = self.reverse('api:credential_owner_teams_list', kwargs={'pk': obj.pk}),
+            copy = self.reverse('api:credential_copy', kwargs={'pk': obj.pk}),
         ))

         # TODO: remove when API v1 is removed

@@ -2125,7 +2143,7 @@ class CredentialSerializer(BaseSerializer):

     def to_internal_value(self, data):
         # TODO: remove when API v1 is removed
-        if 'credential_type' not in data:
+        if 'credential_type' not in data and self.version == 1:
             # If `credential_type` is not provided, assume the payload is a
             # v1 credential payload that specifies a `kind` and a flat list
             # of field values

@@ -2162,10 +2180,23 @@ class CredentialSerializer(BaseSerializer):

     def validate_credential_type(self, credential_type):
         if self.instance and credential_type.pk != self.instance.credential_type.pk:
-            raise ValidationError(
-                _('You cannot change the credential type of the credential, as it may break the functionality'
-                  ' of the resources using it.'),
-            )
+            for rel in (
+                    'ad_hoc_commands',
+                    'insights_inventories',
+                    'inventorysources',
+                    'inventoryupdates',
+                    'unifiedjobs',
+                    'unifiedjobtemplates',
+                    'projects',
+                    'projectupdates',
+                    'workflowjobnodes'
+            ):
+                if getattr(self.instance, rel).count() > 0:
+                    raise ValidationError(
+                        _('You cannot change the credential type of the credential, as it may break the functionality'
+                          ' of the resources using it.'),
+                    )

         return credential_type

@@ -2346,14 +2377,30 @@ class JobOptionsSerializer(LabelsListMixin, BaseSerializer):
     def get_related(self, obj):
         res = super(JobOptionsSerializer, self).get_related(obj)
         res['labels'] = self.reverse('api:job_template_label_list', kwargs={'pk': obj.pk})
-        if obj.inventory:
-            res['inventory'] = self.reverse('api:inventory_detail', kwargs={'pk': obj.inventory.pk})
-        if obj.project:
-            res['project'] = self.reverse('api:project_detail', kwargs={'pk': obj.project.pk})
-        if obj.credential:
-            res['credential'] = self.reverse('api:credential_detail', kwargs={'pk': obj.credential})
-        if obj.vault_credential:
-            res['vault_credential'] = self.reverse('api:credential_detail', kwargs={'pk': obj.vault_credential})
+        try:
+            if obj.inventory:
+                res['inventory'] = self.reverse('api:inventory_detail', kwargs={'pk': obj.inventory.pk})
+        except ObjectDoesNotExist:
+            setattr(obj, 'inventory', None)
+        try:
+            if obj.project:
+                res['project'] = self.reverse('api:project_detail', kwargs={'pk': obj.project.pk})
+        except ObjectDoesNotExist:
+            setattr(obj, 'project', None)
+        try:
+            if obj.credential:
+                res['credential'] = self.reverse(
+                    'api:credential_detail', kwargs={'pk': obj.credential}
+                )
+        except ObjectDoesNotExist:
+            setattr(obj, 'credential', None)
+        try:
+            if obj.vault_credential:
+                res['vault_credential'] = self.reverse(
+                    'api:credential_detail', kwargs={'pk': obj.vault_credential}
+                )
+        except ObjectDoesNotExist:
+            setattr(obj, 'vault_credential', None)
         if self.version > 1:
             if isinstance(obj, UnifiedJobTemplate):
                 res['extra_credentials'] = self.reverse(

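The guarded lookups above exist because a job's foreign keys can point at rows that have since been deleted, in which case dereferencing the attribute raises ObjectDoesNotExist. The pattern in isolation — the helper name is ours, not AWX's:

```python
# Standalone version of the guard pattern used above: try each related link
# independently, and blank the attribute when the target row is gone.
from django.core.exceptions import ObjectDoesNotExist


def safe_related_url(obj, attr, build_url):
    try:
        related = getattr(obj, attr)       # may raise if the FK target was deleted
        if related:
            return build_url(related)
    except ObjectDoesNotExist:
        setattr(obj, attr, None)           # mirror the serializer's fallback
    return None
```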
@@ -2504,7 +2551,7 @@ class JobTemplateSerializer(JobTemplateMixin, UnifiedJobTemplateSerializer, JobO
         fields = ('*', 'host_config_key', 'ask_diff_mode_on_launch', 'ask_variables_on_launch', 'ask_limit_on_launch', 'ask_tags_on_launch',
                   'ask_skip_tags_on_launch', 'ask_job_type_on_launch', 'ask_verbosity_on_launch', 'ask_inventory_on_launch',
                   'ask_credential_on_launch', 'survey_enabled', 'become_enabled', 'diff_mode',
-                  'allow_simultaneous')
+                  'allow_simultaneous', 'custom_virtualenv')

     def get_related(self, obj):
         res = super(JobTemplateSerializer, self).get_related(obj)

@@ -2521,6 +2568,7 @@ class JobTemplateSerializer(JobTemplateMixin, UnifiedJobTemplateSerializer, JobO
             labels = self.reverse('api:job_template_label_list', kwargs={'pk': obj.pk}),
             object_roles = self.reverse('api:job_template_object_roles_list', kwargs={'pk': obj.pk}),
             instance_groups = self.reverse('api:job_template_instance_groups_list', kwargs={'pk': obj.pk}),
+            copy = self.reverse('api:job_template_copy', kwargs={'pk': obj.pk}),
         ))
         if obj.host_config_key:
             res['callback'] = self.reverse('api:job_template_callback', kwargs={'pk': obj.pk})

@@ -2608,15 +2656,23 @@ class JobSerializer(UnifiedJobSerializer, JobOptionsSerializer):
             notifications = self.reverse('api:job_notifications_list', kwargs={'pk': obj.pk}),
             labels = self.reverse('api:job_label_list', kwargs={'pk': obj.pk}),
         ))
-        if obj.job_template:
-            res['job_template'] = self.reverse('api:job_template_detail',
-                                               kwargs={'pk': obj.job_template.pk})
+        try:
+            if obj.job_template:
+                res['job_template'] = self.reverse('api:job_template_detail',
+                                                   kwargs={'pk': obj.job_template.pk})
+        except ObjectDoesNotExist:
+            setattr(obj, 'job_template', None)
         if (obj.can_start or True) and self.version == 1:  # TODO: remove in 3.3
             res['start'] = self.reverse('api:job_start', kwargs={'pk': obj.pk})
         if obj.can_cancel or True:
             res['cancel'] = self.reverse('api:job_cancel', kwargs={'pk': obj.pk})
-        if obj.project_update:
-            res['project_update'] = self.reverse('api:project_update_detail', kwargs={'pk': obj.project_update.pk})
+        try:
+            if obj.project_update:
+                res['project_update'] = self.reverse(
+                    'api:project_update_detail', kwargs={'pk': obj.project_update.pk}
+                )
+        except ObjectDoesNotExist:
+            pass
         res['create_schedule'] = self.reverse('api:job_create_schedule', kwargs={'pk': obj.pk})
         res['relaunch'] = self.reverse('api:job_relaunch', kwargs={'pk': obj.pk})
         return res

@@ -2756,8 +2812,6 @@ class JobRelaunchSerializer(BaseSerializer):

     def validate(self, attrs):
         obj = self.context.get('obj')
-        if not obj.credential:
-            raise serializers.ValidationError(dict(credential=[_("Credential not found or deleted.")]))
         if obj.project is None:
             raise serializers.ValidationError(dict(errors=[_("Job Template Project is missing or undefined.")]))
         if obj.inventory is None or obj.inventory.pending_deletion:

@@ -2914,9 +2968,11 @@ class SystemJobTemplateSerializer(UnifiedJobTemplateSerializer):

 class SystemJobSerializer(UnifiedJobSerializer):

+    result_stdout = serializers.SerializerMethodField()
+
     class Meta:
         model = SystemJob
-        fields = ('*', 'system_job_template', 'job_type', 'extra_vars')
+        fields = ('*', 'system_job_template', 'job_type', 'extra_vars', 'result_stdout')

     def get_related(self, obj):
         res = super(SystemJobSerializer, self).get_related(obj)

@@ -2926,8 +2982,19 @@ class SystemJobSerializer(UnifiedJobSerializer):
             res['notifications'] = self.reverse('api:system_job_notifications_list', kwargs={'pk': obj.pk})
         if obj.can_cancel or True:
             res['cancel'] = self.reverse('api:system_job_cancel', kwargs={'pk': obj.pk})
+        res['events'] = self.reverse('api:system_job_events_list', kwargs={'pk': obj.pk})
         return res

+    def get_result_stdout(self, obj):
+        try:
+            return obj.result_stdout
+        except StdoutMaxBytesExceeded as e:
+            return _(
+                "Standard Output too large to display ({text_size} bytes), "
+                "only download supported for sizes over {supported_size} bytes").format(
+                    text_size=e.total, supported_size=e.supported
+            )
+

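The rewritten get_result_stdout lets the model signal oversized output by raising StdoutMaxBytesExceeded rather than having the serializer compare sizes itself. A sketch of the producer side of that contract, using the attribute names (`total`, `supported`) the handler above reads — the size cap is an assumed stand-in for settings.STDOUT_MAX_BYTES_DISPLAY:

```python
# Sketch of the exception contract used above.
class StdoutMaxBytesExceeded(Exception):
    def __init__(self, total, supported):
        self.total = total          # actual size of the stored output
        self.supported = supported  # largest size the API will display


MAX_DISPLAY = 1048576  # assumption: stand-in for settings.STDOUT_MAX_BYTES_DISPLAY


def read_stdout(raw_bytes):
    """Return decoded stdout, or raise when it exceeds the display cap."""
    if len(raw_bytes) > MAX_DISPLAY:
        raise StdoutMaxBytesExceeded(len(raw_bytes), MAX_DISPLAY)
    return raw_bytes.decode('utf-8', errors='replace')
```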

class SystemJobCancelSerializer(SystemJobSerializer):

@@ -3065,15 +3132,60 @@ class LaunchConfigurationBaseSerializer(BaseSerializer):
            ret['extra_data'] = obj.display_extra_data()
        return ret

    def get_summary_fields(self, obj):
        summary_fields = super(LaunchConfigurationBaseSerializer, self).get_summary_fields(obj)
        # Credential would be an empty dictionary in this case
        summary_fields.pop('credential', None)
        return summary_fields

    def validate(self, attrs):
        attrs = super(LaunchConfigurationBaseSerializer, self).validate(attrs)

        # Build unsaved version of this config, use it to detect prompts errors
        ujt = None
        if 'unified_job_template' in attrs:
            ujt = attrs['unified_job_template']
        elif self.instance:
            ujt = self.instance.unified_job_template

        # Replace $encrypted$ submissions with db value if exists
        # build additional field survey_passwords to track redacted variables
        if 'extra_data' in attrs:
            extra_data = parse_yaml_or_json(attrs.get('extra_data', {}))
            if hasattr(ujt, 'survey_password_variables'):
                # Prepare additional field survey_passwords for save
                password_dict = {}
                for key in ujt.survey_password_variables():
                    if key in extra_data:
                        password_dict[key] = REPLACE_STR
                if not self.instance or password_dict != self.instance.survey_passwords:
                    attrs['survey_passwords'] = password_dict.copy()
                # Force dict type (cannot preserve YAML formatting if passwords are involved)
                if not isinstance(attrs['extra_data'], dict):
                    attrs['extra_data'] = parse_yaml_or_json(attrs['extra_data'])
                # Encrypt the extra_data for save, only current password vars in JT survey
                encrypt_dict(attrs['extra_data'], password_dict.keys())
                # For any raw $encrypted$ string, either
                # - replace with existing DB value
                # - raise a validation error
                # - remove key from extra_data if survey default is present
                if self.instance:
                    db_extra_data = parse_yaml_or_json(self.instance.extra_data)
                else:
                    db_extra_data = {}
                for key in password_dict.keys():
                    if attrs['extra_data'].get(key, None) == REPLACE_STR:
                        if key not in db_extra_data:
                            element = ujt.pivot_spec(ujt.survey_spec)[key]
                            if 'default' in element and element['default']:
                                attrs['survey_passwords'].pop(key, None)
                                attrs['extra_data'].pop(key, None)
                            else:
                                raise serializers.ValidationError(
                                    {"extra_data": _('Provided variable {} has no database value to replace with.').format(key)})
                        else:
                            attrs['extra_data'][key] = db_extra_data[key]

        # Build unsaved version of this config, use it to detect prompts errors
        mock_obj = self._build_mock_obj(attrs)
        accepted, rejected, errors = ujt._accept_or_ignore_job_kwargs(
            _exclude_errors=self.exclude_errors, **mock_obj.prompts_dict())

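The sentinel-substitution rules above reduce to a small decision table per survey-password key: reuse the stored value, fall back to the survey default, or reject. A standalone sketch — `merge_passwords` is our name for it, not AWX's:

```python
# Reduced sketch of the $encrypted$ substitution rules implemented above.
REPLACE_STR = '$encrypted$'


def merge_passwords(submitted, stored, defaults):
    merged = dict(submitted)
    for key, value in submitted.items():
        if value != REPLACE_STR:
            continue                        # plain value: keep as-is
        if key in stored:
            merged[key] = stored[key]       # reuse the database value
        elif defaults.get(key):
            del merged[key]                 # fall back to the survey default
        else:
            raise ValueError('%s has no database value to replace with' % key)
    return merged


print(merge_passwords({'pw': REPLACE_STR}, {'pw': 'secret'}, {}))  # {'pw': 'secret'}
```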
@@ -3085,19 +3197,9 @@ class LaunchConfigurationBaseSerializer(BaseSerializer):
             raise serializers.ValidationError(errors)

         # Model `.save` needs the container dict, not the pseudo fields
-        attrs['char_prompts'] = mock_obj.char_prompts
+        if mock_obj.char_prompts:
+            attrs['char_prompts'] = mock_obj.char_prompts

-        # Insert survey_passwords to track redacted variables
-        # TODO: perform encryption on save
-        if 'extra_data' in attrs:
-            extra_data = parse_yaml_or_json(attrs.get('extra_data', {}))
-            if hasattr(ujt, 'survey_password_variables'):
-                password_dict = {}
-                for key in ujt.survey_password_variables():
-                    if key in extra_data:
-                        password_dict[key] = REPLACE_STR
-                if not self.instance or password_dict != self.instance.survey_passwords:
-                    attrs['survey_passwords'] = password_dict
         return attrs

@@ -3108,7 +3210,7 @@ class WorkflowJobTemplateNodeSerializer(LaunchConfigurationBaseSerializer):
     success_nodes = serializers.PrimaryKeyRelatedField(many=True, read_only=True)
     failure_nodes = serializers.PrimaryKeyRelatedField(many=True, read_only=True)
     always_nodes = serializers.PrimaryKeyRelatedField(many=True, read_only=True)
-    exclude_errors = ('required')  # required variables may be provided by WFJT or on launch
+    exclude_errors = ('required',)  # required variables may be provided by WFJT or on launch

     class Meta:
         model = WorkflowJobTemplateNode

@@ -3162,6 +3264,9 @@ class WorkflowJobTemplateNodeSerializer(LaunchConfigurationBaseSerializer):
             cred = deprecated_fields['credential']
             attrs['credential'] = cred
+            if cred is not None:
+                if not ujt_obj.ask_credential_on_launch:
+                    raise serializers.ValidationError({"credential": _(
+                        "Related template is not configured to accept credentials on launch.")})
                 cred = Credential.objects.get(pk=cred)
                 view = self.context.get('view', None)
                 if (not view) or (not view.request) or (view.request.user not in cred.use_role):

@@ -3360,6 +3465,41 @@ class JobEventWebSocketSerializer(JobEventSerializer):
        return 'job_events'


class ProjectUpdateEventSerializer(JobEventSerializer):

    class Meta:
        model = ProjectUpdateEvent
        fields = ('*', '-name', '-description', '-job', '-job_id',
                  '-parent_uuid', '-parent', '-host', 'project_update')

    def get_related(self, obj):
        res = super(JobEventSerializer, self).get_related(obj)
        res['project_update'] = self.reverse(
            'api:project_update_detail', kwargs={'pk': obj.project_update_id}
        )
        return res


class ProjectUpdateEventWebSocketSerializer(ProjectUpdateEventSerializer):
    created = serializers.SerializerMethodField()
    modified = serializers.SerializerMethodField()
    event_name = serializers.CharField(source='event')
    group_name = serializers.SerializerMethodField()

    class Meta:
        model = ProjectUpdateEvent
        fields = ('*', 'event_name', 'group_name',)

    def get_created(self, obj):
        return obj.created.isoformat()

    def get_modified(self, obj):
        return obj.modified.isoformat()

    def get_group_name(self, obj):
        return 'project_update_events'


class AdHocCommandEventSerializer(BaseSerializer):

    event_display = serializers.CharField(source='get_event_display', read_only=True)

@@ -3419,6 +3559,76 @@ class AdHocCommandEventWebSocketSerializer(AdHocCommandEventSerializer):
        return 'ad_hoc_command_events'


class InventoryUpdateEventSerializer(AdHocCommandEventSerializer):

    class Meta:
        model = InventoryUpdateEvent
        fields = ('*', '-name', '-description', '-ad_hoc_command', '-host',
                  '-host_name', 'inventory_update')

    def get_related(self, obj):
        res = super(AdHocCommandEventSerializer, self).get_related(obj)
        res['inventory_update'] = self.reverse(
            'api:inventory_update_detail', kwargs={'pk': obj.inventory_update_id}
        )
        return res


class InventoryUpdateEventWebSocketSerializer(InventoryUpdateEventSerializer):
    created = serializers.SerializerMethodField()
    modified = serializers.SerializerMethodField()
    event_name = serializers.CharField(source='event')
    group_name = serializers.SerializerMethodField()

    class Meta:
        model = InventoryUpdateEvent
        fields = ('*', 'event_name', 'group_name',)

    def get_created(self, obj):
        return obj.created.isoformat()

    def get_modified(self, obj):
        return obj.modified.isoformat()

    def get_group_name(self, obj):
        return 'inventory_update_events'


class SystemJobEventSerializer(AdHocCommandEventSerializer):

    class Meta:
        model = SystemJobEvent
        fields = ('*', '-name', '-description', '-ad_hoc_command', '-host',
                  '-host_name', 'system_job')

    def get_related(self, obj):
        res = super(AdHocCommandEventSerializer, self).get_related(obj)
        res['system_job'] = self.reverse(
            'api:system_job_detail', kwargs={'pk': obj.system_job_id}
        )
        return res


class SystemJobEventWebSocketSerializer(SystemJobEventSerializer):
    created = serializers.SerializerMethodField()
    modified = serializers.SerializerMethodField()
    event_name = serializers.CharField(source='event')
    group_name = serializers.SerializerMethodField()

    class Meta:
        model = SystemJobEvent
        fields = ('*', 'event_name', 'group_name',)

    def get_created(self, obj):
        return obj.created.isoformat()

    def get_modified(self, obj):
        return obj.modified.isoformat()

    def get_group_name(self, obj):
        return 'system_job_events'


class JobLaunchSerializer(BaseSerializer):

    # Representational fields

@@ -3483,15 +3693,16 @@ class JobLaunchSerializer(BaseSerializer):
                     id=getattrd(obj, '%s.pk' % field_name, None))
             elif field_name == 'credentials':
                 if self.version > 1:
-                    defaults_dict[field_name] = [
-                        dict(
+                    for cred in obj.credentials.all():
+                        cred_dict = dict(
                             id=cred.id,
                             name=cred.name,
                             credential_type=cred.credential_type.pk,
                             passwords_needed=cred.passwords_needed
                         )
-                        for cred in obj.credentials.all()
-                    ]
+                        if cred.credential_type.managed_by_tower and 'vault_id' in cred.credential_type.defined_fields:
+                            cred_dict['vault_id'] = cred.inputs.get('vault_id') or None
+                        defaults_dict.setdefault(field_name, []).append(cred_dict)
             else:
                 defaults_dict[field_name] = getattr(obj, field_name)
         return defaults_dict

@@ -3506,7 +3717,7 @@ class JobLaunchSerializer(BaseSerializer):
         template = self.context.get('template')

         accepted, rejected, errors = template._accept_or_ignore_job_kwargs(
-            _exclude_errors=['prompts', 'required'],  # make several error types non-blocking
+            _exclude_errors=['prompts'],  # make several error types non-blocking
             **attrs)
         self._ignored_fields = rejected

@@ -3520,15 +3731,30 @@ class JobLaunchSerializer(BaseSerializer):
         distinct_cred_kinds = []
         for cred in accepted.get('credentials', []):
             if cred.unique_hash() in distinct_cred_kinds:
-                errors['credentials'] = _('Cannot assign multiple %s credentials.' % cred.credential_type.name)
+                errors.setdefault('credentials', []).append(_(
+                    'Cannot assign multiple {} credentials.'
+                ).format(cred.unique_hash(display=True)))
             distinct_cred_kinds.append(cred.unique_hash())

+        # Prohibit removing credentials from the JT list (unsupported for now)
+        template_credentials = template.credentials.all()
+        if 'credentials' in attrs:
+            removed_creds = set(template_credentials) - set(attrs['credentials'])
+            provided_mapping = Credential.unique_dict(attrs['credentials'])
+            for cred in removed_creds:
+                if cred.unique_hash() in provided_mapping.keys():
+                    continue  # User replaced credential with new of same type
+                errors.setdefault('credentials', []).append(_(
+                    'Removing {} credential at launch time without replacement is not supported. '
+                    'Provided list lacked credential(s): {}.'
+                ).format(cred.unique_hash(display=True), ', '.join([str(c) for c in removed_creds])))
+
         # verify that credentials (either provided or existing) don't
         # require launch-time passwords that have not been provided
         if 'credentials' in accepted:
             launch_credentials = accepted['credentials']
         else:
-            launch_credentials = template.credentials.all()
+            launch_credentials = template_credentials
         passwords = attrs.get('credential_passwords', {})  # get from original attrs
         passwords_lacking = []
         for cred in launch_credentials:

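For orientation, the launch-time credential rules above operate on POST payloads shaped roughly like the sketch below — the ids, variable names, and password key are made up for illustration:

```python
# Illustrative launch payload of the shape validated above: swapping in a
# credential of the same type is allowed, dropping one without a replacement
# is rejected, and any launch-time passwords ride alongside.
payload = {
    'credentials': [1, 7],                    # must cover every JT credential type
    'credential_passwords': {
        'ssh_password': 'launch-time-secret', # only if a credential prompts for it
    },
    'extra_vars': {'deploy_env': 'staging'},
}
```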
@@ -3618,6 +3844,7 @@ class NotificationTemplateSerializer(BaseSerializer):
         res.update(dict(
             test = self.reverse('api:notification_template_test', kwargs={'pk': obj.pk}),
             notifications = self.reverse('api:notification_template_notification_list', kwargs={'pk': obj.pk}),
+            copy = self.reverse('api:notification_template_copy', kwargs={'pk': obj.pk}),
         ))
         if obj.organization:
             res['organization'] = self.reverse('api:organization_detail', kwargs={'pk': obj.organization.pk})

@@ -3708,7 +3935,70 @@ class LabelSerializer(BaseSerializer):
         return res


-class ScheduleSerializer(LaunchConfigurationBaseSerializer):
+class SchedulePreviewSerializer(BaseSerializer):

     class Meta:
         model = Schedule
         fields = ('rrule',)

     # We reject rrules if:
     # - DTSTART is not included
     # - INTERVAL is not included
     # - SECONDLY is used
     # - TZID is used
     # - BYDAY prefixed with a number (MO is good but not 20MO)
     # - BYYEARDAY
     # - BYWEEKNO
     # - Multiple DTSTART or RRULE elements
     # - Can't contain both COUNT and UNTIL
     # - COUNT > 999
     def validate_rrule(self, value):
         rrule_value = value
         multi_by_month_day = ".*?BYMONTHDAY[\:\=][0-9]+,-*[0-9]+"
         multi_by_month = ".*?BYMONTH[\:\=][0-9]+,[0-9]+"
         by_day_with_numeric_prefix = ".*?BYDAY[\:\=][0-9]+[a-zA-Z]{2}"
         match_count = re.match(".*?(COUNT\=[0-9]+)", rrule_value)
         match_multiple_dtstart = re.findall(".*?(DTSTART(;[^:]+)?\:[0-9]+T[0-9]+Z?)", rrule_value)
         match_native_dtstart = re.findall(".*?(DTSTART:[0-9]+T[0-9]+) ", rrule_value)
         match_multiple_rrule = re.findall(".*?(RRULE\:)", rrule_value)
         if not len(match_multiple_dtstart):
             raise serializers.ValidationError(_('Valid DTSTART required in rrule. Value should start with: DTSTART:YYYYMMDDTHHMMSSZ'))
         if len(match_native_dtstart):
             raise serializers.ValidationError(_('DTSTART cannot be a naive datetime. Specify ;TZINFO= or YYYYMMDDTHHMMSSZZ.'))
         if len(match_multiple_dtstart) > 1:
             raise serializers.ValidationError(_('Multiple DTSTART is not supported.'))
         if not len(match_multiple_rrule):
             raise serializers.ValidationError(_('RRULE required in rrule.'))
         if len(match_multiple_rrule) > 1:
             raise serializers.ValidationError(_('Multiple RRULE is not supported.'))
         if 'interval' not in rrule_value.lower():
             raise serializers.ValidationError(_('INTERVAL required in rrule.'))
         if 'secondly' in rrule_value.lower():
             raise serializers.ValidationError(_('SECONDLY is not supported.'))
         if re.match(multi_by_month_day, rrule_value):
             raise serializers.ValidationError(_('Multiple BYMONTHDAYs not supported.'))
         if re.match(multi_by_month, rrule_value):
             raise serializers.ValidationError(_('Multiple BYMONTHs not supported.'))
         if re.match(by_day_with_numeric_prefix, rrule_value):
             raise serializers.ValidationError(_("BYDAY with numeric prefix not supported."))
         if 'byyearday' in rrule_value.lower():
             raise serializers.ValidationError(_("BYYEARDAY not supported."))
         if 'byweekno' in rrule_value.lower():
             raise serializers.ValidationError(_("BYWEEKNO not supported."))
         if 'COUNT' in rrule_value and 'UNTIL' in rrule_value:
             raise serializers.ValidationError(_("RRULE may not contain both COUNT and UNTIL"))
         if match_count:
             count_val = match_count.groups()[0].strip().split("=")
             if int(count_val[1]) > 999:
                 raise serializers.ValidationError(_("COUNT > 999 is unsupported."))
         try:
             Schedule.rrulestr(rrule_value)
         except Exception as e:
             raise serializers.ValidationError(_("rrule parsing failed validation: {}").format(e))
         return value

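A quick check of validate_rrule's happy path using dateutil, the parser Schedule.rrulestr builds on. Note the stored value is typically a single `DTSTART:... RRULE:...` string, whereas plain dateutil wants the two parts on separate lines — one of the details Schedule.rrulestr smooths over:

```python
# Parsing an rrule of the shape validate_rrule above accepts.
from dateutil import rrule

value = 'DTSTART:20180601T120000Z\nRRULE:FREQ=DAILY;INTERVAL=1;COUNT=5'
occurrences = list(rrule.rrulestr(value))
print(occurrences[0])  # first occurrence: 2018-06-01 12:00:00+00:00

# By contrast, 'FREQ=SECONDLY' parses fine for dateutil but is rejected
# outright by the serializer above.
```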
class ScheduleSerializer(LaunchConfigurationBaseSerializer, SchedulePreviewSerializer):
    show_capabilities = ['edit', 'delete']

    class Meta:

@@ -3722,6 +4012,15 @@ class ScheduleSerializer(LaunchConfigurationBaseSerializer):
         ))
         if obj.unified_job_template:
             res['unified_job_template'] = obj.unified_job_template.get_absolute_url(self.context.get('request'))
+            try:
+                if obj.unified_job_template.project:
+                    res['project'] = obj.unified_job_template.project.get_absolute_url(self.context.get('request'))
+            except ObjectDoesNotExist:
+                pass
+        if obj.inventory:
+            res['inventory'] = obj.inventory.get_absolute_url(self.context.get('request'))
+        elif obj.unified_job_template and getattr(obj.unified_job_template, 'inventory', None):
+            res['inventory'] = obj.unified_job_template.inventory.get_absolute_url(self.context.get('request'))
         return res

     def validate_unified_job_template(self, value):

@@ -3731,60 +4030,8 @@ class ScheduleSerializer(LaunchConfigurationBaseSerializer):
             raise serializers.ValidationError(_('Manual Project cannot have a schedule set.'))
         elif type(value) == InventorySource and value.source == 'scm' and value.update_on_project_update:
             raise serializers.ValidationError(_(
-                'Inventory sources with `update_on_project_update` cannot be scheduled. '
-                'Schedule its source project `{}` instead.'.format(value.source_project.name)))
+                six.text_type('Inventory sources with `update_on_project_update` cannot be scheduled. '
+                              'Schedule its source project `{}` instead.').format(value.source_project.name)))
         return value

-    # We reject rrules if:
-    # - DTSTART is not include
-    # - INTERVAL is not included
-    # - SECONDLY is used
-    # - TZID is used
-    # - BYDAY prefixed with a number (MO is good but not 20MO)
-    # - BYYEARDAY
-    # - BYWEEKNO
-    # - Multiple DTSTART or RRULE elements
-    # - COUNT > 999
-    def validate_rrule(self, value):
-        rrule_value = value
-        multi_by_month_day = ".*?BYMONTHDAY[\:\=][0-9]+,-*[0-9]+"
-        multi_by_month = ".*?BYMONTH[\:\=][0-9]+,[0-9]+"
-        by_day_with_numeric_prefix = ".*?BYDAY[\:\=][0-9]+[a-zA-Z]{2}"
-        match_count = re.match(".*?(COUNT\=[0-9]+)", rrule_value)
-        match_multiple_dtstart = re.findall(".*?(DTSTART\:[0-9]+T[0-9]+Z)", rrule_value)
-        match_multiple_rrule = re.findall(".*?(RRULE\:)", rrule_value)
-        if not len(match_multiple_dtstart):
-            raise serializers.ValidationError(_('DTSTART required in rrule. Value should match: DTSTART:YYYYMMDDTHHMMSSZ'))
-        if len(match_multiple_dtstart) > 1:
-            raise serializers.ValidationError(_('Multiple DTSTART is not supported.'))
-        if not len(match_multiple_rrule):
-            raise serializers.ValidationError(_('RRULE require in rrule.'))
-        if len(match_multiple_rrule) > 1:
-            raise serializers.ValidationError(_('Multiple RRULE is not supported.'))
-        if 'interval' not in rrule_value.lower():
-            raise serializers.ValidationError(_('INTERVAL required in rrule.'))
-        if 'tzid' in rrule_value.lower():
-            raise serializers.ValidationError(_('TZID is not supported.'))
-        if 'secondly' in rrule_value.lower():
-            raise serializers.ValidationError(_('SECONDLY is not supported.'))
-        if re.match(multi_by_month_day, rrule_value):
-            raise serializers.ValidationError(_('Multiple BYMONTHDAYs not supported.'))
-        if re.match(multi_by_month, rrule_value):
-            raise serializers.ValidationError(_('Multiple BYMONTHs not supported.'))
-        if re.match(by_day_with_numeric_prefix, rrule_value):
-            raise serializers.ValidationError(_("BYDAY with numeric prefix not supported."))
-        if 'byyearday' in rrule_value.lower():
-            raise serializers.ValidationError(_("BYYEARDAY not supported."))
-        if 'byweekno' in rrule_value.lower():
-            raise serializers.ValidationError(_("BYWEEKNO not supported."))
-        if match_count:
-            count_val = match_count.groups()[0].strip().split("=")
-            if int(count_val[1]) > 999:
-                raise serializers.ValidationError(_("COUNT > 999 is unsupported."))
-        try:
-            rrule.rrulestr(rrule_value)
-        except Exception:
-            raise serializers.ValidationError(_("rrule parsing failed validation."))
-        return value

@@ -3796,8 +4043,10 @@ class InstanceSerializer(BaseSerializer):

     class Meta:
         model = Instance
-        fields = ("id", "type", "url", "related", "uuid", "hostname", "created", "modified",
-                  "version", "capacity", "consumed_capacity", "percent_capacity_remaining", "jobs_running")
-        read_only_fields = ('uuid', 'hostname', 'version')
+        fields = ("id", "type", "url", "related", "uuid", "hostname", "created", "modified", 'capacity_adjustment',
+                  "version", "capacity", "consumed_capacity", "percent_capacity_remaining", "jobs_running",
+                  "cpu", "memory", "cpu_capacity", "mem_capacity", "enabled")

     def get_related(self, obj):
         res = super(InstanceSerializer, self).get_related(obj)

@@ -3820,6 +4069,7 @@ class InstanceSerializer(BaseSerializer):

 class InstanceGroupSerializer(BaseSerializer):

+    committed_capacity = serializers.SerializerMethodField()
     consumed_capacity = serializers.SerializerMethodField()
     percent_capacity_remaining = serializers.SerializerMethodField()
     jobs_running = serializers.SerializerMethodField()

@@ -3827,8 +4077,10 @@ class InstanceGroupSerializer(BaseSerializer):

     class Meta:
         model = InstanceGroup
-        fields = ("id", "type", "url", "related", "name", "created", "modified", "capacity", "consumed_capacity",
-                  "percent_capacity_remaining", "jobs_running", "instances", "controller")
+        fields = ("id", "type", "url", "related", "name", "created", "modified",
+                  "capacity", "committed_capacity", "consumed_capacity",
+                  "percent_capacity_remaining", "jobs_running", "instances", "controller",
+                  "policy_instance_percentage", "policy_instance_minimum", "policy_instance_list")

     def get_related(self, obj):
         res = super(InstanceGroupSerializer, self).get_related(obj)

@@ -3856,7 +4108,10 @@ class InstanceGroupSerializer(BaseSerializer):
         return self.context['capacity_map']

     def get_consumed_capacity(self, obj):
-        return self.get_capacity_dict()[obj.name]['consumed_capacity']
+        return self.get_capacity_dict()[obj.name]['running_capacity']

+    def get_committed_capacity(self, obj):
+        return self.get_capacity_dict()[obj.name]['committed_capacity']
+
     def get_percent_capacity_remaining(self, obj):
         if not obj.capacity:

@@ -3954,6 +4209,11 @@ class ActivityStreamSerializer(BaseSerializer):

         if fk == 'schedule':
             rel['unified_job_template'] = thisItem.unified_job_template.get_absolute_url(self.context.get('request'))
+        if obj.setting and obj.setting.get('category', None):
+            rel['setting'] = self.reverse(
+                'api:setting_singleton_detail',
+                kwargs={'category_slug': obj.setting['category']}
+            )
         return rel

     def _get_rel(self, obj, fk):

@@ -4005,6 +4265,8 @@ class ActivityStreamSerializer(BaseSerializer):
                 username = obj.actor.username,
                 first_name = obj.actor.first_name,
                 last_name = obj.actor.last_name)
+        if obj.setting:
+            summary_fields['setting'] = [obj.setting]
         return summary_fields

awx/api/swagger.py (new file, 103 lines)
@@ -0,0 +1,103 @@
import json
import warnings

from coreapi.document import Object, Link

from rest_framework import exceptions
from rest_framework.permissions import AllowAny
from rest_framework.renderers import CoreJSONRenderer
from rest_framework.response import Response
from rest_framework.schemas import SchemaGenerator, AutoSchema as DRFAuthSchema
from rest_framework.views import APIView

from rest_framework_swagger import renderers


class AutoSchema(DRFAuthSchema):

    def get_link(self, path, method, base_url):
        link = super(AutoSchema, self).get_link(path, method, base_url)
        try:
            serializer = self.view.get_serializer()
        except Exception:
            serializer = None
            warnings.warn('{}.get_serializer() raised an exception during '
                          'schema generation. Serializer fields will not be '
                          'generated for {} {}.'
                          .format(self.view.__class__.__name__, method, path))

        link.__dict__['deprecated'] = getattr(self.view, 'deprecated', False)

        # auto-generate a topic/tag for the serializer based on its model
        if hasattr(self.view, 'swagger_topic'):
            link.__dict__['topic'] = str(self.view.swagger_topic).title()
        elif serializer and hasattr(serializer, 'Meta'):
            link.__dict__['topic'] = str(
                serializer.Meta.model._meta.verbose_name_plural
            ).title()
        elif hasattr(self.view, 'model'):
            link.__dict__['topic'] = str(self.view.model._meta.verbose_name_plural).title()
        else:
            warnings.warn('Could not determine a Swagger tag for path {}'.format(path))
        return link

    def get_description(self, path, method):
        self.view._request = self.view.request
        setattr(self.view.request, 'swagger_method', method)
        description = super(AutoSchema, self).get_description(path, method)
        return description


class SwaggerSchemaView(APIView):
    _ignore_model_permissions = True
    exclude_from_schema = True
    permission_classes = [AllowAny]
    renderer_classes = [
        CoreJSONRenderer,
        renderers.OpenAPIRenderer,
        renderers.SwaggerUIRenderer
    ]

    def get(self, request):
        generator = SchemaGenerator(
            title='Ansible Tower API',
            patterns=None,
            urlconf=None
        )
        schema = generator.get_schema(request=request)
        # python core-api doesn't support the deprecation yet, so track it
        # ourselves and return it in a response header
        _deprecated = []

        # By default, DRF OpenAPI serialization places all endpoints in
        # a single node based on their root path (/api). Instead, we want to
        # group them by topic/tag so that they're categorized in the rendered
        # output
        document = schema._data.pop('api')
        for path, node in document.items():
            if isinstance(node, Object):
                for action in node.values():
                    topic = getattr(action, 'topic', None)
                    if topic:
                        schema._data.setdefault(topic, Object())
                        schema._data[topic]._data[path] = node

                    if isinstance(action, Object):
                        for link in action.links.values():
                            if link.deprecated:
                                _deprecated.append(link.url)
            elif isinstance(node, Link):
                topic = getattr(node, 'topic', None)
                if topic:
                    schema._data.setdefault(topic, Object())
                    schema._data[topic]._data[path] = node

        if not schema:
            raise exceptions.ValidationError(
                'The schema generator did not return a schema Document'
            )

        return Response(
            schema,
            headers={'X-Deprecated-Paths': json.dumps(_deprecated)}
        )

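A plausible way to expose the new SwaggerSchemaView from a Django URLconf — the route and URL name here are assumptions for illustration, not taken from this changeset:

```python
# Hypothetical urls.py wiring for the SwaggerSchemaView defined above
# (Django 1.x-style url() routing, matching the codebase of this era).
from django.conf.urls import url

from awx.api.swagger import SwaggerSchemaView

urlpatterns = [
    url(r'^swagger/$', SwaggerSchemaView.as_view(), name='swagger_view'),
]
```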
@@ -1,9 +1,9 @@
 The resulting data structure contains:

     {
-       "count": 99,
-       "next": null,
-       "previous": null,
+        "count": 99,
+        "next": null,
+        "previous": null,
         "results": [
             ...
         ]

@@ -60,6 +60,10 @@ _Added in AWX 1.4_

     ?related__search=findme

+Note: If you want to provide more than one search term, use multiple
+search fields with the same key, like `?related__search=foo&related__search=bar`.
+All search terms with the same key will be ORed together.
+
 ## Filtering

 Any additional query string parameters may be used to filter the list of

@@ -70,7 +74,7 @@ in the specified value should be url-encoded. For example:

     ?field=value%20xyz

 Fields may also span relations, only for fields and relationships defined in
 the database:

     ?other__field=value

@@ -1,14 +0,0 @@
-{% if not version_label_flag or version_label_flag == 'true' %}
-{% if new_in_13 %}> _Added in AWX 1.3_{% endif %}
-{% if new_in_14 %}> _Added in AWX 1.4_{% endif %}
-{% if new_in_145 %}> _Added in Ansible Tower 1.4.5_{% endif %}
-{% if new_in_148 %}> _Added in Ansible Tower 1.4.8_{% endif %}
-{% if new_in_200 %}> _Added in Ansible Tower 2.0.0_{% endif %}
-{% if new_in_220 %}> _Added in Ansible Tower 2.2.0_{% endif %}
-{% if new_in_230 %}> _Added in Ansible Tower 2.3.0_{% endif %}
-{% if new_in_240 %}> _Added in Ansible Tower 2.4.0_{% endif %}
-{% if new_in_300 %}> _Added in Ansible Tower 3.0.0_{% endif %}
-{% if new_in_310 %}> _New in Ansible Tower 3.1.0_{% endif %}
-{% if new_in_320 %}> _New in Ansible Tower 3.2.0_{% endif %}
-{% if new_in_330 %}> _New in Ansible Tower 3.3.0_{% endif %}
-{% endif %}

awx/api/templates/api/ad_hoc_command_relaunch.md (new file, 3 lines)
@@ -0,0 +1,3 @@
Relaunch an Ad Hoc Command:

Make a POST request to this resource to launch a job. If any passwords or variables are required then they should be passed in via POST data. In order to determine what values are required in order to launch a job based on this job template you may make a GET request to this endpoint.

@@ -1,4 +1,5 @@
-Site configuration settings and general information.
+{% ifmeth GET %}
+# Site configuration settings and general information

 Make a GET request to this resource to retrieve the configuration containing
 the following fields (some fields may not be visible to all users):

@@ -11,6 +12,10 @@ the following fields (some fields may not be visible to all users):
 * `license_info`: Information about the current license.
 * `version`: Version of Ansible Tower package installed.
 * `eula`: The current End-User License Agreement
+{% endifmeth %}

+{% ifmeth POST %}
+# Install or update an existing license

 (_New in Ansible Tower 2.0.0_) Make a POST request to this resource as a super
 user to install or update the existing license. The license data itself can

@@ -18,3 +23,11 @@ be POSTed as a normal json data structure.

 (_New in Ansible Tower 2.1.1_) The POST must include a `eula_accepted` boolean
 element indicating acceptance of the End-User License Agreement.
+{% endifmeth %}
+
+{% ifmeth DELETE %}
+# Delete an existing license
+
+(_New in Ansible Tower 2.0.0_) Make a DELETE request to this resource as a super
+user to delete the existing license
+{% endifmeth %}

@@ -1,3 +1 @@
 {{ docstring }}
-
-{% include "api/_new_in_awx.md" %}

@@ -1,3 +1,5 @@
+{% ifmeth POST %}
+# Generate an Auth Token
 Make a POST request to this resource with `username` and `password` fields to
 obtain an authentication token to use for subsequent requests.

@@ -32,6 +34,10 @@ agent that originally obtained it.
 Each request that uses the token for authentication will refresh its expiration
 timestamp and keep it from expiring. A token only expires when it is not used
 for the configured timeout interval (default 1800 seconds).
+{% endifmeth %}

-A DELETE request with the token set will cause the token to be invalidated and
-no further requests can be made with it.
+{% ifmeth DELETE %}
+# Delete an Auth Token
+A DELETE request with the token header set will cause the token to be
+invalidated and no further requests can be made with it.
+{% endifmeth %}

@@ -1,9 +1,13 @@
+{% ifmeth GET %}
 # Retrieve {{ model_verbose_name|title }} Variable Data:

-Make a GET request to this resource to retrieve all variables defined for this
+Make a GET request to this resource to retrieve all variables defined for a
 {{ model_verbose_name }}.
+{% endifmeth %}

+{% ifmeth PUT PATCH %}
 # Update {{ model_verbose_name|title }} Variable Data:

-Make a PUT request to this resource to update variables defined for this
+Make a PUT or PATCH request to this resource to update variables defined for a
 {{ model_verbose_name }}.
+{% endifmeth %}

@@ -38,5 +38,3 @@ Data about failed and successfull hosts by inventory will be given as:
         "id": 2,
         "name": "Test Inventory"
     },
-
-{% include "api/_new_in_awx.md" %}

@@ -1,3 +1,5 @@
+# View Statistics for Job Runs
+
 Make a GET request to this resource to retrieve aggregate statistics about job runs suitable for graphing.

 ## Parameters and Filtering

@@ -33,5 +35,3 @@ Data will be returned in the following format:

 Each element contains an epoch timestamp represented in seconds and a numerical value indicating
 the number of events during that time period
-
-{% include "api/_new_in_awx.md" %}

@@ -1,3 +1 @@
 Make a GET request to this resource to retrieve aggregate statistics for Tower.
-
-{% include "api/_new_in_awx.md" %}

@@ -1,4 +1,4 @@
-# List All {{ model_verbose_name_plural|title }} for this {{ parent_model_verbose_name|title }}:
+# List All {{ model_verbose_name_plural|title }} for {{ parent_model_verbose_name|title|anora }}:

 Make a GET request to this resource to retrieve a list of all
 {{ model_verbose_name_plural }} directly or indirectly belonging to this

@@ -1,9 +1,7 @@
-# List Potential Child Groups for this {{ parent_model_verbose_name|title }}:
+# List Potential Child Groups for {{ parent_model_verbose_name|title|anora }}:

 Make a GET request to this resource to retrieve a list of
 {{ model_verbose_name_plural }} available to be added as children of the
 current {{ parent_model_verbose_name }}.

 {% include "api/_list_common.md" %}
-
-{% include "api/_new_in_awx.md" %}

@@ -1,4 +1,4 @@
-# List All {{ model_verbose_name_plural|title }} for this {{ parent_model_verbose_name|title }}:
+# List All {{ model_verbose_name_plural|title }} for {{ parent_model_verbose_name|title|anora }}:

 Make a GET request to this resource to retrieve a list of all
 {{ model_verbose_name_plural }} of which the selected

@@ -1,3 +1,5 @@
+# List Fact Scans for a Host Specific Host Scan
+
 Make a GET request to this resource to retrieve system tracking data for a particular scan

 You may filter by datetime:

@@ -7,5 +9,3 @@ You may filter by datetime:
 and module

     `?datetime=2015-06-01&module=ansible`
-
-{% include "api/_new_in_awx.md" %}

@@ -1,3 +1,5 @@
+# List Fact Scans for a Host by Module and Date
+
 Make a GET request to this resource to retrieve system tracking scans by module and date/time

 You may filter scan runs using the `from` and `to` properties:

@@ -7,5 +9,3 @@ You may filter scan runs using the `from` and `to` properties:
 You may also filter by module

     `?module=packages`
-
-{% include "api/_new_in_awx.md" %}

awx/api/templates/api/host_insights.md (new file, 1 line)
@@ -0,0 +1 @@
# List Red Hat Insights for a Host

@@ -29,5 +29,3 @@ Response code from this action will be:
 - 202 if some inventory source updates were successful, but some failed
 - 400 if all of the inventory source updates failed
 - 400 if there are no inventory sources in the inventory
-
-{% include "api/_new_in_awx.md" %}

@@ -1,7 +1,9 @@
-# List Root {{ model_verbose_name_plural|title }} for this {{ parent_model_verbose_name|title }}:
+{% ifmeth GET %}
+# List Root {{ model_verbose_name_plural|title }} for {{ parent_model_verbose_name|title|anora }}:

 Make a GET request to this resource to retrieve a list of root (top-level)
 {{ model_verbose_name_plural }} associated with this
 {{ parent_model_verbose_name }}.

 {% include "api/_list_common.md" %}
+{% endifmeth %}

@@ -9,5 +9,3 @@ cancelled. The response will include the following field:
 Make a POST request to this resource to cancel a pending or running inventory
 update. The response status code will be 202 if successful, or 405 if the
 update cannot be canceled.
-
-{% include "api/_new_in_awx.md" %}

@@ -9,5 +9,3 @@ from its inventory source. The response will include the following field:
 Make a POST request to this resource to update the inventory source. If
 successful, the response status code will be 202. If the inventory source is
 not defined or cannot be updated, a 405 status code will be returned.
-
-{% include "api/_new_in_awx.md" %}

@@ -1,4 +1,4 @@
|
||||
# Group Tree for this {{ model_verbose_name|title }}:
|
||||
# Group Tree for {{ model_verbose_name|title|anora }}:
|
||||
|
||||
Make a GET request to this resource to retrieve a hierarchical view of groups
|
||||
associated with the selected {{ model_verbose_name }}.
|
||||
@@ -11,5 +11,3 @@ also containing a list of its children.
|
||||
Each group data structure includes the following fields:
|
||||
|
||||
{% include "api/_result_fields_common.md" %}
|
||||
|
||||
{% include "api/_new_in_awx.md" %}
|
||||
|
||||

@@ -1,10 +1,15 @@
# Cancel Job
{% ifmeth GET %}
# Determine if a Job can be cancelled

Make a GET request to this resource to determine if the job can be cancelled.
The response will include the following field:

* `can_cancel`: Indicates whether this job can be canceled (boolean, read-only)
{% endifmeth %}

{% ifmeth POST %}
# Cancel a Job
Make a POST request to this resource to cancel a pending or running job. The
response status code will be 202 if successful, or 405 if the job cannot be
canceled.
{% endifmeth %}
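In practice a client checks `can_cancel` before issuing the POST. A minimal sketch, assuming the `requests` library and an illustrative job id, token, and URL:

```python
import requests

AWX = 'https://awx.example.com'              # illustrative base URL
HEADERS = {'Authorization': 'Token abc123'}  # illustrative token
url = AWX + '/api/v2/jobs/42/cancel/'        # job id 42 is an assumption

# GET reports whether the job is still cancelable.
if requests.get(url, headers=HEADERS).json().get('can_cancel'):
    resp = requests.post(url, headers=HEADERS)
    print(resp.status_code)  # 202 if the cancel was accepted, 405 otherwise
```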

@@ -23,5 +23,3 @@ Will show only failed plays. Alternatively `false` may be used.
?play__icontains=test

Will filter plays matching the substring `test`

{% include "api/_new_in_awx.md" %}

@@ -25,5 +25,3 @@ Will show only failed plays. Alternatively `false` may be used.
?task__icontains=test

Will filter tasks matching the substring `test`

{% include "api/_new_in_awx.md" %}

@@ -1,3 +1,3 @@
Relaunch a job:
Relaunch a Job:

Make a POST request to this resource to launch a job. If any passwords or variables are required then they should be passed in via POST data. In order to determine what values are required in order to launch a job based on this job template you may make a GET request to this endpoint.

@@ -1,4 +1,5 @@
# Start Job
{% ifmeth GET %}
# Determine if a Job can be started

Make a GET request to this resource to determine if the job can be started and
whether any passwords are required to start the job. The response will include
@@ -7,10 +8,14 @@ the following fields:
* `can_start`: Flag indicating if this job can be started (boolean, read-only)
* `passwords_needed_to_start`: Password names required to start the job (array,
read-only)
{% endifmeth %}

{% ifmeth POST %}
# Start a Job
Make a POST request to this resource to start the job. If any passwords are
required, they must be passed via POST data.

If successful, the response status code will be 202. If any required passwords
are not provided, a 400 status code will be returned. If the job cannot be
started, a 405 status code will be returned.
{% endifmeth %}
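The GET response drives the POST: any name listed in `passwords_needed_to_start` must be supplied in the POST data. A hedged sketch using the `requests` library (URL, token, and job id are illustrative):

```python
import requests

AWX = 'https://awx.example.com'              # illustrative base URL
HEADERS = {'Authorization': 'Token abc123'}  # illustrative token
url = AWX + '/api/v2/jobs/42/start/'         # job id 42 is an assumption

info = requests.get(url, headers=HEADERS).json()
if info.get('can_start'):
    # One entry per password named by the GET response; values are placeholders.
    payload = dict((name, 'secret') for name in info.get('passwords_needed_to_start', []))
    resp = requests.post(url, data=payload, headers=HEADERS)
    print(resp.status_code)  # 202 on success, 400 if passwords missing, 405 otherwise
```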

@@ -1,13 +1,7 @@
{% with 'false' as version_label_flag %}
{% include "api/sub_list_create_api_view.md" %}
{% endwith %}

Labels not associated with any other resources are deleted. A label can become disassociated from a resource as a result of 3 events.

1. A label is explicitly disassociated from a related job template
2. A job is deleted with labels
3. A cleanup job deletes a job with labels

{% with 'true' as version_label_flag %}
{% include "api/_new_in_awx.md" %}
{% endwith %}

@@ -1,8 +1,8 @@
{% ifmeth GET %}
# List {{ model_verbose_name_plural|title }}:

Make a GET request to this resource to retrieve the list of
{{ model_verbose_name_plural }}.

{% include "api/_list_common.md" %}

{% include "api/_new_in_awx.md" %}
{% endifmeth %}

@@ -1,6 +1,6 @@
{% include "api/list_api_view.md" %}

# Create {{ model_verbose_name_plural|title }}:
# Create {{ model_verbose_name|title|anora }}:

Make a POST request to this resource with the following {{ model_verbose_name }}
fields to create a new {{ model_verbose_name }}:
@@ -8,5 +8,3 @@ fields to create a new {{ model_verbose_name }}:
{% with write_only=1 %}
{% include "api/_result_fields_common.md" with serializer_fields=serializer_create_fields %}
{% endwith %}

{% include "api/_new_in_awx.md" %}

@@ -1,4 +1,4 @@
# Retrieve {{ model_verbose_name|title }} Playbooks:

Make GET request to this resource to retrieve a list of playbooks available
for this {{ model_verbose_name }}.
for {{ model_verbose_name|anora }}.

@@ -9,5 +9,3 @@ cancelled. The response will include the following field:
Make a POST request to this resource to cancel a pending or running project
update. The response status code will be 202 if successful, or 405 if the
update cannot be canceled.

{% include "api/_new_in_awx.md" %}

@@ -8,5 +8,3 @@ from its SCM source. The response will include the following field:

Make a POST request to this resource to update the project. If the project
cannot be updated, a 405 status code will be returned.

{% include "api/_new_in_awx.md" %}

@@ -2,11 +2,9 @@
### Note: starting from api v2, this resource object can be accessed via its named URL.
{% endif %}

# Retrieve {{ model_verbose_name|title }}:
# Retrieve {{ model_verbose_name|title|anora }}:

Make GET request to this resource to retrieve a single {{ model_verbose_name }}
record containing the following fields:

{% include "api/_result_fields_common.md" %}

{% include "api/_new_in_awx.md" %}

@@ -2,15 +2,17 @@
### Note: starting from api v2, this resource object can be accessed via its named URL.
{% endif %}

# Retrieve {{ model_verbose_name|title }}:
{% ifmeth GET %}
# Retrieve {{ model_verbose_name|title|anora }}:

Make GET request to this resource to retrieve a single {{ model_verbose_name }}
record containing the following fields:

{% include "api/_result_fields_common.md" %}
{% endifmeth %}

# Delete {{ model_verbose_name|title }}:
{% ifmeth DELETE %}
# Delete {{ model_verbose_name|title|anora }}:

Make a DELETE request to this resource to delete this {{ model_verbose_name }}.

{% include "api/_new_in_awx.md" %}
{% endifmeth %}

@@ -2,14 +2,17 @@
### Note: starting from api v2, this resource object can be accessed via its named URL.
{% endif %}

# Retrieve {{ model_verbose_name|title }}:
{% ifmeth GET %}
# Retrieve {{ model_verbose_name|title|anora }}:

Make GET request to this resource to retrieve a single {{ model_verbose_name }}
record containing the following fields:

{% include "api/_result_fields_common.md" %}
{% endifmeth %}

# Update {{ model_verbose_name|title }}:
{% ifmeth PUT PATCH %}
# Update {{ model_verbose_name|title|anora }}:

Make a PUT or PATCH request to this resource to update this
{{ model_verbose_name }}. The following fields may be modified:
@@ -17,9 +20,12 @@ Make a PUT or PATCH request to this resource to update this
{% with write_only=1 %}
{% include "api/_result_fields_common.md" with serializer_fields=serializer_update_fields %}
{% endwith %}
{% endifmeth %}

{% ifmeth PUT %}
For a PUT request, include **all** fields in the request.
{% endifmeth %}

{% ifmeth PATCH %}
For a PATCH request, include only the fields that are being modified.

{% include "api/_new_in_awx.md" %}
{% endifmeth %}

@@ -2,14 +2,17 @@
### Note: starting from api v2, this resource object can be accessed via its named URL.
{% endif %}

# Retrieve {{ model_verbose_name|title }}:
{% ifmeth GET %}
# Retrieve {{ model_verbose_name|title|anora }}:

Make GET request to this resource to retrieve a single {{ model_verbose_name }}
record containing the following fields:

{% include "api/_result_fields_common.md" %}
{% endifmeth %}

# Update {{ model_verbose_name|title }}:
{% ifmeth PUT PATCH %}
# Update {{ model_verbose_name|title|anora }}:

Make a PUT or PATCH request to this resource to update this
{{ model_verbose_name }}. The following fields may be modified:
@@ -17,13 +20,18 @@ Make a PUT or PATCH request to this resource to update this
{% with write_only=1 %}
{% include "api/_result_fields_common.md" with serializer_fields=serializer_update_fields %}
{% endwith %}
{% endifmeth %}

{% ifmeth PUT %}
For a PUT request, include **all** fields in the request.
{% endifmeth %}

{% ifmeth PATCH %}
For a PATCH request, include only the fields that are being modified.
{% endifmeth %}

# Delete {{ model_verbose_name|title }}:
{% ifmeth DELETE %}
# Delete {{ model_verbose_name|title|anora }}:

Make a DELETE request to this resource to delete this {{ model_verbose_name }}.

{% include "api/_new_in_awx.md" %}
{% endifmeth %}

1
awx/api/templates/api/setting_logging_test.md
Normal file
@@ -0,0 +1 @@
# Test Logging Configuration

@@ -1,9 +1,9 @@
# List {{ model_verbose_name_plural|title }} for this {{ parent_model_verbose_name|title }}:
{% ifmeth GET %}
# List {{ model_verbose_name_plural|title }} for {{ parent_model_verbose_name|title|anora }}:

Make a GET request to this resource to retrieve a list of
{{ model_verbose_name_plural }} associated with the selected
{{ parent_model_verbose_name }}.

{% include "api/_list_common.md" %}

{% include "api/_new_in_awx.md" %}
{% endifmeth %}

@@ -1,6 +1,6 @@
{% include "api/sub_list_api_view.md" %}

# Create {{ model_verbose_name_plural|title }} for this {{ parent_model_verbose_name|title }}:
# Create {{ model_verbose_name|title|anora }} for {{ parent_model_verbose_name|title|anora }}:

Make a POST request to this resource with the following {{ model_verbose_name }}
fields to create a new {{ model_verbose_name }} associated with this
@@ -25,7 +25,7 @@ delete the associated {{ model_verbose_name }}.
}

{% else %}
# Add {{ model_verbose_name_plural|title }} for this {{ parent_model_verbose_name|title }}:
# Add {{ model_verbose_name_plural|title }} for {{ parent_model_verbose_name|title|anora }}:

Make a POST request to this resource with only an `id` field to associate an
existing {{ model_verbose_name }} with this {{ parent_model_verbose_name }}.
@@ -37,5 +37,3 @@ remove the {{ model_verbose_name }} from this {{ parent_model_verbose_name }}
{% if model_verbose_name != "label" %} without deleting the {{ model_verbose_name }}{% endif %}.
{% endif %}
{% endif %}

{% include "api/_new_in_awx.md" %}

6
awx/api/templates/api/sub_list_destroy_api_view.md
Normal file
@@ -0,0 +1,6 @@
{% include "api/sub_list_create_api_view.md" %}

# Delete all {{ model_verbose_name_plural }} of this {{ parent_model_verbose_name|title }}:

Make a DELETE request to this resource to delete all {{ model_verbose_name_plural }} shown in the list.
The {{ parent_model_verbose_name|title }} will not be deleted by this request.

@@ -1,12 +1,16 @@
# List Roles for this Team:
# List Roles for a Team:

{% ifmeth GET %}
Make a GET request to this resource to retrieve a list of roles associated with the selected team.

{% include "api/_list_common.md" %}
{% endifmeth %}

{% ifmeth POST %}
# Associate Roles with this Team:

Make a POST request to this resource to add or remove a role from this team. The following fields may be modified:

* `id`: The Role ID to add to the team. (int, required)
* `disassociate`: Provide if you want to remove the role. (any value, optional)
{% endifmeth %}
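For example, granting and then revoking a role on a team might look like the following sketch (the `requests` library, URL, token, team id 5, and role id 77 are all illustrative assumptions):

```python
import requests

AWX = 'https://awx.example.com'              # illustrative base URL
HEADERS = {'Authorization': 'Token abc123'}  # illustrative token
url = AWX + '/api/v2/teams/5/roles/'         # team id 5 is an assumption

# Associate role 77 with the team, then remove it again.
requests.post(url, json={'id': 77}, headers=HEADERS)
requests.post(url, json={'id': 77, 'disassociate': True}, headers=HEADERS)
```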

@@ -25,5 +25,3 @@ dark background.
Files over {{ settings.STDOUT_MAX_BYTES_DISPLAY|filesizeformat }} (configurable)
will not display in the browser. Use the `txt_download` or `ansi_download`
formats to download the file directly to view it.

{% include "api/_new_in_awx.md" %}
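A download via one of those formats can be scripted directly; a minimal sketch assuming `requests` and an illustrative job id, URL, and token:

```python
import requests

AWX = 'https://awx.example.com'              # illustrative base URL
HEADERS = {'Authorization': 'Token abc123'}  # illustrative token

# Fetch the complete stdout as plain text rather than the browser view.
resp = requests.get(AWX + '/api/v2/jobs/42/stdout/',
                    params={'format': 'txt_download'}, headers=HEADERS)
with open('job_42_stdout.txt', 'wb') as f:
    f.write(resp.content)
```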

@@ -1,3 +1,5 @@
# Retrieve Information about the current User

Make a GET request to retrieve user information about the current user.

One result should be returned containing the following fields:

@@ -1,12 +1,16 @@
# List Roles for this User:
# List Roles for a User:

{% ifmeth GET %}
Make a GET request to this resource to retrieve a list of roles associated with the selected user.

{% include "api/_list_common.md" %}
{% endifmeth %}

{% ifmeth POST %}
# Associate Roles with this User:

Make a POST request to this resource to add or remove a role from this user. The following fields may be modified:

* `id`: The Role ID to add to the user. (int, required)
* `disassociate`: Provide if you want to remove the role. (any value, optional)
{% endifmeth %}

@@ -11,6 +11,7 @@ from awx.api.views import (
CredentialObjectRolesList,
CredentialOwnerUsersList,
CredentialOwnerTeamsList,
CredentialCopy,
)

@@ -22,6 +23,7 @@ urls = [
url(r'^(?P<pk>[0-9]+)/object_roles/$', CredentialObjectRolesList.as_view(), name='credential_object_roles_list'),
url(r'^(?P<pk>[0-9]+)/owner_users/$', CredentialOwnerUsersList.as_view(), name='credential_owner_users_list'),
url(r'^(?P<pk>[0-9]+)/owner_teams/$', CredentialOwnerTeamsList.as_view(), name='credential_owner_teams_list'),
url(r'^(?P<pk>[0-9]+)/copy/$', CredentialCopy.as_view(), name='credential_copy'),
]

__all__ = ['urls']

@@ -20,6 +20,7 @@ from awx.api.views import (
InventoryAccessList,
InventoryObjectRolesList,
InventoryInstanceGroupsList,
InventoryCopy,
)

@@ -40,6 +41,7 @@ urls = [
url(r'^(?P<pk>[0-9]+)/access_list/$', InventoryAccessList.as_view(), name='inventory_access_list'),
url(r'^(?P<pk>[0-9]+)/object_roles/$', InventoryObjectRolesList.as_view(), name='inventory_object_roles_list'),
url(r'^(?P<pk>[0-9]+)/instance_groups/$', InventoryInstanceGroupsList.as_view(), name='inventory_instance_groups_list'),
url(r'^(?P<pk>[0-9]+)/copy/$', InventoryCopy.as_view(), name='inventory_copy'),
]

__all__ = ['urls']

@@ -7,6 +7,7 @@ from awx.api.views import (
InventoryScriptList,
InventoryScriptDetail,
InventoryScriptObjectRolesList,
InventoryScriptCopy,
)

@@ -14,6 +15,7 @@ urls = [
url(r'^$', InventoryScriptList.as_view(), name='inventory_script_list'),
url(r'^(?P<pk>[0-9]+)/$', InventoryScriptDetail.as_view(), name='inventory_script_detail'),
url(r'^(?P<pk>[0-9]+)/object_roles/$', InventoryScriptObjectRolesList.as_view(), name='inventory_script_object_roles_list'),
url(r'^(?P<pk>[0-9]+)/copy/$', InventoryScriptCopy.as_view(), name='inventory_script_copy'),
]

__all__ = ['urls']

@@ -9,6 +9,7 @@ from awx.api.views import (
InventoryUpdateCancel,
InventoryUpdateStdout,
InventoryUpdateNotificationsList,
InventoryUpdateEventsList,
)

@@ -18,6 +19,7 @@ urls = [
url(r'^(?P<pk>[0-9]+)/cancel/$', InventoryUpdateCancel.as_view(), name='inventory_update_cancel'),
url(r'^(?P<pk>[0-9]+)/stdout/$', InventoryUpdateStdout.as_view(), name='inventory_update_stdout'),
url(r'^(?P<pk>[0-9]+)/notifications/$', InventoryUpdateNotificationsList.as_view(), name='inventory_update_notifications_list'),
url(r'^(?P<pk>[0-9]+)/events/$', InventoryUpdateEventsList.as_view(), name='inventory_update_events_list'),
]

__all__ = ['urls']

@@ -19,6 +19,7 @@ from awx.api.views import (
JobTemplateAccessList,
JobTemplateObjectRolesList,
JobTemplateLabelList,
JobTemplateCopy,
)

@@ -41,6 +42,7 @@ urls = [
url(r'^(?P<pk>[0-9]+)/access_list/$', JobTemplateAccessList.as_view(), name='job_template_access_list'),
url(r'^(?P<pk>[0-9]+)/object_roles/$', JobTemplateObjectRolesList.as_view(), name='job_template_object_roles_list'),
url(r'^(?P<pk>[0-9]+)/labels/$', JobTemplateLabelList.as_view(), name='job_template_label_list'),
url(r'^(?P<pk>[0-9]+)/copy/$', JobTemplateCopy.as_view(), name='job_template_copy'),
]

__all__ = ['urls']

@@ -8,6 +8,7 @@ from awx.api.views import (
NotificationTemplateDetail,
NotificationTemplateTest,
NotificationTemplateNotificationList,
NotificationTemplateCopy,
)

@@ -16,6 +17,7 @@ urls = [
url(r'^(?P<pk>[0-9]+)/$', NotificationTemplateDetail.as_view(), name='notification_template_detail'),
url(r'^(?P<pk>[0-9]+)/test/$', NotificationTemplateTest.as_view(), name='notification_template_test'),
url(r'^(?P<pk>[0-9]+)/notifications/$', NotificationTemplateNotificationList.as_view(), name='notification_template_notification_list'),
url(r'^(?P<pk>[0-9]+)/copy/$', NotificationTemplateCopy.as_view(), name='notification_template_copy'),
]

__all__ = ['urls']

@@ -19,10 +19,11 @@ from awx.api.views import (
ProjectNotificationTemplatesSuccessList,
ProjectObjectRolesList,
ProjectAccessList,
ProjectCopy,
)


urls = [
url(r'^$', ProjectList.as_view(), name='project_list'),
url(r'^(?P<pk>[0-9]+)/$', ProjectDetail.as_view(), name='project_detail'),
url(r'^(?P<pk>[0-9]+)/playbooks/$', ProjectPlaybooks.as_view(), name='project_playbooks'),
@@ -39,6 +40,7 @@ urls = [
name='project_notification_templates_success_list'),
url(r'^(?P<pk>[0-9]+)/object_roles/$', ProjectObjectRolesList.as_view(), name='project_object_roles_list'),
url(r'^(?P<pk>[0-9]+)/access_list/$', ProjectAccessList.as_view(), name='project_access_list'),
url(r'^(?P<pk>[0-9]+)/copy/$', ProjectCopy.as_view(), name='project_copy'),
]

__all__ = ['urls']

@@ -10,6 +10,7 @@ from awx.api.views import (
ProjectUpdateStdout,
ProjectUpdateScmInventoryUpdates,
ProjectUpdateNotificationsList,
ProjectUpdateEventsList,
)

@@ -20,6 +21,7 @@ urls = [
url(r'^(?P<pk>[0-9]+)/stdout/$', ProjectUpdateStdout.as_view(), name='project_update_stdout'),
url(r'^(?P<pk>[0-9]+)/scm_inventory_updates/$', ProjectUpdateScmInventoryUpdates.as_view(), name='project_update_scm_inventory_updates'),
url(r'^(?P<pk>[0-9]+)/notifications/$', ProjectUpdateNotificationsList.as_view(), name='project_update_notifications_list'),
url(r'^(?P<pk>[0-9]+)/events/$', ProjectUpdateEventsList.as_view(), name='project_update_events_list'),
]

__all__ = ['urls']

@@ -8,6 +8,7 @@ from awx.api.views import (
SystemJobDetail,
SystemJobCancel,
SystemJobNotificationsList,
SystemJobEventsList
)

@@ -16,6 +17,7 @@ urls = [
url(r'^(?P<pk>[0-9]+)/$', SystemJobDetail.as_view(), name='system_job_detail'),
url(r'^(?P<pk>[0-9]+)/cancel/$', SystemJobCancel.as_view(), name='system_job_cancel'),
url(r'^(?P<pk>[0-9]+)/notifications/$', SystemJobNotificationsList.as_view(), name='system_job_notifications_list'),
url(r'^(?P<pk>[0-9]+)/events/$', SystemJobEventsList.as_view(), name='system_job_events_list'),
]

__all__ = ['urls']

@@ -2,6 +2,7 @@
# All Rights Reserved.

from __future__ import absolute_import, unicode_literals
from django.conf import settings
from django.conf.urls import include, url

from awx.api.views import (
@@ -22,6 +23,8 @@ from awx.api.views import (
JobExtraCredentialsList,
JobTemplateCredentialsList,
JobTemplateExtraCredentialsList,
SchedulePreview,
ScheduleZoneInfo,
)

from .organization import urls as organization_urls
@@ -113,11 +116,18 @@ v2_urls = [
url(r'^jobs/(?P<pk>[0-9]+)/credentials/$', JobCredentialsList.as_view(), name='job_credentials_list'),
url(r'^job_templates/(?P<pk>[0-9]+)/extra_credentials/$', JobTemplateExtraCredentialsList.as_view(), name='job_template_extra_credentials_list'),
url(r'^job_templates/(?P<pk>[0-9]+)/credentials/$', JobTemplateCredentialsList.as_view(), name='job_template_credentials_list'),
url(r'^schedules/preview/$', SchedulePreview.as_view(), name='schedule_rrule'),
url(r'^schedules/zoneinfo/$', ScheduleZoneInfo.as_view(), name='schedule_zoneinfo'),
]

app_name = 'api'
urlpatterns = [
url(r'^$', ApiRootView.as_view(), name='api_root_view'),
url(r'^(?P<version>(v2))/', include(v2_urls)),
url(r'^(?P<version>(v1|v2))/', include(v1_urls))
url(r'^(?P<version>(v1|v2))/', include(v1_urls)),
]
if settings.SETTINGS_MODULE == 'awx.settings.development':
from awx.api.swagger import SwaggerSchemaView
urlpatterns += [
url(r'^swagger/$', SwaggerSchemaView.as_view(), name='swagger_view'),
]

716
awx/api/views.py
File diff suppressed because it is too large
@@ -1,6 +1,7 @@
# Python
import logging
import urlparse
from collections import OrderedDict

# Django
from django.core.validators import URLValidator
@@ -139,6 +140,8 @@ class KeyValueField(DictField):
ret = super(KeyValueField, self).to_internal_value(data)
for value in data.values():
if not isinstance(value, six.string_types + six.integer_types + (float,)):
if isinstance(value, OrderedDict):
value = dict(value)
self.fail('invalid_child', input=value)
return ret

@@ -120,6 +120,9 @@ class SettingsRegistry(object):
def is_setting_read_only(self, setting):
return bool(self._registry.get(setting, {}).get('read_only', False))

def get_setting_category(self, setting):
return self._registry.get(setting, {}).get('category_slug', None)

def get_setting_field(self, setting, mixin_class=None, for_user=False, **kwargs):
from rest_framework.fields import empty
field_kwargs = {}

@@ -1,6 +1,8 @@
# Django REST Framework
from rest_framework import serializers

import six

# Tower
from awx.api.fields import VerbatimField
from awx.api.serializers import BaseSerializer
@@ -45,12 +47,12 @@ class SettingFieldMixin(object):
"""Mixin to use a registered setting field class for API display/validation."""

def to_representation(self, obj):
if getattr(self, 'encrypted', False) and isinstance(obj, basestring) and obj:
if getattr(self, 'encrypted', False) and isinstance(obj, six.string_types) and obj:
return '$encrypted$'
return obj

def to_internal_value(self, value):
if getattr(self, 'encrypted', False) and isinstance(value, basestring) and value.startswith('$encrypted$'):
if getattr(self, 'encrypted', False) and isinstance(value, six.string_types) and value.startswith('$encrypted$'):
raise serializers.SkipField()
obj = super(SettingFieldMixin, self).to_internal_value(value)
return super(SettingFieldMixin, self).to_representation(obj)
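The hunk above swaps the Python-2-only `basestring` for `six.string_types`, so the encrypted-marker check behaves the same under both interpreters. A minimal sketch of that pattern (the helper name is invented for illustration):

```python
import six

def looks_encrypted(value):
    # six.string_types is (str, unicode) on Python 2 and (str,) on Python 3,
    # so the same isinstance check works on both interpreters.
    return isinstance(value, six.string_types) and value.startswith('$encrypted$')

assert looks_encrypted('$encrypted$xyz')
assert not looks_encrypted(42)
```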

@@ -87,8 +89,10 @@ class SettingSingletonSerializer(serializers.Serializer):
if self.instance and not hasattr(self.instance, key):
continue
extra_kwargs = {}
# Make LICENSE read-only here; update via /api/v1/config/ only.
if key == 'LICENSE':
# Make LICENSE and AWX_ISOLATED_KEY_GENERATION read-only here;
# LICENSE is only updated via /api/v1/config/
# AWX_ISOLATED_KEY_GENERATION is only set/unset via the setup playbook
if key in ('LICENSE', 'AWX_ISOLATED_KEY_GENERATION'):
extra_kwargs['read_only'] = True
field = settings_registry.get_setting_field(key, mixin_class=SettingFieldMixin, for_user=bool(category_slug == 'user'), **extra_kwargs)
fields[key] = field

@@ -14,6 +14,7 @@ from django.conf import settings, UserSettingsHolder
from django.core.cache import cache as django_cache
from django.core.exceptions import ImproperlyConfigured
from django.db import ProgrammingError, OperationalError
from django.utils.functional import cached_property

# Django REST Framework
from rest_framework.fields import empty, SkipField
@@ -230,7 +231,8 @@ class SettingsWrapper(UserSettingsHolder):
self.__dict__['cache'] = EncryptedCacheProxy(cache, registry)
self.__dict__['registry'] = registry

def _get_supported_settings(self):
@cached_property
def all_supported_settings(self):
return self.registry.get_registered_settings()
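Replacing the `_get_supported_settings()` helper with a `cached_property` means the registry is consulted once per wrapper instance rather than on every settings access. A small sketch of the behavior, assuming Django is importable (the class and counter are invented for illustration):

```python
from django.utils.functional import cached_property

class Example(object):
    calls = 0

    @cached_property
    def all_supported_settings(self):
        # Computed on first access, then stored on the instance
        # under the same attribute name.
        Example.calls += 1
        return ['AWX_SOME_SETTING', 'LOG_AGGREGATOR_HOST']

e = Example()
e.all_supported_settings
e.all_supported_settings
assert Example.calls == 1  # the second access hit the cache
```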

def _preload_cache(self):
@@ -273,7 +275,7 @@ class SettingsWrapper(UserSettingsHolder):
setting_ids[setting.key] = setting.id
try:
value = decrypt_field(setting, 'value')
except ValueError, e:
except ValueError as e:
#TODO: Remove in Tower 3.3
logger.debug('encountered error decrypting field: %s - attempting fallback to old', e)
value = old_decrypt_field(setting, 'value')
@@ -382,7 +384,7 @@ class SettingsWrapper(UserSettingsHolder):

def __getattr__(self, name):
value = empty
if name in self._get_supported_settings():
if name in self.all_supported_settings:
with _log_database_error():
value = self._get_local(name)
if value is not empty:
@@ -414,7 +416,7 @@ class SettingsWrapper(UserSettingsHolder):
# post_save handler will delete from cache when changed.

def __setattr__(self, name, value):
if name in self._get_supported_settings():
if name in self.all_supported_settings:
with _log_database_error():
self._set_local(name, value)
else:
@@ -430,7 +432,7 @@ class SettingsWrapper(UserSettingsHolder):
# pre_delete handler will delete from cache.

def __delattr__(self, name):
if name in self._get_supported_settings():
if name in self.all_supported_settings:
with _log_database_error():
self._del_local(name)
else:
@@ -440,7 +442,7 @@ class SettingsWrapper(UserSettingsHolder):
keys = []
with _log_database_error():
for setting in Setting.objects.filter(
key__in=self._get_supported_settings(), user__isnull=True):
key__in=self.all_supported_settings, user__isnull=True):
# Skip returning settings that have been overridden but are
# considered to be "not set".
if setting.value is None and SETTING_CACHE_NOTSET == SETTING_CACHE_NONE:
@@ -454,7 +456,7 @@ class SettingsWrapper(UserSettingsHolder):

def is_overridden(self, setting):
set_locally = False
if setting in self._get_supported_settings():
if setting in self.all_supported_settings:
with _log_database_error():
set_locally = Setting.objects.filter(key=setting, user__isnull=True).exists()
set_on_default = getattr(self.default_settings, 'is_overridden', lambda s: False)(setting)

@@ -6,14 +6,16 @@ import glob
import os
import shutil

# RedBaron
from redbaron import RedBaron, indent
import six

__all__ = ['comment_assignments']
# AWX
from awx.conf.registry import settings_registry

__all__ = ['comment_assignments', 'conf_to_dict']


def comment_assignments(patterns, assignment_names, dry_run=True, backup_suffix='.old'):
if isinstance(patterns, basestring):
if isinstance(patterns, six.string_types):
patterns = [patterns]
diffs = []
for pattern in patterns:
@@ -30,7 +32,9 @@ def comment_assignments(patterns, assignment_names, dry_run=True, backup_suffix=

def comment_assignments_in_file(filename, assignment_names, dry_run=True, backup_filename=None):
if isinstance(assignment_names, basestring):
from redbaron import RedBaron, indent

if isinstance(assignment_names, six.string_types):
assignment_names = [assignment_names]
else:
assignment_names = assignment_names[:]
@@ -103,6 +107,13 @@ def comment_assignments_in_file(filename, assignment_names, dry_run=True, backup
return '\n'.join(diff_lines)


def conf_to_dict(obj):
return {
'category': settings_registry.get_setting_category(obj.key),
'name': obj.key,
}


if __name__ == '__main__':
pattern = os.path.join(os.path.dirname(__file__), '..', 'settings', 'local_*.py')
diffs = comment_assignments(pattern, ['AUTH_LDAP_ORGANIZATION_MAP'])
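The new `conf_to_dict` helper pairs a setting's key with the category slug now exposed by `SettingsRegistry.get_setting_category`. A hedged sketch of the intended shape (the stand-in class and category value are assumptions):

```python
class FakeSetting(object):
    # Stand-in for awx.conf.models.Setting; only .key is consulted here.
    def __init__(self, key):
        self.key = key

# If 'AUTH_LDAP_ORGANIZATION_MAP' were registered under the 'ldap' category
# slug, this would print {'category': 'ldap', 'name': 'AUTH_LDAP_ORGANIZATION_MAP'}.
print(conf_to_dict(FakeSetting('AUTH_LDAP_ORGANIZATION_MAP')))
```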

@@ -21,7 +21,7 @@ from awx.api.generics import * # noqa
from awx.api.permissions import IsSuperUser
from awx.api.versioning import reverse, get_request_version
from awx.main.utils import * # noqa
from awx.main.utils.handlers import BaseHTTPSHandler, LoggingConnectivityException
from awx.main.utils.handlers import BaseHTTPSHandler, UDPHandler, LoggingConnectivityException
from awx.main.tasks import handle_setting_changes
from awx.conf.license import get_licensed_features
from awx.conf.models import Setting
@@ -44,7 +44,6 @@ class SettingCategoryList(ListAPIView):
model = Setting # Not exactly, but needed for the view.
serializer_class = SettingCategorySerializer
filter_backends = []
new_in_310 = True
view_name = _('Setting Categories')

def get_queryset(self):
@@ -69,7 +68,6 @@ class SettingSingletonDetail(RetrieveUpdateDestroyAPIView):
model = Setting # Not exactly, but needed for the view.
serializer_class = SettingSingletonSerializer
filter_backends = []
new_in_310 = True
view_name = _('Setting Detail')

def get_queryset(self):
@@ -170,7 +168,6 @@ class SettingLoggingTest(GenericAPIView):
serializer_class = SettingSingletonSerializer
permission_classes = (IsSuperUser,)
filter_backends = []
new_in_320 = True

def post(self, request, *args, **kwargs):
defaults = dict()
@@ -202,7 +199,11 @@ class SettingLoggingTest(GenericAPIView):
for k, v in serializer.validated_data.items():
setattr(mock_settings, k, v)
mock_settings.LOG_AGGREGATOR_LEVEL = 'DEBUG'
BaseHTTPSHandler.perform_test(mock_settings)
if mock_settings.LOG_AGGREGATOR_PROTOCOL.upper() == 'UDP':
UDPHandler.perform_test(mock_settings)
return Response(status=status.HTTP_201_CREATED)
else:
BaseHTTPSHandler.perform_test(mock_settings)
except LoggingConnectivityException as e:
return Response({'error': str(e)}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
return Response(status=status.HTTP_200_OK)
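With this change the test view answers 201 for UDP (the datagram is sent but delivery cannot be confirmed) and 200 for a verified HTTPS round trip. An illustrative client call, assuming `requests`, a superuser token, and an illustrative target host; the setting names follow the `LOG_AGGREGATOR_*` family used above:

```python
import requests

AWX = 'https://awx.example.com'              # illustrative base URL
HEADERS = {'Authorization': 'Token abc123'}  # must belong to a superuser

resp = requests.post(
    AWX + '/api/v2/settings/logging/test/',
    json={'LOG_AGGREGATOR_HOST': 'logs.example.com',  # illustrative target
          'LOG_AGGREGATOR_PORT': 514,
          'LOG_AGGREGATOR_PROTOCOL': 'udp'},
    headers=HEADERS,
)
# 201: UDP packet sent (unverified); 200: HTTPS test succeeded;
# 500: connectivity error reported by the handler.
print(resp.status_code)
```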

@@ -29,6 +29,8 @@ import threading
import uuid
import memcache

from six.moves import xrange

__all__ = ['event_context']


@@ -123,6 +125,8 @@ class EventContext(object):
event_data['job_id'] = int(os.getenv('JOB_ID', '0'))
if os.getenv('AD_HOC_COMMAND_ID', ''):
event_data['ad_hoc_command_id'] = int(os.getenv('AD_HOC_COMMAND_ID', '0'))
if os.getenv('PROJECT_UPDATE_ID', ''):
event_data['project_update_id'] = int(os.getenv('PROJECT_UPDATE_ID', '0'))
event_data.setdefault('pid', os.getpid())
event_data.setdefault('uuid', str(uuid.uuid4()))
event_data.setdefault('created', datetime.datetime.utcnow().isoformat())
@@ -145,7 +149,7 @@ class EventContext(object):
event_data['res'] = {}
event_dict = dict(event=event, event_data=event_data)
for key in event_data.keys():
if key in ('job_id', 'ad_hoc_command_id', 'uuid', 'parent_uuid', 'created',):
if key in ('job_id', 'ad_hoc_command_id', 'project_update_id', 'uuid', 'parent_uuid', 'created',):
event_dict[key] = event_data.pop(key)
elif key in ('verbosity', 'pid'):
event_dict[key] = event_data[key]

@@ -25,4 +25,5 @@ import ansible

# Because of the way Ansible loads plugins, it's not possible to import
# ansible.plugins.callback.minimal when being loaded as the minimal plugin. Ugh.
execfile(os.path.join(os.path.dirname(ansible.__file__), 'plugins', 'callback', 'minimal.py'))
with open(os.path.join(os.path.dirname(ansible.__file__), 'plugins', 'callback', 'minimal.py')) as in_file:
exec(in_file.read())

@@ -18,7 +18,11 @@
from __future__ import (absolute_import, division, print_function)

# Python
import codecs
import contextlib
import json
import os
import stat
import sys
import uuid
from copy import copy
@@ -292,10 +296,22 @@ class BaseCallbackModule(CallbackBase):
failures=stats.failures,
ok=stats.ok,
processed=stats.processed,
skipped=stats.skipped,
artifact_data=stats.custom.get('_run', {}) if hasattr(stats, 'custom') else {}
skipped=stats.skipped
)

# write custom set_stat artifact data to the local disk so that it can
# be persisted by awx after the process exits
custom_artifact_data = stats.custom.get('_run', {}) if hasattr(stats, 'custom') else {}
if custom_artifact_data:
# create the directory for custom stats artifacts to live in (if it doesn't exist)
custom_artifacts_dir = os.path.join(os.getenv('AWX_PRIVATE_DATA_DIR'), 'artifacts')
os.makedirs(custom_artifacts_dir, mode=stat.S_IXUSR + stat.S_IWUSR + stat.S_IRUSR)

custom_artifacts_path = os.path.join(custom_artifacts_dir, 'custom')
with codecs.open(custom_artifacts_path, 'w', encoding='utf-8') as f:
os.chmod(custom_artifacts_path, stat.S_IRUSR | stat.S_IWUSR)
json.dump(custom_artifact_data, f)

with self.capture_event_data('playbook_on_stats', **event_data):
super(BaseCallbackModule, self).v2_playbook_on_stats(stats)

@@ -7,7 +7,9 @@ from collections import OrderedDict
import json
import mock
import os
import shutil
import sys
import tempfile

import pytest

@@ -259,3 +261,26 @@ def test_callback_plugin_strips_task_environ_variables(executor, cache, playbook
assert len(cache)
for event in cache.values():
assert os.environ['PATH'] not in json.dumps(event)


@pytest.mark.parametrize('playbook', [
{'custom_set_stat.yml': '''
- name: custom set_stat calls should persist to the local disk so awx can save them
  connection: local
  hosts: all
  tasks:
    - set_stats:
        data:
          foo: "bar"
'''}, # noqa
])
def test_callback_plugin_saves_custom_stats(executor, cache, playbook):
try:
private_data_dir = tempfile.mkdtemp()
with mock.patch.dict(os.environ, {'AWX_PRIVATE_DATA_DIR': private_data_dir}):
executor.run()
artifacts_path = os.path.join(private_data_dir, 'artifacts', 'custom')
with open(artifacts_path, 'r') as f:
assert json.load(f) == {'foo': 'bar'}
finally:
shutil.rmtree(os.path.join(private_data_dir))

File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large

@@ -12,6 +12,7 @@ from django.db.models import Q, Prefetch
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.utils.translation import ugettext_lazy as _
from django.core.exceptions import ObjectDoesNotExist

# Django REST Framework
from rest_framework.exceptions import ParseError, PermissionDenied, ValidationError
@@ -31,7 +32,7 @@ from awx.conf.license import LicenseForbids, feature_enabled

__all__ = ['get_user_queryset', 'check_user_access', 'check_user_access_with_errors',
'user_accessible_objects', 'consumer_access',
'user_admin_role', 'StateConflict',]
'user_admin_role', 'ActiveJobConflict',]

logger = logging.getLogger('awx.main.access')

@@ -71,9 +72,15 @@ def get_object_from_data(field, Model, data, obj=None):
raise ParseError(_("Bad data found in related field %s." % field))


class StateConflict(ValidationError):
class ActiveJobConflict(ValidationError):
status_code = 409

def __init__(self, active_jobs):
super(ActiveJobConflict, self).__init__({
"conflict": _("Resource is being used by running jobs."),
"active_jobs": active_jobs
})
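Folding the payload into the exception's `__init__` guarantees every caller emits the same 409 body. Roughly, a blocked delete now surfaces to API clients as:

```python
# Sketch: the JSON body DRF renders for ActiveJobConflict (HTTP 409).
# The job type and id below are illustrative.
conflict_body = {
    "conflict": "Resource is being used by running jobs.",
    "active_jobs": [{"type": "job", "id": 123}],
}
```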


def register_access(model_class, access_class):
access_registry[model_class] = access_class

@@ -301,7 +308,7 @@ class BaseAccess(object):
if check_expiration and validation_info.get('time_remaining', None) is None:
raise PermissionDenied(_("License is missing."))
if check_expiration and validation_info.get("grace_period_remaining") <= 0:
raise PermissionDenied(_("License has expired."))
logger.error(_("License has expired."))

free_instances = validation_info.get('free_instances', 0)
available_instances = validation_info.get('available_instances', 0)
@@ -309,11 +316,11 @@ class BaseAccess(object):
if add_host_name:
host_exists = Host.objects.filter(name=add_host_name).exists()
if not host_exists and free_instances == 0:
raise PermissionDenied(_("License count of %s instances has been reached.") % available_instances)
logger.error(_("License count of %s instances has been reached.") % available_instances)
elif not host_exists and free_instances < 0:
raise PermissionDenied(_("License count of %s instances has been exceeded.") % available_instances)
logger.error(_("License count of %s instances has been exceeded.") % available_instances)
elif not add_host_name and free_instances < 0:
raise PermissionDenied(_("Host count exceeds available instances."))
logger.error(_("Host count exceeds available instances."))

if feature is not None:
if "features" in validation_info and not validation_info["features"].get(feature, False):
@@ -417,6 +424,18 @@ class InstanceAccess(BaseAccess):
return Instance.objects.filter(
rampart_groups__in=self.user.get_queryset(InstanceGroup)).distinct()


def can_attach(self, obj, sub_obj, relationship, data,
skip_sub_obj_read_check=False):
if relationship == 'rampart_groups' and isinstance(sub_obj, InstanceGroup):
return self.user.is_superuser
return super(InstanceAccess, self).can_attach(
obj, sub_obj, relationship, data,
skip_sub_obj_read_check=skip_sub_obj_read_check)

def can_unattach(self, obj, sub_obj, relationship, data=None):
if relationship == 'rampart_groups' and isinstance(sub_obj, InstanceGroup):
return self.user.is_superuser
return super(InstanceAccess, self).can_unattach(obj, sub_obj, relationship, data=data)

def can_add(self, data):
return False

@@ -437,13 +456,13 @@ class InstanceGroupAccess(BaseAccess):
organization__in=Organization.accessible_pk_qs(self.user, 'admin_role'))

def can_add(self, data):
return False
return self.user.is_superuser

def can_change(self, obj, data):
return False
return self.user.is_superuser

def can_delete(self, obj):
return False
return self.user.is_superuser


class UserAccess(BaseAccess):
@@ -568,8 +587,7 @@ class OrganizationAccess(BaseAccess):
active_jobs.extend([dict(type="inventory_update", id=o.id)
for o in InventoryUpdate.objects.filter(inventory_source__inventory__organization=obj, status__in=ACTIVE_STATES)])
if len(active_jobs) > 0:
raise StateConflict({"conflict": _("Resource is being used by running jobs"),
"active_jobs": active_jobs})
raise ActiveJobConflict(active_jobs)
return True

def can_attach(self, obj, sub_obj, relationship, *args, **kwargs):
@@ -590,6 +608,7 @@ class InventoryAccess(BaseAccess):
I can see inventory when:
- I'm a superuser.
- I'm an org admin of the inventory's org.
- I'm an inventory admin of the inventory's org.
- I have read, write or admin permissions on it.
I can change inventory when:
- I'm a superuser.
@@ -623,9 +642,9 @@ class InventoryAccess(BaseAccess):
def can_add(self, data):
# If no data is specified, just checking for generic add permission?
if not data:
return Organization.accessible_objects(self.user, 'admin_role').exists()
return Organization.accessible_objects(self.user, 'inventory_admin_role').exists()

return self.check_related('organization', Organization, data)
return self.check_related('organization', Organization, data, role_field='inventory_admin_role')

@check_superuser
def can_change(self, obj, data):
@@ -641,7 +660,7 @@ class InventoryAccess(BaseAccess):
# Verify that the user has access to the new organization if moving an
# inventory to a new organization. Otherwise, just check for admin permission.
return (
self.check_related('organization', Organization, data, obj=obj,
self.check_related('organization', Organization, data, obj=obj, role_field='inventory_admin_role',
mandatory=org_admin_mandatory) and
self.user in obj.admin_role
)
@@ -662,8 +681,7 @@ class InventoryAccess(BaseAccess):
active_jobs.extend([dict(type="ad_hoc_command", id=o.id)
for o in AdHocCommand.objects.filter(inventory=obj, status__in=ACTIVE_STATES)])
if len(active_jobs) > 0:
raise StateConflict({"conflict": _("Resource is being used by running jobs"),
"active_jobs": active_jobs})
raise ActiveJobConflict(active_jobs)
return True

def can_run_ad_hoc_commands(self, obj):
@@ -788,8 +806,7 @@ class GroupAccess(BaseAccess):
active_jobs.extend([dict(type="inventory_update", id=o.id)
for o in InventoryUpdate.objects.filter(inventory_source__in=obj.inventory_sources.all(), status__in=ACTIVE_STATES)])
if len(active_jobs) > 0:
raise StateConflict({"conflict": _("Resource is being used by running jobs"),
"active_jobs": active_jobs})
raise ActiveJobConflict(active_jobs)
return True

def can_start(self, obj, validate_license=True):
@@ -839,8 +856,7 @@ class InventorySourceAccess(BaseAccess):
return False
active_jobs_qs = InventoryUpdate.objects.filter(inventory_source=obj, status__in=ACTIVE_STATES)
if active_jobs_qs.exists():
raise StateConflict({"conflict": _("Resource is being used by running jobs"),
"active_jobs": [dict(type="inventory_update", id=o.id) for o in active_jobs_qs.all()]})
raise ActiveJobConflict([dict(type="inventory_update", id=o.id) for o in active_jobs_qs.all()])
return True

@check_superuser
@@ -930,8 +946,12 @@ class CredentialAccess(BaseAccess):
- I'm a superuser.
- It's a user credential and it's my credential.
- It's a user credential and I'm an admin of an organization where that
user is a member of admin of the organization.
user is a member.
- It's a user credential and I'm a credential_admin of an organization
where that user is a member.
- It's a team credential and I'm an admin of the team's organization.
- It's a team credential and I'm a credential admin of the team's
organization.
- It's a team credential and I'm a member of the team.
I can change/delete when:
- I'm a superuser.
@@ -943,7 +963,8 @@ class CredentialAccess(BaseAccess):
model = Credential
select_related = ('created_by', 'modified_by',)
prefetch_related = ('admin_role', 'use_role', 'read_role',
'admin_role__parents', 'admin_role__members',)
'admin_role__parents', 'admin_role__members',
'credential_type', 'organization')

def filtered_queryset(self):
return self.model.accessible_objects(self.user, 'read_role')
@@ -964,7 +985,8 @@ class CredentialAccess(BaseAccess):
return check_user_access(self.user, Team, 'change', team_obj, None)
if data and data.get('organization', None):
organization_obj = get_object_from_data('organization', Organization, data)
return check_user_access(self.user, Organization, 'change', organization_obj, None)
return any([check_user_access(self.user, Organization, 'change', organization_obj, None),
self.user in organization_obj.credential_admin_role])
return False

@check_superuser
@@ -975,7 +997,7 @@ class CredentialAccess(BaseAccess):
def can_change(self, obj, data):
if not obj:
return False
return self.user in obj.admin_role and self.check_related('organization', Organization, data, obj=obj)
return self.user in obj.admin_role and self.check_related('organization', Organization, data, obj=obj, role_field='credential_admin_role')

def can_delete(self, obj):
# Unassociated credentials may be marked deleted by anyone, though we
@@ -1051,6 +1073,7 @@ class ProjectAccess(BaseAccess):
I can see projects when:
- I am a superuser.
- I am an admin in an organization associated with the project.
- I am a project admin in an organization associated with the project.
- I am a user in an organization associated with the project.
- I am on a team associated with the project.
- I have been explicitly granted permission to run/check jobs using the
@@ -1071,12 +1094,12 @@ class ProjectAccess(BaseAccess):
@check_superuser
def can_add(self, data):
if not data: # So the browseable API will work
return Organization.accessible_objects(self.user, 'admin_role').exists()
return self.check_related('organization', Organization, data, mandatory=True)
return Organization.accessible_objects(self.user, 'project_admin_role').exists()
return self.check_related('organization', Organization, data, role_field='project_admin_role', mandatory=True)

@check_superuser
def can_change(self, obj, data):
if not self.check_related('organization', Organization, data, obj=obj):
if not self.check_related('organization', Organization, data, obj=obj, role_field='project_admin_role'):
return False
return self.user in obj.admin_role
@@ -1090,8 +1113,7 @@ class ProjectAccess(BaseAccess):
active_jobs.extend([dict(type="project_update", id=o.id)
for o in ProjectUpdate.objects.filter(project=obj, status__in=ACTIVE_STATES)])
if len(active_jobs) > 0:
raise StateConflict({"conflict": _("Resource is being used by running jobs"),
"active_jobs": active_jobs})
raise ActiveJobConflict(active_jobs)
return True

@check_superuser
@@ -1124,8 +1146,11 @@ class ProjectUpdateAccess(BaseAccess):

def can_start(self, obj, validate_license=True):
# for relaunching
if obj and obj.project:
return self.user in obj.project.update_role
try:
if obj and obj.project:
return self.user in obj.project.update_role
except ObjectDoesNotExist:
pass
return False

@check_superuser
@@ -1142,7 +1167,11 @@ class JobTemplateAccess(BaseAccess):
model = JobTemplate
select_related = ('created_by', 'modified_by', 'inventory', 'project',
'next_schedule',)
prefetch_related = ('credentials__credential_type',)
prefetch_related = (
'instance_groups',
'credentials__credential_type',
Prefetch('labels', queryset=Label.objects.all().order_by('name')),
)

def filtered_queryset(self):
return self.model.accessible_objects(self.user, 'read_role')
@@ -1152,6 +1181,7 @@ class JobTemplateAccess(BaseAccess):
a user can create a job template if
- they are a superuser
- an org admin of any org that the project is a member
- if they are a project_admin for any org that project is a member of
- if they have user or team
based permissions tying the project to the inventory source for the
given action as well as the 'create' deploy permission.
@@ -1265,8 +1295,7 @@ class JobTemplateAccess(BaseAccess):
active_jobs = [dict(type="job", id=o.id)
for o in obj.jobs.filter(status__in=ACTIVE_STATES)]
if len(active_jobs) > 0:
raise StateConflict({"conflict": _("Resource is being used by running jobs"),
"active_jobs": active_jobs})
raise ActiveJobConflict(active_jobs)
return True

@check_superuser
@@ -1305,7 +1334,7 @@ class JobAccess(BaseAccess):

model = Job
select_related = ('created_by', 'modified_by', 'job_template', 'inventory',
'project', 'job_template',)
'project', 'project_update',)
prefetch_related = (
'unified_job_template',
'instance_group',
@@ -1411,7 +1440,7 @@ class JobAccess(BaseAccess):
elif not jt_access:
return False

org_access = obj.inventory and self.user in obj.inventory.organization.admin_role
org_access = obj.inventory and self.user in obj.inventory.organization.inventory_admin_role
project_access = obj.project is None or self.user in obj.project.admin_role
credential_access = all([self.user in cred.use_role for cred in obj.credentials.all()])

@@ -1704,13 +1733,14 @@ class WorkflowJobTemplateAccess(BaseAccess):
Users who are able to create deploy jobs can also run normal and check (dry run) jobs.
'''
if not data: # So the browseable API will work
return Organization.accessible_objects(self.user, 'admin_role').exists()
return Organization.accessible_objects(self.user, 'workflow_admin_role').exists()

# will check this if surveys are added to WFJT
if 'survey_enabled' in data and data['survey_enabled']:
self.check_license(feature='surveys')

return self.check_related('organization', Organization, data, mandatory=True)
return self.check_related('organization', Organization, data, role_field='workflow_admin_role',
mandatory=True)

def can_copy(self, obj):
if self.save_messages:
@@ -1737,7 +1767,8 @@ class WorkflowJobTemplateAccess(BaseAccess):
if missing_inventories:
self.messages['inventories_unable_to_copy'] = missing_inventories

return self.check_related('organization', Organization, {'reference_obj': obj}, mandatory=True)
return self.check_related('organization', Organization, {'reference_obj': obj}, role_field='workflow_admin_role',
mandatory=True)
|
||||
def can_start(self, obj, validate_license=True):
|
||||
if validate_license:
|
||||
@@ -1762,7 +1793,8 @@ class WorkflowJobTemplateAccess(BaseAccess):
|
||||
if self.user.is_superuser:
|
||||
return True
|
||||
|
||||
return self.check_related('organization', Organization, data, obj=obj) and self.user in obj.admin_role
|
||||
return (self.check_related('organization', Organization, data, role_field='workflow_admin_field', obj=obj) and
|
||||
self.user in obj.admin_role)
|
||||
|
||||
def can_delete(self, obj):
|
||||
is_delete_allowed = self.user.is_superuser or self.user in obj.admin_role
|
||||
@@ -1771,8 +1803,7 @@ class WorkflowJobTemplateAccess(BaseAccess):
|
||||
active_jobs = [dict(type="workflow_job", id=o.id)
|
||||
for o in obj.workflow_jobs.filter(status__in=ACTIVE_STATES)]
|
||||
if len(active_jobs) > 0:
|
||||
raise StateConflict({"conflict": _("Resource is being used by running jobs"),
|
||||
"active_jobs": active_jobs})
|
||||
raise ActiveJobConflict(active_jobs)
|
||||
return True
|
||||
|
||||
|
||||
@@ -1804,7 +1835,7 @@ class WorkflowJobAccess(BaseAccess):
|
||||
def can_delete(self, obj):
|
||||
return (obj.workflow_job_template and
|
||||
obj.workflow_job_template.organization and
|
||||
self.user in obj.workflow_job_template.organization.admin_role)
|
||||
self.user in obj.workflow_job_template.organization.workflow_admin_role)
|
||||
|
||||
def get_method_capability(self, method, obj, parent_obj):
|
||||
if method == 'start':
|
||||
@@ -1979,6 +2010,64 @@ class JobEventAccess(BaseAccess):
return False


class ProjectUpdateEventAccess(BaseAccess):
'''
I can see project update event records whenever I can access the project update
'''

model = ProjectUpdateEvent

def filtered_queryset(self):
return self.model.objects.filter(
Q(project_update__in=ProjectUpdate.accessible_pk_qs(self.user, 'read_role')))

def can_add(self, data):
return False

def can_change(self, obj, data):
return False

def can_delete(self, obj):
return False


class InventoryUpdateEventAccess(BaseAccess):
'''
I can see inventory update event records whenever I can access the inventory update
'''

model = InventoryUpdateEvent

def filtered_queryset(self):
return self.model.objects.filter(
Q(inventory_update__in=InventoryUpdate.accessible_pk_qs(self.user, 'read_role')))

def can_add(self, data):
return False

def can_change(self, obj, data):
return False

def can_delete(self, obj):
return False


class SystemJobEventAccess(BaseAccess):
'''
I can only see/manage System Job events if I'm a super user
'''
model = SystemJobEvent

def can_add(self, data):
return False

def can_change(self, obj, data):
return False

def can_delete(self, obj):
return False


class UnifiedJobTemplateAccess(BaseAccess):
'''
I can see a unified job template whenever I can see the same project,
@@ -2081,13 +2170,9 @@ class ScheduleAccess(BaseAccess):
     prefetch_related = ('unified_job_template', 'credentials',)

     def filtered_queryset(self):
-        qs = self.model.objects.all()
-
-        unified_pk_qs = UnifiedJobTemplate.accessible_pk_qs(self.user, 'read_role')
-        inv_src_qs = InventorySource.objects.filter(inventory_id=Inventory._accessible_pk_qs(Inventory, self.user, 'read_role'))
-        return qs.filter(
-            Q(unified_job_template_id__in=unified_pk_qs) |
-            Q(unified_job_template_id__in=inv_src_qs.values_list('pk', flat=True)))
+        return self.model.objects.filter(
+            unified_job_template__in=UnifiedJobTemplateAccess(self.user).filtered_queryset()
+        )

     @check_superuser
     def can_add(self, data):
@@ -2130,7 +2215,7 @@ class NotificationTemplateAccess(BaseAccess):

     def filtered_queryset(self):
         return self.model.objects.filter(
-            Q(organization__in=self.user.admin_of_organizations) |
+            Q(organization__in=Organization.accessible_objects(self.user, 'notification_admin_role')) |
             Q(organization__in=self.user.auditor_of_organizations)
         ).distinct()

@@ -2138,22 +2223,22 @@ class NotificationTemplateAccess(BaseAccess):
         if self.user.is_superuser or self.user.is_system_auditor:
             return True
         if obj.organization is not None:
-            if self.user in obj.organization.admin_role or self.user in obj.organization.auditor_role:
+            if self.user in obj.organization.notification_admin_role or self.user in obj.organization.auditor_role:
                 return True
         return False

     @check_superuser
     def can_add(self, data):
         if not data:
-            return Organization.accessible_objects(self.user, 'admin_role').exists()
-        return self.check_related('organization', Organization, data, mandatory=True)
+            return Organization.accessible_objects(self.user, 'notification_admin_role').exists()
+        return self.check_related('organization', Organization, data, role_field='notification_admin_role', mandatory=True)

     @check_superuser
     def can_change(self, obj, data):
         if obj.organization is None:
             # only superusers are allowed to edit orphan notification templates
             return False
-        return self.check_related('organization', Organization, data, obj=obj, mandatory=True)
+        return self.check_related('organization', Organization, data, obj=obj, role_field='notification_admin_role', mandatory=True)

     def can_admin(self, obj, data):
         return self.can_change(obj, data)

@@ -2165,7 +2250,7 @@ class NotificationTemplateAccess(BaseAccess):
     def can_start(self, obj, validate_license=True):
         if obj.organization is None:
             return False
-        return self.user in obj.organization.admin_role
+        return self.user in obj.organization.notification_admin_role


 class NotificationAccess(BaseAccess):
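Aside (not part of the diff): the recurring pattern in these hunks swaps the blanket organization admin_role for the scoped notification_admin_role. A minimal sketch of the rule being enforced, assuming AWX's usual "user in role" membership test; this is not the actual BaseAccess.check_related implementation:

    # Hedged sketch: the acting user must hold the named role on the
    # object's organization for the operation to be permitted.
    def may_manage_notification_templates(user, organization):
        return user in organization.notification_admin_role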
@@ -2177,7 +2262,7 @@ class NotificationAccess(BaseAccess):

     def filtered_queryset(self):
         return self.model.objects.filter(
-            Q(notification_template__organization__in=self.user.admin_of_organizations) |
+            Q(notification_template__organization__in=Organization.accessible_objects(self.user, 'notification_admin_role')) |
             Q(notification_template__organization__in=self.user.auditor_of_organizations)
         ).distinct()

@@ -5,7 +5,7 @@ import re

 from django.utils.translation import ugettext_lazy as _

-CLOUD_PROVIDERS = ('azure_rm', 'ec2', 'gce', 'vmware', 'openstack', 'satellite6', 'cloudforms')
+CLOUD_PROVIDERS = ('azure_rm', 'ec2', 'gce', 'vmware', 'openstack', 'rhv', 'satellite6', 'cloudforms', 'tower')
 SCHEDULEABLE_PROVIDERS = CLOUD_PROVIDERS + ('custom', 'scm',)
 PRIVILEGE_ESCALATION_METHODS = [
     ('sudo', _('Sudo')), ('su', _('Su')), ('pbrun', _('Pbrun')), ('pfexec', _('Pfexec')),

@@ -1,24 +1,36 @@
-class AwxTaskError(Exception):
-    """Base exception for errors in unified job runs"""
-    def __init__(self, task, message=None):
+# Copyright (c) 2018 Ansible by Red Hat
+# All Rights Reserved.
+
+# Celery does not respect exception type when using a serializer different than pickle;
+# and awx uses the json serializer
+# https://github.com/celery/celery/issues/3586
+
+
+class _AwxTaskError():
+    def build_exception(self, task, message=None):
         if message is None:
             message = "Execution error running {}".format(task.log_format)
-        super(AwxTaskError, self).__init__(message)
-        self.task = task
-
-
-class TaskCancel(AwxTaskError):
-    """Canceled flag caused run_pexpect to kill the job run"""
-    def __init__(self, task, rc):
-        super(TaskCancel, self).__init__(
-            task, message="{} was canceled (rc={})".format(task.log_format, rc))
-        self.rc = rc
+        e = Exception(message)
+        e.task = task
+        e.is_awx_task_error = True
+        return e
+
+    def TaskCancel(self, task, rc):
+        """Canceled flag caused run_pexpect to kill the job run"""
+        message = "{} was canceled (rc={})".format(task.log_format, rc)
+        e = self.build_exception(task, message)
+        e.rc = rc
+        e.awx_task_error_type = "TaskCancel"
+        return e
+
+    def TaskError(self, task, rc):
+        """Userspace error (non-zero exit code) in run_pexpect subprocess"""
+        message = "{} encountered an error (rc={}), please see task stdout for details.".format(task.log_format, rc)
+        e = self.build_exception(task, message)
+        e.rc = rc
+        e.awx_task_error_type = "TaskError"
+        return e
-
-
-class TaskError(AwxTaskError):
-    """Userspace error (non-zero exit code) in run_pexpect subprocess"""
-    def __init__(self, task, rc):
-        super(TaskError, self).__init__(
-            task, message="%s encountered an error (rc=%s), please see task stdout for details.".format(task.log_format, rc))
-        self.rc = rc
+
+
+AwxTaskError = _AwxTaskError()

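Aside (not from the commit): since _AwxTaskError now hands back plain Exception objects, callers can no longer catch a subclass; a usage sketch, with run_job as a hypothetical stand-in for the task runner:

    def handle(task):
        try:
            run_job(task)  # hypothetical task runner
        except Exception as e:
            # the marker attributes set by build_exception() survive
            # Celery's JSON serializer, unlike subclass identity
            if getattr(e, 'awx_task_error_type', None) == 'TaskCancel':
                return 'canceled (rc={})'.format(e.rc)
            raise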
@@ -1,5 +1,4 @@
 import base64
-import cStringIO
 import codecs
 import StringIO
 import json
@@ -15,7 +14,7 @@ from django.conf import settings

 import awx
 from awx.main.expect import run
-from awx.main.utils import OutputEventFilter
+from awx.main.utils import OutputEventFilter, get_system_task_capacity
 from awx.main.queue import CallbackQueueDispatcher

 logger = logging.getLogger('awx.isolated.manager')
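A likely motivation for the cStringIO-to-StringIO swap (an inference; the diff does not state it): pexpect is spawned with encoding='utf-8' and writes unicode into this buffer, which cStringIO rejects for non-ASCII data on Python 2:

    import StringIO
    buff = StringIO.StringIO()
    buff.write(u'caf\xe9')                                 # unicode is fine here
    assert buff.getvalue().encode('utf-8') == 'caf\xc3\xa9'
    # cStringIO.StringIO().write(u'caf\xe9') would raise UnicodeEncodeError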
@@ -143,7 +142,7 @@ class IsolatedManager(object):

         # if an ssh private key fifo exists, read its contents and delete it
         if self.ssh_key_path:
-            buff = cStringIO.StringIO()
+            buff = StringIO.StringIO()
             with open(self.ssh_key_path, 'r') as fifo:
                 for line in fifo:
                     buff.write(line)
@@ -183,7 +182,7 @@ class IsolatedManager(object):
             job_timeout=settings.AWX_ISOLATED_LAUNCH_TIMEOUT,
             pexpect_timeout=5
         )
-        output = buff.getvalue()
+        output = buff.getvalue().encode('utf-8')
         playbook_logger.info('Isolated job {} dispatch:\n{}'.format(self.instance.id, output))
         if status != 'successful':
             self.stdout_handle.write(output)
@@ -283,7 +282,7 @@ class IsolatedManager(object):
         status = 'failed'
         output = ''
         rc = None
-        buff = cStringIO.StringIO()
+        buff = StringIO.StringIO()
         last_check = time.time()
         seek = 0
         job_timeout = remaining = self.job_timeout
@@ -304,7 +303,7 @@ class IsolatedManager(object):
                 time.sleep(1)
                 continue

-            buff = cStringIO.StringIO()
+            buff = StringIO.StringIO()
             logger.debug('Checking on isolated job {} with `check_isolated.yml`.'.format(self.instance.id))
             status, rc = IsolatedManager.run_pexpect(
                 args, self.awx_playbook_path(), self.management_env, buff,
@@ -314,7 +313,7 @@ class IsolatedManager(object):
                 pexpect_timeout=5,
                 proot_cmd=self.proot_cmd
             )
-            output = buff.getvalue()
+            output = buff.getvalue().encode('utf-8')
             playbook_logger.info('Isolated job {} check:\n{}'.format(self.instance.id, output))

             path = self.path_to('artifacts', 'stdout')
@@ -356,14 +355,14 @@ class IsolatedManager(object):
         }
         args = self._build_args('clean_isolated.yml', '%s,' % self.host, extra_vars)
         logger.debug('Cleaning up job {} on isolated host with `clean_isolated.yml` playbook.'.format(self.instance.id))
-        buff = cStringIO.StringIO()
+        buff = StringIO.StringIO()
         timeout = max(60, 2 * settings.AWX_ISOLATED_CONNECTION_TIMEOUT)
         status, rc = IsolatedManager.run_pexpect(
             args, self.awx_playbook_path(), self.management_env, buff,
             idle_timeout=timeout, job_timeout=timeout,
             pexpect_timeout=5
         )
-        output = buff.getvalue()
+        output = buff.getvalue().encode('utf-8')
         playbook_logger.info('Isolated job {} cleanup:\n{}'.format(self.instance.id, output))

         if status != 'successful':
@@ -382,10 +381,14 @@ class IsolatedManager(object):
             logger.error(err_template.format(instance.hostname, instance.version, awx_application_version))
             instance.capacity = 0
         else:
-            if instance.capacity == 0 and task_result['capacity']:
+            if instance.capacity == 0 and task_result['capacity_cpu']:
                 logger.warning('Isolated instance {} has re-joined.'.format(instance.hostname))
-            instance.capacity = int(task_result['capacity'])
-            instance.save(update_fields=['capacity', 'version', 'modified'])
+            instance.cpu_capacity = int(task_result['capacity_cpu'])
+            instance.mem_capacity = int(task_result['capacity_mem'])
+            instance.capacity = get_system_task_capacity(scale=instance.capacity_adjustment,
+                                                         cpu_capacity=int(task_result['capacity_cpu']),
+                                                         mem_capacity=int(task_result['capacity_mem']))
+            instance.save(update_fields=['cpu_capacity', 'mem_capacity', 'capacity', 'version', 'modified'])

     @classmethod
     def health_check(cls, instance_qs, awx_application_version):
@@ -406,14 +409,14 @@ class IsolatedManager(object):
             env = cls._base_management_env()
             env['ANSIBLE_STDOUT_CALLBACK'] = 'json'

-            buff = cStringIO.StringIO()
+            buff = StringIO.StringIO()
             timeout = max(60, 2 * settings.AWX_ISOLATED_CONNECTION_TIMEOUT)
             status, rc = IsolatedManager.run_pexpect(
                 args, cls.awx_playbook_path(), env, buff,
                 idle_timeout=timeout, job_timeout=timeout,
                 pexpect_timeout=5
             )
-            output = buff.getvalue()
+            output = buff.getvalue().encode('utf-8')
             buff.close()

             try:
@@ -429,7 +432,7 @@ class IsolatedManager(object):
                 task_result = result['plays'][0]['tasks'][0]['hosts'][instance.hostname]
             except (KeyError, IndexError):
                 task_result = {}
-            if 'capacity' in task_result:
+            if 'capacity_cpu' in task_result and 'capacity_mem' in task_result:
                 cls.update_capacity(instance, task_result, awx_application_version)
             elif instance.capacity == 0:
                 logger.debug('Isolated instance {} previously marked as lost, could not re-join.'.format(
@@ -445,7 +448,7 @@ class IsolatedManager(object):
             instance.hostname, instance.modified))

     @staticmethod
-    def wrap_stdout_handle(instance, private_data_dir, stdout_handle, event_data_key='job_id'):
+    def get_stdout_handle(instance, private_data_dir, event_data_key='job_id'):
         dispatcher = CallbackQueueDispatcher()

         def job_event_callback(event_data):
@@ -463,7 +466,7 @@ class IsolatedManager(object):
                 event_data.get('event', ''), event_data['uuid'], instance.id, event_data))
             dispatcher.dispatch(event_data)

-        return OutputEventFilter(stdout_handle, job_event_callback)
+        return OutputEventFilter(job_event_callback)

     def run(self, instance, host, private_data_dir, proot_temp_dir):
         """

@@ -47,7 +47,7 @@ def open_fifo_write(path, data):
     This blocks the thread until an external process (such as ssh-agent)
     reads data from the pipe.
     '''
-    os.mkfifo(path, 0600)
+    os.mkfifo(path, 0o600)
     thread.start_new_thread(lambda p, d: open(p, 'w').write(d), (path, data))

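Only the literal changes here: 0600 is Python 2-only octal syntax, while 0o600 parses on Python 2.6+ and Python 3 alike; the permission bits are identical:

    import stat
    assert 0o600 == stat.S_IRUSR | stat.S_IWUSR  # owner read/write only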
@@ -99,7 +99,6 @@ def run_pexpect(args, cwd, env, logfile,
     password_patterns = expect_passwords.keys()
     password_values = expect_passwords.values()

-    logfile_pos = logfile.tell()
     child = pexpect.spawn(
         args[0], args[1:], cwd=cwd, env=env, ignore_sighup=True,
         encoding='utf-8', echo=False,
@@ -116,8 +115,6 @@ def run_pexpect(args, cwd, env, logfile,
         password = password_values[result_id]
         if password is not None:
             child.sendline(password)
-        if logfile_pos != logfile.tell():
-            logfile_pos = logfile.tell()
         last_stdout_update = time.time()
         if cancelled_callback:
             try:

@@ -6,6 +6,7 @@ import copy
 import json
 import re
+import six
 import urllib

 from jinja2 import Environment, StrictUndefined
 from jinja2.exceptions import UndefinedError
@@ -73,7 +74,7 @@ class JSONField(upstream_JSONField):

 class JSONBField(upstream_JSONBField):
     def get_prep_lookup(self, lookup_type, value):
-        if isinstance(value, basestring) and value == "null":
+        if isinstance(value, six.string_types) and value == "null":
             return 'null'
         return super(JSONBField, self).get_prep_lookup(lookup_type, value)
@@ -352,9 +353,10 @@ class SmartFilterField(models.TextField):
         # https://docs.python.org/2/library/stdtypes.html#truth-value-testing
         if not value:
             return None
         value = urllib.unquote(value)
         try:
             SmartFilter().query_from_string(value)
-        except RuntimeError, e:
+        except RuntimeError as e:
             raise models.base.ValidationError(e)
         return super(SmartFilterField, self).get_prep_value(value)
@@ -504,6 +506,12 @@ class CredentialInputField(JSONSchemaField):
                     v != '$encrypted$',
                     model_instance.pk
                 ]):
+                    if not isinstance(getattr(model_instance, k), six.string_types):
+                        raise django_exceptions.ValidationError(
+                            _('secret values must be of type string, not {}').format(type(v).__name__),
+                            code='invalid',
+                            params={'value': v},
+                        )
                     decrypted_values[k] = utils.decrypt_field(model_instance, k)
                 else:
                     decrypted_values[k] = v
@@ -693,11 +701,10 @@ class CredentialTypeInjectorField(JSONSchemaField):
             'properties': {
                 'file': {
                     'type': 'object',
-                    'properties': {
-                        'template': {'type': 'string'},
+                    'patternProperties': {
+                        '^template(\.[a-zA-Z_]+[a-zA-Z0-9_]*)?$': {'type': 'string'},
                     },
                     'additionalProperties': False,
-                    'required': ['template'],
                 },
                 'env': {
                     'type': 'object',
@@ -747,8 +754,22 @@ class CredentialTypeInjectorField(JSONSchemaField):

         class TowerNamespace:
             filename = None

         valid_namespace['tower'] = TowerNamespace()

+        # ensure either single file or multi-file syntax is used (but not both)
+        template_names = [x for x in value.get('file', {}).keys() if x.startswith('template')]
+        if 'template' in template_names and len(template_names) > 1:
+            raise django_exceptions.ValidationError(
+                _('Must use multi-file syntax when injecting multiple files'),
+                code='invalid',
+                params={'value': value},
+            )
+        if 'template' not in template_names:
+            valid_namespace['tower'].filename = TowerNamespace()
+        for template_name in template_names:
+            template_name = template_name.split('.')[1]
+            setattr(valid_namespace['tower'].filename, template_name, 'EXAMPLE')

         for type_, injector in value.items():
             for key, tmpl in injector.items():
                 try:

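Hypothetical injector payloads illustrating the schema and validation above (the values are made up): the plain 'template' key and the dotted 'template.<name>' keys are now mutually exclusive:

    single_file = {'file': {'template': '[defaults]\nhost_key_checking = False'}}
    multi_file = {'file': {'template.cert': '{{ ssl_cert }}',
                           'template.key': '{{ ssl_key }}'}}
    both = {'file': {'template': '...', 'template.key': '...'}}  # now rejected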
@@ -5,6 +5,8 @@
 import datetime
 import logging

+import six
+
 # Django
 from django.core.management.base import BaseCommand
 from django.utils.timezone import now
@@ -41,7 +43,7 @@ class Command(BaseCommand):
         n_deleted_items = 0
         pks_to_delete = set()
         for asobj in ActivityStream.objects.iterator():
-            asobj_disp = '"%s" id: %s' % (unicode(asobj), asobj.id)
+            asobj_disp = '"%s" id: %s' % (six.text_type(asobj), asobj.id)
             if asobj.timestamp >= self.cutoff:
                 if self.dry_run:
                     self.logger.info("would skip %s" % asobj_disp)

@@ -5,6 +5,8 @@
 import datetime
 import logging

+import six
+
 # Django
 from django.core.management.base import BaseCommand, CommandError
 from django.db import transaction
@@ -66,7 +68,7 @@ class Command(BaseCommand):
         jobs = Job.objects.filter(created__lt=self.cutoff)
         for job in jobs.iterator():
             job_display = '"%s" (%d host summaries, %d events)' % \
-                (unicode(job),
+                (six.text_type(job),
                  job.job_host_summaries.count(), job.job_events.count())
             if job.status in ('pending', 'waiting', 'running'):
                 action_text = 'would skip' if self.dry_run else 'skipping'
@@ -87,7 +89,7 @@ class Command(BaseCommand):
         ad_hoc_commands = AdHocCommand.objects.filter(created__lt=self.cutoff)
         for ad_hoc_command in ad_hoc_commands.iterator():
             ad_hoc_command_display = '"%s" (%d events)' % \
-                (unicode(ad_hoc_command),
+                (six.text_type(ad_hoc_command),
                  ad_hoc_command.ad_hoc_command_events.count())
             if ad_hoc_command.status in ('pending', 'waiting', 'running'):
                 action_text = 'would skip' if self.dry_run else 'skipping'
@@ -107,7 +109,7 @@ class Command(BaseCommand):
         skipped, deleted = 0, 0
         project_updates = ProjectUpdate.objects.filter(created__lt=self.cutoff)
         for pu in project_updates.iterator():
-            pu_display = '"%s" (type %s)' % (unicode(pu), unicode(pu.launch_type))
+            pu_display = '"%s" (type %s)' % (six.text_type(pu), six.text_type(pu.launch_type))
             if pu.status in ('pending', 'waiting', 'running'):
                 action_text = 'would skip' if self.dry_run else 'skipping'
                 self.logger.debug('%s %s project update %s', action_text, pu.status, pu_display)
@@ -130,7 +132,7 @@ class Command(BaseCommand):
         skipped, deleted = 0, 0
         inventory_updates = InventoryUpdate.objects.filter(created__lt=self.cutoff)
         for iu in inventory_updates.iterator():
-            iu_display = '"%s" (source %s)' % (unicode(iu), unicode(iu.source))
+            iu_display = '"%s" (source %s)' % (six.text_type(iu), six.text_type(iu.source))
             if iu.status in ('pending', 'waiting', 'running'):
                 action_text = 'would skip' if self.dry_run else 'skipping'
                 self.logger.debug('%s %s inventory update %s', action_text, iu.status, iu_display)
@@ -153,7 +155,7 @@ class Command(BaseCommand):
         skipped, deleted = 0, 0
         system_jobs = SystemJob.objects.filter(created__lt=self.cutoff)
         for sj in system_jobs.iterator():
-            sj_display = '"%s" (type %s)' % (unicode(sj), unicode(sj.job_type))
+            sj_display = '"%s" (type %s)' % (six.text_type(sj), six.text_type(sj.job_type))
             if sj.status in ('pending', 'waiting', 'running'):
                 action_text = 'would skip' if self.dry_run else 'skipping'
                 self.logger.debug('%s %s system_job %s', action_text, sj.status, sj_display)
@@ -183,7 +185,7 @@ class Command(BaseCommand):
         workflow_jobs = WorkflowJob.objects.filter(created__lt=self.cutoff)
         for workflow_job in workflow_jobs.iterator():
             workflow_job_display = '"{}" ({} nodes)'.format(
-                unicode(workflow_job),
+                six.text_type(workflow_job),
                 workflow_job.workflow_nodes.count())
             if workflow_job.status in ('pending', 'waiting', 'running'):
                 action_text = 'would skip' if self.dry_run else 'skipping'
@@ -204,7 +206,7 @@ class Command(BaseCommand):
         notifications = Notification.objects.filter(created__lt=self.cutoff)
         for notification in notifications.iterator():
             notification_display = '"{}" (started {}, {} type, {} sent)'.format(
-                unicode(notification), unicode(notification.created),
+                six.text_type(notification), six.text_type(notification.created),
                 notification.notification_type, notification.notifications_sent)
             if notification.status in ('pending',):
                 action_text = 'would skip' if self.dry_run else 'skipping'
@@ -246,4 +248,3 @@ class Command(BaseCommand):
             self.logger.log(99, '%s: %d would be deleted, %d would be skipped.', m.replace('_', ' '), deleted, skipped)
         else:
             self.logger.log(99, '%s: %d deleted, %d skipped.', m.replace('_', ' '), deleted, skipped)

@@ -17,7 +17,7 @@ class Command(BaseCommand):

     def handle(self, *args, **kwargs):
         if getattr(settings, 'AWX_ISOLATED_PRIVATE_KEY', False):
-            print settings.AWX_ISOLATED_PUBLIC_KEY
+            print(settings.AWX_ISOLATED_PUBLIC_KEY)
             return

         key = rsa.generate_private_key(
@@ -41,4 +41,4 @@ class Command(BaseCommand):
         ) + " generated-by-awx@%s" % datetime.datetime.utcnow().isoformat()
     )
     pemfile.save()
-    print pemfile.value
+    print(pemfile.value)

@@ -173,6 +173,7 @@ class AnsibleInventoryLoader(object):
     def load(self):
         base_args = self.get_base_args()
         logger.info('Reading Ansible inventory source: %s', self.source)

         data = self.command_to_json(base_args + ['--list'])

         # TODO: remove after we run custom scripts through ansible-inventory
@@ -225,6 +226,7 @@ def load_inventory_source(source, group_filter_re=None,
     '''
     # Sanity check: We sanitize these module names for our API but Ansible proper doesn't follow
     # good naming conventions
+    source = source.replace('rhv.py', 'ovirt4.py')
     source = source.replace('satellite6.py', 'foreman.py')
     source = source.replace('vmware.py', 'vmware_inventory.py')
     if not os.path.exists(source):
@@ -600,27 +602,20 @@ class Command(BaseCommand):

     def _update_inventory(self):
         '''
-        Update/overwrite variables from "all" group. If importing from a
-        cloud source attached to a specific group, variables will be set on
-        the base group, otherwise they will be set on the whole inventory.
+        Update inventory variables from "all" group.
         '''
-        # FIXME: figure out how "all" variables are handled in the new inventory source system
+        # TODO: We disable variable overwrite here in case user-defined inventory variables get
+        # mangled. But we still need to figure out a better way of processing multiple inventory
+        # update variables mixing with each other.
         all_obj = self.inventory
-        all_name = 'inventory'
         db_variables = all_obj.variables_dict
-        if self.overwrite_vars:
-            db_variables = self.all_group.variables
-        else:
-            db_variables.update(self.all_group.variables)
+        db_variables.update(self.all_group.variables)
         if db_variables != all_obj.variables_dict:
             all_obj.variables = json.dumps(db_variables)
             all_obj.save(update_fields=['variables'])
-            if self.overwrite_vars:
-                logger.info('%s variables replaced from "all" group', all_name.capitalize())
-            else:
-                logger.info('%s variables updated from "all" group', all_name.capitalize())
+            logger.info('Inventory variables updated from "all" group')
         else:
-            logger.info('%s variables unmodified', all_name.capitalize())
+            logger.info('Inventory variables unmodified')

     def _create_update_groups(self):
         '''
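The net merge semantics after this simplification, in miniature: imported "all"-group variables win on key collisions and the rest of the existing inventory variables are preserved:

    db_variables = {'region': 'us-east', 'tier': 'prod'}
    db_variables.update({'tier': 'staging'})          # "all" group wins
    assert db_variables == {'region': 'us-east', 'tier': 'staging'}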
@@ -909,7 +904,6 @@ class Command(BaseCommand):
         new_count = Host.objects.active_count()
         if time_remaining <= 0 and not license_info.get('demo', False):
             logger.error(LICENSE_EXPIRED_MESSAGE)
-            raise CommandError("License has expired!")
         if free_instances < 0:
             d = {
                 'new_count': new_count,
@@ -919,7 +913,6 @@ class Command(BaseCommand):
                 logger.error(DEMO_LICENSE_MESSAGE % d)
             else:
                 logger.error(LICENSE_MESSAGE % d)
-            raise CommandError('License count exceeded!')

     def mark_license_failure(self, save=True):
         self.inventory_update.license_error = True

@@ -17,6 +17,10 @@ class Command(BaseCommand):
                             help='Comma-Delimited Hosts to add to the Queue')
         parser.add_argument('--controller', dest='controller', type=str,
                             default='', help='The controlling group (makes this an isolated group)')
+        parser.add_argument('--instance_percent', dest='instance_percent', type=int, default=0,
+                            help='The percentage of active instances that will be assigned to this group'),
+        parser.add_argument('--instance_minimum', dest='instance_minimum', type=int, default=0,
+                            help='The minimum number of instance that will be retained for this group from available instances')

     def handle(self, **options):
         queuename = options.get('queuename')
@@ -38,7 +42,9 @@ class Command(BaseCommand):
             changed = True
         else:
             print("Creating instance group {}".format(queuename))
-            ig = InstanceGroup(name=queuename)
+            ig = InstanceGroup(name=queuename,
+                               policy_instance_percentage=options.get('instance_percent'),
+                               policy_instance_minimum=options.get('instance_minimum'))
             if control_ig:
                 ig.controller = control_ig
             ig.save()
@@ -60,5 +66,7 @@ class Command(BaseCommand):
                 sys.exit(1)
             else:
                 print("Instance already registered {}".format(instance[0].hostname))
+        ig.policy_instance_list = instance_list
+        ig.save()
         if changed:
             print('(changed: True)')

@@ -12,11 +12,17 @@ from awx.main.models import (
     UnifiedJob,
     Job,
     AdHocCommand,
+    ProjectUpdate,
+    InventoryUpdate,
+    SystemJob
 )
 from awx.main.consumers import emit_channel_notification
 from awx.api.serializers import (
     JobEventWebSocketSerializer,
     AdHocCommandEventWebSocketSerializer,
+    ProjectUpdateEventWebSocketSerializer,
+    InventoryUpdateEventWebSocketSerializer,
+    SystemJobEventWebSocketSerializer
 )


@@ -60,7 +66,16 @@ class ReplayJobEvents():
         return self.replay_elapsed().total_seconds() - (self.recording_elapsed(created).total_seconds() * (1.0 / speed))

     def get_job_events(self, job):
-        job_events = job.job_events.order_by('created')
+        if type(job) is Job:
+            job_events = job.job_events.order_by('created')
+        elif type(job) is AdHocCommand:
+            job_events = job.ad_hoc_command_events.order_by('created')
+        elif type(job) is ProjectUpdate:
+            job_events = job.project_update_events.order_by('created')
+        elif type(job) is InventoryUpdate:
+            job_events = job.inventory_update_events.order_by('created')
+        elif type(job) is SystemJob:
+            job_events = job.system_job_events.order_by('created')
         if job_events.count() == 0:
             raise RuntimeError("No events for job id {}".format(job.id))
         return job_events
@@ -70,6 +85,12 @@ class ReplayJobEvents():
             return JobEventWebSocketSerializer
         elif type(job) is AdHocCommand:
             return AdHocCommandEventWebSocketSerializer
+        elif type(job) is ProjectUpdate:
+            return ProjectUpdateEventWebSocketSerializer
+        elif type(job) is InventoryUpdate:
+            return InventoryUpdateEventWebSocketSerializer
+        elif type(job) is SystemJob:
+            return SystemJobEventWebSocketSerializer
         else:
             raise RuntimeError("Job is of type {} and replay is not yet supported.".format(type(job)))
             sys.exit(1)

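Design note (a sketch of an alternative, not what the commit does): the type-dispatch above could be collapsed into a lookup table keyed on the model class, using the relation names visible in the diff; the model names are assumed to come from awx.main.models as imported above:

    EVENT_RELATION = {
        Job: 'job_events',
        AdHocCommand: 'ad_hoc_command_events',
        ProjectUpdate: 'project_update_events',
        InventoryUpdate: 'inventory_update_events',
        SystemJob: 'system_job_events',
    }

    def get_job_events(job):
        # one line replaces the five-way if/elif chain
        return getattr(job, EVENT_RELATION[type(job)]).order_by('created')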
@@ -3,13 +3,14 @@

 # Python
 import logging
+import os
 import signal
 import time
 from uuid import UUID
 from multiprocessing import Process
 from multiprocessing import Queue as MPQueue
 from Queue import Empty as QueueEmpty
 from Queue import Full as QueueFull
-import os

 from kombu import Connection, Exchange, Queue
 from kombu.mixins import ConsumerMixin
@@ -18,11 +19,13 @@ from kombu.mixins import ConsumerMixin
 from django.conf import settings
 from django.core.management.base import BaseCommand
 from django.db import connection as django_connection
-from django.db import DatabaseError
+from django.db import DatabaseError, OperationalError
+from django.db.utils import InterfaceError, InternalError
 from django.core.cache import cache as django_cache

 # AWX
 from awx.main.models import * # noqa
+from awx.main.consumers import emit_channel_notification

 logger = logging.getLogger('awx.main.commands.run_callback_receiver')

@@ -39,6 +42,9 @@ class WorkerSignalHandler:


 class CallbackBrokerWorker(ConsumerMixin):

+    MAX_RETRIES = 2
+
     def __init__(self, connection, use_workers=True):
         self.connection = connection
         self.worker_queues = []
@@ -123,8 +129,17 @@ class CallbackBrokerWorker(ConsumerMixin):
                 logger.error("Exception on worker thread, restarting: " + str(e))
                 continue
             try:
-                if 'job_id' not in body and 'ad_hoc_command_id' not in body:
-                    raise Exception('Payload does not have a job_id or ad_hoc_command_id')
+                event_map = {
+                    'job_id': JobEvent,
+                    'ad_hoc_command_id': AdHocCommandEvent,
+                    'project_update_id': ProjectUpdateEvent,
+                    'inventory_update_id': InventoryUpdateEvent,
+                    'system_job_id': SystemJobEvent,
+                }
+
+                if not any([key in body for key in event_map]):
+                    raise Exception('Payload does not have a job identifier')
                 if settings.DEBUG:
                     from pygments import highlight
                     from pygments.lexers import PythonLexer
@@ -132,14 +147,51 @@ class CallbackBrokerWorker(ConsumerMixin):
                     from pprint import pformat
                     logger.info('Body: {}'.format(
                         highlight(pformat(body, width=160), PythonLexer(), Terminal256Formatter(style='friendly'))
-                    ))
-                try:
-                    if 'job_id' in body:
-                        JobEvent.create_from_data(**body)
-                    elif 'ad_hoc_command_id' in body:
-                        AdHocCommandEvent.create_from_data(**body)
-                except DatabaseError as e:
-                    logger.error('Database Error Saving Job Event: {}'.format(e))
+                    )[:1024 * 4])
+
+                def _save_event_data():
+                    for key, cls in event_map.items():
+                        if key in body:
+                            cls.create_from_data(**body)
+
+                job_identifier = 'unknown job'
+                for key in event_map.keys():
+                    if key in body:
+                        job_identifier = body[key]
+                        break
+
+                if body.get('event') == 'EOF':
+                    # EOF events are sent when stdout for the running task is
+                    # closed. don't actually persist them to the database; we
+                    # just use them to report `summary` websocket events as an
+                    # approximation for when a job is "done"
+                    emit_channel_notification(
+                        'jobs-summary',
+                        dict(group_name='jobs', unified_job_id=job_identifier)
+                    )
+                    continue
+
+                retries = 0
+                while retries <= self.MAX_RETRIES:
+                    try:
+                        _save_event_data()
+                        break
+                    except (OperationalError, InterfaceError, InternalError) as e:
+                        if retries >= self.MAX_RETRIES:
+                            logger.exception('Worker could not re-establish database connectivity, shutting down gracefully: Job {}'.format(job_identifier))
+                            os.kill(os.getppid(), signal.SIGINT)
+                            return
+                        delay = 60 * retries
+                        logger.exception('Database Error Saving Job Event, retry #{i} in {delay} seconds:'.format(
+                            i=retries + 1,
+                            delay=delay
+                        ))
+                        django_connection.close()
+                        time.sleep(delay)
+                        retries += 1
+                    except DatabaseError as e:
+                        logger.exception('Database Error Saving Job Event for Job {}'.format(job_identifier))
+                        break
             except Exception as exc:
                 import traceback
                 tb = traceback.format_exc()

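The retry schedule implied by the loop above: with MAX_RETRIES = 2 and delay = 60 * retries, a failed save is retried immediately, then after 60 seconds, and a third failure SIGINTs the parent process for a graceful shutdown:

    delays = [60 * r for r in range(2)]
    assert delays == [0, 60]   # then os.kill(os.getppid(), signal.SIGINT)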
awx/main/management/commands/test_isolated_connection.py (new file, 49 lines)
@@ -0,0 +1,49 @@
+import os
+import shutil
+import subprocess
+import sys
+import tempfile
+from optparse import make_option
+
+from django.conf import settings
+from django.core.management.base import BaseCommand, CommandError
+
+from awx.main.expect import run
+
+
+class Command(BaseCommand):
+    """Tests SSH connectivity between a controller and target isolated node"""
+    help = 'Tests SSH connectivity between a controller and target isolated node'
+
+    option_list = BaseCommand.option_list + (
+        make_option('--hostname', dest='hostname', type='string',
+                    help='Hostname of an isolated node'),
+    )
+
+    def handle(self, *args, **options):
+        hostname = options.get('hostname')
+        if not hostname:
+            raise CommandError("--hostname is a required argument")
+
+        try:
+            path = tempfile.mkdtemp(prefix='awx_isolated_ssh', dir=settings.AWX_PROOT_BASE_PATH)
+            args = [
+                'ansible', 'all', '-i', '{},'.format(hostname), '-u',
+                settings.AWX_ISOLATED_USERNAME, '-T5', '-m', 'shell',
+                '-a', 'hostname', '-vvv'
+            ]
+            if all([
+                getattr(settings, 'AWX_ISOLATED_KEY_GENERATION', False) is True,
+                getattr(settings, 'AWX_ISOLATED_PRIVATE_KEY', None)
+            ]):
+                ssh_key_path = os.path.join(path, '.isolated')
+                ssh_auth_sock = os.path.join(path, 'ssh_auth.sock')
+                run.open_fifo_write(ssh_key_path, settings.AWX_ISOLATED_PRIVATE_KEY)
+                args = run.wrap_args_with_ssh_agent(args, ssh_key_path, ssh_auth_sock)
+            try:
+                print(' '.join(args))
+                subprocess.check_call(args)
+            except subprocess.CalledProcessError as e:
+                sys.exit(e.returncode)
+        finally:
+            shutil.rmtree(path)
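A plausible invocation for the new command (an assumption; awx-manage is the project's management entry point): awx-manage test_isolated_connection --hostname=isolated-node.example.com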
@@ -2,12 +2,9 @@
 # All Rights Reserved.

 import sys
-from datetime import timedelta
 import logging

 from django.db import models
-from django.utils.timezone import now
-from django.db.models import Sum
 from django.conf import settings

 from awx.main.utils.filters import SmartFilter
@@ -21,11 +18,15 @@ class HostManager(models.Manager):
     """Custom manager class for Hosts model."""

     def active_count(self):
-        """Return count of active, unique hosts for licensing."""
-        try:
-            return self.order_by('name').distinct('name').count()
-        except NotImplementedError:  # For unit tests only, SQLite doesn't support distinct('name')
-            return len(set(self.values_list('name', flat=True)))
+        """Return count of active, unique hosts for licensing.
+        Construction of query involves:
+         - remove any ordering specified in model's Meta
+         - Exclude hosts sourced from another Tower
+         - Restrict the query to only return the name column
+         - Only consider results that are unique
+         - Return the count of this query
+        """
+        return self.order_by().exclude(inventory_sources__source='tower').values('name').distinct().count()

     def get_queryset(self):
         """When the parent instance of the host query set has a `kind=smart` and a `host_filter`
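Why the bare order_by() matters (a sketch; Host is the model managed above): with a Meta.ordering in effect, Django adds the ordering columns to the subquery's SELECT, so DISTINCT would de-duplicate on (name, ordering columns) instead of name alone and inflate the licensing count:

    # with Meta.ordering present, these two can disagree:
    Host.objects.values('name').distinct().count()             # may overcount
    Host.objects.order_by().values('name').distinct().count()  # one row per name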
@@ -89,11 +90,6 @@ class InstanceManager(models.Manager):
         """Return count of active Tower nodes for licensing."""
         return self.all().count()

-    def total_capacity(self):
-        sumval = self.filter(modified__gte=now() - timedelta(seconds=settings.AWX_ACTIVE_NODE_TIME)) \
-                     .aggregate(total_capacity=Sum('capacity'))['total_capacity']
-        return max(50, sumval)
-
     def my_role(self):
         # NOTE: TODO: Likely to repurpose this once standalone ramparts are a thing
         return "tower"

@@ -5,6 +5,10 @@ import logging
 import threading
 import uuid
 import six
+import time
+import cProfile
+import pstats
+import os

 from django.conf import settings
 from django.contrib.auth.models import User
@@ -25,6 +29,40 @@ from awx.conf import fields, register

 logger = logging.getLogger('awx.main.middleware')
 analytics_logger = logging.getLogger('awx.analytics.activity_stream')
+perf_logger = logging.getLogger('awx.analytics.performance')
+
+
+class TimingMiddleware(threading.local):
+
+    dest = '/var/lib/awx/profile'
+
+    def process_request(self, request):
+        self.start_time = time.time()
+        if settings.AWX_REQUEST_PROFILE:
+            self.prof = cProfile.Profile()
+            self.prof.enable()
+
+    def process_response(self, request, response):
+        if not hasattr(self, 'start_time'):  # some tools may not invoke process_request
+            return response
+        total_time = time.time() - self.start_time
+        response['X-API-Total-Time'] = '%0.3fs' % total_time
+        if settings.AWX_REQUEST_PROFILE:
+            self.prof.disable()
+            cprofile_file = self.save_profile_file(request)
+            response['cprofile_file'] = cprofile_file
+        perf_logger.info('api response times', extra=dict(python_objects=dict(request=request, response=response)))
+        return response
+
+    def save_profile_file(self, request):
+        if not os.path.isdir(self.dest):
+            os.makedirs(self.dest)
+        filename = '%.3fs-%s' % (pstats.Stats(self.prof).total_tt, uuid.uuid4())
+        filepath = os.path.join(self.dest, filename)
+        with open(filepath, 'w') as f:
+            f.write('%s %s\n' % (request.method, request.get_full_path()))
+            pstats.Stats(self.prof, stream=f).sort_stats('cumulative').print_stats()
+        return filepath


 class ActivityStreamMiddleware(threading.local):

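A hypothetical settings fragment to exercise the new profiling path (the AWX_REQUEST_PROFILE flag comes from the diff; the middleware dotted path and its position in the stack are illustrative):

    AWX_REQUEST_PROFILE = True
    MIDDLEWARE_CLASSES = [
        'awx.main.middleware.TimingMiddleware',
        # ... remainder of the stack
    ]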
@@ -8,14 +8,9 @@ from __future__ import unicode_literals
 from django.db import migrations, models
 from django.conf import settings
 import awx.main.fields
 import jsonfield.fields


-def update_dashed_host_variables(apps, schema_editor):
-    Host = apps.get_model('main', 'Host')
-    for host in Host.objects.filter(variables='---'):
-        host.variables = ''
-        host.save()
+import _squashed
+from _squashed_30 import SQUASHED_30


 class Migration(migrations.Migration):
@@ -27,13 +22,7 @@ class Migration(migrations.Migration):
         (b'main', '0025_v300_update_rbac_parents'),
         (b'main', '0026_v300_credential_unique'),
         (b'main', '0027_v300_team_migrations'),
-        (b'main', '0028_v300_org_team_cascade'),
-        (b'main', '0029_v302_add_ask_skip_tags'),
-        (b'main', '0030_v302_job_survey_passwords'),
-        (b'main', '0031_v302_migrate_survey_passwords'),
-        (b'main', '0032_v302_credential_permissions_update'),
-        (b'main', '0033_v303_v245_host_variable_fix'),]
+        (b'main', '0028_v300_org_team_cascade')] + _squashed.replaces(SQUASHED_30, applied=True)

     dependencies = [
         migrations.swappable_dependency(settings.AUTH_USER_MODEL),
@@ -130,27 +119,4 @@ class Migration(migrations.Migration):
             field=models.ForeignKey(related_name='teams', to='main.Organization'),
             preserve_default=False,
         ),
-        # add ask skip tags
-        migrations.AddField(
-            model_name='jobtemplate',
-            name='ask_skip_tags_on_launch',
-            field=models.BooleanField(default=False),
-        ),
-        # job survery passwords
-        migrations.AddField(
-            model_name='job',
-            name='survey_passwords',
-            field=jsonfield.fields.JSONField(default={}, editable=False, blank=True),
-        ),
-        # RBAC credential permission updates
-        migrations.AlterField(
-            model_name='credential',
-            name='admin_role',
-            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'singleton:system_administrator', b'organization.admin_role'], to='main.Role', null=b'True'),
-        ),
-        migrations.AlterField(
-            model_name='credential',
-            name='use_role',
-            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'admin_role'], to='main.Role', null=b'True'),
-        ),
-    ]
+    ] + _squashed.operations(SQUASHED_30, applied=True)

@@ -8,6 +8,9 @@ import django.db.models.deletion
 import awx.main.models.workflow
 import awx.main.fields

+import _squashed
+from _squashed_30 import SQUASHED_30
+

 class Migration(migrations.Migration):

@@ -15,11 +18,11 @@ class Migration(migrations.Migration):
         ('main', '0003_squashed_v300_v303_updates'),
     ]

-    replaces = [
+    replaces = _squashed.replaces(SQUASHED_30) + [
         (b'main', '0034_v310_release'),
     ]

-    operations = [
+    operations = _squashed.operations(SQUASHED_30) + [
        # Create ChannelGroup table
        migrations.CreateModel(
            name='ChannelGroup',

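The _squashed helpers these migrations now call are not shown in this diff; a purely hypothetical sketch of their shape (the real implementations live alongside the migrations and may differ):

    # Assumed: SQUASHED_30 maps migration names to their operation lists;
    # the applied flag is ignored in this sketch.
    def replaces(squashed, applied=False):
        return [(b'main', name) for name in squashed]

    def operations(squashed, applied=False):
        ops = []
        for name in squashed:
            ops.extend(squashed[name])
        return ops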
@@ -1,7 +1,9 @@
 # -*- coding: utf-8 -*-
 from __future__ import unicode_literals
-from django.db import migrations

+from django.db import migrations, models
+import _squashed
+from _squashed_31 import SQUASHED_31


 class Migration(migrations.Migration):
@@ -10,28 +12,5 @@ class Migration(migrations.Migration):
         ('main', '0004_squashed_v310_release'),
     ]

-    replaces = [
-        (b'main', '0035_v310_remove_tower_settings'),
-    ]
-
-    operations = [
-        # Remove Tower settings, these settings are now in separate awx.conf app.
-        migrations.RemoveField(
-            model_name='towersettings',
-            name='user',
-        ),
-        migrations.DeleteModel(
-            name='TowerSettings',
-        ),
-
-        migrations.AlterField(
-            model_name='project',
-            name='scm_type',
-            field=models.CharField(default=b'', choices=[(b'', 'Manual'), (b'git', 'Git'), (b'hg', 'Mercurial'), (b'svn', 'Subversion'), (b'insights', 'Red Hat Insights')], max_length=8, blank=True, help_text='Specifies the source control system used to store the project.', verbose_name='SCM Type'),
-        ),
-        migrations.AlterField(
-            model_name='projectupdate',
-            name='scm_type',
-            field=models.CharField(default=b'', choices=[(b'', 'Manual'), (b'git', 'Git'), (b'hg', 'Mercurial'), (b'svn', 'Subversion'), (b'insights', 'Red Hat Insights')], max_length=8, blank=True, help_text='Specifies the source control system used to store the project.', verbose_name='SCM Type'),
-        ),
-    ]
+    replaces = _squashed.replaces(SQUASHED_31)
+    operations = _squashed.operations(SQUASHED_31)
Some files were not shown because too many files have changed in this diff.