mirror of
https://github.com/ansible/awx.git
synced 2026-02-05 11:34:43 -03:30
Compare commits
827 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
ed1bacdc08 | ||
|
|
f39fa35d86 | ||
|
|
9266444b19 | ||
|
|
35230eded1 | ||
|
|
ecacf64c28 | ||
|
|
d01e6ab8b6 | ||
|
|
5653b47aa3 | ||
|
|
7bc3d85913 | ||
|
|
0a8df7fde2 | ||
|
|
b39269c4c2 | ||
|
|
09981c0020 | ||
|
|
81bdbef785 | ||
|
|
3c541a4695 | ||
|
|
5a1ae9b816 | ||
|
|
8c261892ee | ||
|
|
b89d4349c0 | ||
|
|
3e98363811 | ||
|
|
f24289b2ba | ||
|
|
9170c557a7 | ||
|
|
a47b403f8d | ||
|
|
83aa7bfac4 | ||
|
|
db0b2e6cb6 | ||
|
|
f391b7ace4 | ||
|
|
008c9e4320 | ||
|
|
8ddc1c61ef | ||
|
|
0aa6c7b83f | ||
|
|
e43879d44e | ||
|
|
2a6f6111dc | ||
|
|
6b0659d63a | ||
|
|
426e901cdf | ||
|
|
ac55f93cfb | ||
|
|
c32c3db35e | ||
|
|
20a999f846 | ||
|
|
81af34fce3 | ||
|
|
8fed469975 | ||
|
|
c6d4a62263 | ||
|
|
a9b77eb706 | ||
|
|
e642af82cc | ||
|
|
b0a755d7b5 | ||
|
|
6753f1ca35 | ||
|
|
f8d9d5f51a | ||
|
|
bad8c65321 | ||
|
|
6f0c937236 | ||
|
|
55a616cba6 | ||
|
|
87365e5969 | ||
|
|
7e829e3a9d | ||
|
|
b8cba916a5 | ||
|
|
dc96a1730e | ||
|
|
d4983ea10d | ||
|
|
209bdd00a1 | ||
|
|
c4efbd62bc | ||
|
|
287a3bc8d4 | ||
|
|
9fefc26528 | ||
|
|
e2d4ef31fd | ||
|
|
a15e257b9e | ||
|
|
a56370fed5 | ||
|
|
e7ed4811c1 | ||
|
|
9860b38438 | ||
|
|
ef80ecd3b6 | ||
|
|
50290a9063 | ||
|
|
fefa4a8bf4 | ||
|
|
546f88c74d | ||
|
|
afa1fb489c | ||
|
|
3571abb42b | ||
|
|
21425db889 | ||
|
|
cc64657749 | ||
|
|
7300c2ccc1 | ||
|
|
7c596039c5 | ||
|
|
9857c8272e | ||
|
|
797169317c | ||
|
|
67c6591f6f | ||
|
|
15906b7e3c | ||
|
|
fdd2b84804 | ||
|
|
ac3f7d0fac | ||
|
|
09d63b4883 | ||
|
|
b96e33ea50 | ||
|
|
71d23e8c81 | ||
|
|
073feb74cb | ||
|
|
43f19cc94b | ||
|
|
ef312f0030 | ||
|
|
d0fec0f19c | ||
|
|
1e14221625 | ||
|
|
b6a901ac51 | ||
|
|
1af0ee2f8c | ||
|
|
b62ac6fbe4 | ||
|
|
e5aaeedc43 | ||
|
|
fc5c5400cd | ||
|
|
95bead2bb2 | ||
|
|
bcbda23aee | ||
|
|
5a21783013 | ||
|
|
e33604de71 | ||
|
|
c50c63a9ff | ||
|
|
916d91cbc7 | ||
|
|
79bd8b2c72 | ||
|
|
5939116b0a | ||
|
|
6759e60428 | ||
|
|
ef8af79700 | ||
|
|
dbb4d2b011 | ||
|
|
4a28065dbb | ||
|
|
5387846cbb | ||
|
|
6b247f1f24 | ||
|
|
838b793704 | ||
|
|
3cb8c98a41 | ||
|
|
18f254fc28 | ||
|
|
9c6c6ce816 | ||
|
|
6699be95bf | ||
|
|
17cd0595d7 | ||
|
|
0402064c0f | ||
|
|
e33265e12c | ||
|
|
b8c76301de | ||
|
|
51f7907a01 | ||
|
|
1a98cedc0f | ||
|
|
db974d4fd4 | ||
|
|
d6e663eff0 | ||
|
|
ccb40c8c68 | ||
|
|
6eb04de1a7 | ||
|
|
cad5c5e79a | ||
|
|
97472cb91b | ||
|
|
0c63ea0052 | ||
|
|
2b1d2b2976 | ||
|
|
7d51b1cb9d | ||
|
|
52e531625c | ||
|
|
b5db652050 | ||
|
|
e699402115 | ||
|
|
d012f5cd99 | ||
|
|
4a2ca20b60 | ||
|
|
e49dfd6ee2 | ||
|
|
fb414802fa | ||
|
|
00f400e839 | ||
|
|
234e33df0e | ||
|
|
f9b0a3121f | ||
|
|
0afdca3674 | ||
|
|
03cef6fea3 | ||
|
|
7dc0fce1aa | ||
|
|
648d27f28d | ||
|
|
5a5e5bc121 | ||
|
|
aea37654e2 | ||
|
|
2ed97aeb0c | ||
|
|
9431b0b6ff | ||
|
|
a5007ccd41 | ||
|
|
81fc4219ae | ||
|
|
c3c4d79890 | ||
|
|
b01b229fea | ||
|
|
984b7e066d | ||
|
|
67d927121d | ||
|
|
ae06cff991 | ||
|
|
7ea6d7bf4d | ||
|
|
fad4a549d0 | ||
|
|
9365e477c5 | ||
|
|
d0b3cac72a | ||
|
|
de02138dfd | ||
|
|
44f0b003fc | ||
|
|
56aed597b2 | ||
|
|
f33ee03b98 | ||
|
|
69a3b0def6 | ||
|
|
6504972d82 | ||
|
|
4bb2b5768e | ||
|
|
c0a641ed52 | ||
|
|
1e8c89f536 | ||
|
|
54d3412820 | ||
|
|
1690938dfb | ||
|
|
0a9d3d47b9 | ||
|
|
2952b0a0fe | ||
|
|
1d3e8f8b87 | ||
|
|
97c040aaa1 | ||
|
|
818c95501a | ||
|
|
664bdec57f | ||
|
|
92068930a6 | ||
|
|
d07a946183 | ||
|
|
9d58b15135 | ||
|
|
a0038276a4 | ||
|
|
f0ff6ecb0a | ||
|
|
60743d6ba6 | ||
|
|
4707b5e020 | ||
|
|
ed7d7fcf00 | ||
|
|
6c2a7f3782 | ||
|
|
47875c5f9a | ||
|
|
f28f7c6184 | ||
|
|
1494c8395b | ||
|
|
2691e1d707 | ||
|
|
6d413bd412 | ||
|
|
54bf7e13d8 | ||
|
|
c6b6a3ad89 | ||
|
|
2bd656e61d | ||
|
|
35b8e40d3c | ||
|
|
c4d901bf2c | ||
|
|
1369f72885 | ||
|
|
0b30e7907b | ||
|
|
fc94b3a943 | ||
|
|
fde9099198 | ||
|
|
815cd829e0 | ||
|
|
28c612ae9c | ||
|
|
d6ed6a856d | ||
|
|
706b370f7e | ||
|
|
80a2d10742 | ||
|
|
f7259a1e78 | ||
|
|
08570fe785 | ||
|
|
987cdc6802 | ||
|
|
6e27294e2b | ||
|
|
3439ba5f3b | ||
|
|
c8e10adc96 | ||
|
|
7e261b5246 | ||
|
|
1e1839915d | ||
|
|
74bf058d62 | ||
|
|
5ec537bad2 | ||
|
|
568901af74 | ||
|
|
c2e9926330 | ||
|
|
c4ccfa1b27 | ||
|
|
478bcc0b07 | ||
|
|
0bb9c58e25 | ||
|
|
9c783aa0ce | ||
|
|
526391a072 | ||
|
|
98f8faa349 | ||
|
|
8a2a5b0fb1 | ||
|
|
07cfa6cba5 | ||
|
|
e188692acf | ||
|
|
ad70754b6a | ||
|
|
9fb24f1a4c | ||
|
|
aefa30e1e9 | ||
|
|
7eb2d86890 | ||
|
|
2fb0144914 | ||
|
|
e3a731bb9e | ||
|
|
451e9a7504 | ||
|
|
8311acfba2 | ||
|
|
77a1c405a6 | ||
|
|
1b0bca8229 | ||
|
|
bd91e8eb54 | ||
|
|
ea4cd99003 | ||
|
|
00ce244716 | ||
|
|
3b791609cd | ||
|
|
a8d4eb7c1d | ||
|
|
d35bfafcf5 | ||
|
|
9f8ef4d1e5 | ||
|
|
a978d094b4 | ||
|
|
47e422ba7a | ||
|
|
4b86815275 | ||
|
|
6c1c850c5f | ||
|
|
f4f1e0fd3c | ||
|
|
ca84e1c654 | ||
|
|
6b6e898882 | ||
|
|
9dbcc5934e | ||
|
|
05bec924e4 | ||
|
|
40d1f2671f | ||
|
|
202161f090 | ||
|
|
7243f871b4 | ||
|
|
2c64a2ce63 | ||
|
|
86eb0353c5 | ||
|
|
282290e151 | ||
|
|
8d348f916b | ||
|
|
659d31324d | ||
|
|
1bc2d83403 | ||
|
|
8c90d36290 | ||
|
|
fac7fd45f8 | ||
|
|
9be438a60a | ||
|
|
c62430c282 | ||
|
|
1be3c77ac6 | ||
|
|
34c206fab0 | ||
|
|
a2f64f1053 | ||
|
|
334d47f3ab | ||
|
|
4adfb9804e | ||
|
|
64ac1ee238 | ||
|
|
0bf06479d5 | ||
|
|
1f8cab4171 | ||
|
|
526bcc4a68 | ||
|
|
9dcdf20fb0 | ||
|
|
be0f66fd94 | ||
|
|
2135291f35 | ||
|
|
a9aae91634 | ||
|
|
4724b6a3d6 | ||
|
|
ce94ba4c83 | ||
|
|
905ff7dad7 | ||
|
|
e59a724efa | ||
|
|
1c8217936d | ||
|
|
72a8854c27 | ||
|
|
98df442ced | ||
|
|
5ada021a6e | ||
|
|
34a8e0a9b6 | ||
|
|
cd8a4b4669 | ||
|
|
0dc4fa975b | ||
|
|
1fb890f4eb | ||
|
|
7fc896e183 | ||
|
|
da0b686369 | ||
|
|
9488105381 | ||
|
|
ec14ae1930 | ||
|
|
15e8fd5eca | ||
|
|
e1e225d6a0 | ||
|
|
3ad174b15b | ||
|
|
b5644ed65b | ||
|
|
06e751fea1 | ||
|
|
fe93ef5488 | ||
|
|
13d84b8d35 | ||
|
|
9b05a41eec | ||
|
|
9275b024de | ||
|
|
4f8d4994cf | ||
|
|
a3144ee234 | ||
|
|
7fe22e9c53 | ||
|
|
42d8368596 | ||
|
|
2c12f1b66e | ||
|
|
eecf997856 | ||
|
|
33dedc88c8 | ||
|
|
759867c863 | ||
|
|
d4613d448c | ||
|
|
21bdea05a0 | ||
|
|
dbd68c5747 | ||
|
|
a3071c2a1f | ||
|
|
d23d7c422d | ||
|
|
4b793dc58a | ||
|
|
112757e202 | ||
|
|
12380fe1b1 | ||
|
|
cf0cc2e2f2 | ||
|
|
b987b7daa0 | ||
|
|
6c7851b51f | ||
|
|
1ff0591553 | ||
|
|
58ad214dcf | ||
|
|
a71cee9300 | ||
|
|
1057b93570 | ||
|
|
e0edfeac7c | ||
|
|
47f45bf9b3 | ||
|
|
8d162f9044 | ||
|
|
6269b43456 | ||
|
|
67867cf0c8 | ||
|
|
7538b4ce15 | ||
|
|
e7918ad637 | ||
|
|
8c6a1e348d | ||
|
|
dfc154ed95 | ||
|
|
a1f8f65add | ||
|
|
3cd80ef67a | ||
|
|
f3310236e4 | ||
|
|
ed28faa3db | ||
|
|
fde5a8850d | ||
|
|
c359c072c4 | ||
|
|
ee0aa40542 | ||
|
|
81f2184aa7 | ||
|
|
96c66b1e20 | ||
|
|
dbb9ffbaf4 | ||
|
|
06a7c024fe | ||
|
|
1229a10f35 | ||
|
|
f15b1ae549 | ||
|
|
71fea2e360 | ||
|
|
5baa371739 | ||
|
|
cc8b5bc808 | ||
|
|
53c6248a6d | ||
|
|
fc4b02b79f | ||
|
|
a3dd9eb4b7 | ||
|
|
c4bc310271 | ||
|
|
079abc162f | ||
|
|
d773d163f7 | ||
|
|
1899795d08 | ||
|
|
43c58b5bf5 | ||
|
|
68ada92f3b | ||
|
|
2c06bfc9ce | ||
|
|
4c43afda19 | ||
|
|
5602b5d2d7 | ||
|
|
032318494b | ||
|
|
40c22dcec8 | ||
|
|
04f682bf7a | ||
|
|
070a12a10c | ||
|
|
91cc4689c9 | ||
|
|
febfcf709d | ||
|
|
53460db4d7 | ||
|
|
37a44c439e | ||
|
|
6609f38fa2 | ||
|
|
8f5be46d52 | ||
|
|
cf1d5a29f6 | ||
|
|
1425021106 | ||
|
|
3866dcaaae | ||
|
|
7b42316366 | ||
|
|
8cede51bac | ||
|
|
a880f47925 | ||
|
|
383c3cfe3e | ||
|
|
32fcb84cf6 | ||
|
|
ce9d75c2e4 | ||
|
|
26845642f0 | ||
|
|
34195a1b35 | ||
|
|
6fa0d9d4ed | ||
|
|
c723ba5289 | ||
|
|
3ff9fa9931 | ||
|
|
7accac2f63 | ||
|
|
044c047ac6 | ||
|
|
3202e77b57 | ||
|
|
a858093db8 | ||
|
|
5a2ecd25e7 | ||
|
|
6c89935521 | ||
|
|
86a559caef | ||
|
|
33ff10728d | ||
|
|
ff1f322c88 | ||
|
|
d3da899459 | ||
|
|
9fe524cd20 | ||
|
|
1481a62b23 | ||
|
|
ce6d96feda | ||
|
|
6c57a3bb68 | ||
|
|
0641c6b0a6 | ||
|
|
565b0b82dd | ||
|
|
4ea27e0d1b | ||
|
|
79c196fc08 | ||
|
|
249a5e5e4d | ||
|
|
51c73cb357 | ||
|
|
98f2d936d9 | ||
|
|
8d35b71321 | ||
|
|
a80d5b1b39 | ||
|
|
e5d86419c8 | ||
|
|
54a98ff612 | ||
|
|
9b5371f2ab | ||
|
|
e7077185bf | ||
|
|
4187d02b8a | ||
|
|
457359322f | ||
|
|
8a65c6e1c8 | ||
|
|
c4e6fc23fc | ||
|
|
fb29f68efc | ||
|
|
71127c039d | ||
|
|
1fcddba558 | ||
|
|
127da5525c | ||
|
|
0f52ab47a0 | ||
|
|
b06a508ceb | ||
|
|
8cb5ce8307 | ||
|
|
c1aa4129f9 | ||
|
|
d6b10b7f44 | ||
|
|
e2aa9dc599 | ||
|
|
a043369d07 | ||
|
|
03eca250d9 | ||
|
|
65d01d508b | ||
|
|
e20599d7bb | ||
|
|
9288b53015 | ||
|
|
82be0a8af2 | ||
|
|
35c374fc79 | ||
|
|
dbe135991b | ||
|
|
3a2ec25fb4 | ||
|
|
fa09d68603 | ||
|
|
eb140d9e69 | ||
|
|
64f89b3fce | ||
|
|
aaaae87aa7 | ||
|
|
5852c16ba6 | ||
|
|
ebd8941439 | ||
|
|
32cb18fc85 | ||
|
|
aeb8eb3d1e | ||
|
|
6654cc35f7 | ||
|
|
28ce9b700e | ||
|
|
0558bd82bb | ||
|
|
f887aaa71f | ||
|
|
69ada03b7b | ||
|
|
ee6beae50a | ||
|
|
799feac0e1 | ||
|
|
0d86678a44 | ||
|
|
38f893c124 | ||
|
|
a2b444f179 | ||
|
|
f46bacdeaa | ||
|
|
9ee77a95c6 | ||
|
|
93b80307db | ||
|
|
0a883edd4d | ||
|
|
4cd9556f7b | ||
|
|
9ed2a0da8f | ||
|
|
7eac219eae | ||
|
|
805170ffd7 | ||
|
|
d696f6c3f6 | ||
|
|
3cdeb446c4 | ||
|
|
58737a8e28 | ||
|
|
2fb74f5b02 | ||
|
|
768a3f62f1 | ||
|
|
b03a64dd53 | ||
|
|
44a2d7a346 | ||
|
|
386382c456 | ||
|
|
d9f8f7721a | ||
|
|
d2711f4af0 | ||
|
|
77fd7ea4a8 | ||
|
|
be00b1ca96 | ||
|
|
faa5a5e024 | ||
|
|
798d27c2cb | ||
|
|
5b4dc9e7ee | ||
|
|
33574d70c8 | ||
|
|
f118e27047 | ||
|
|
2ab33467d8 | ||
|
|
42a6757a10 | ||
|
|
aa38c1123c | ||
|
|
5fcff09aae | ||
|
|
bc705ad8ce | ||
|
|
5e2ecda413 | ||
|
|
25dc3f8778 | ||
|
|
2957f5bc7f | ||
|
|
78961c8037 | ||
|
|
8713e38c44 | ||
|
|
96904968d8 | ||
|
|
6d6bbbb627 | ||
|
|
14c5123fda | ||
|
|
de376292ba | ||
|
|
8faf588775 | ||
|
|
e8fd40ace0 | ||
|
|
a2b18a9f6e | ||
|
|
8f6289707b | ||
|
|
4cd2f93c31 | ||
|
|
e22486ada8 | ||
|
|
0051da95c9 | ||
|
|
122142c040 | ||
|
|
91ad0a9f89 | ||
|
|
79f450df8e | ||
|
|
aab66b8ce8 | ||
|
|
0afe94c4d4 | ||
|
|
6ea3ecbb26 | ||
|
|
6c1919273b | ||
|
|
1ed3a8f0e9 | ||
|
|
7dc30ab866 | ||
|
|
8f82fc26a2 | ||
|
|
e87dce023b | ||
|
|
89a05e9bbc | ||
|
|
74c9b9cf6a | ||
|
|
96fbc9ea27 | ||
|
|
e70d377a53 | ||
|
|
f65ef9f75c | ||
|
|
632ff959ff | ||
|
|
19d093f7aa | ||
|
|
7149c41804 | ||
|
|
1a5b5c32b8 | ||
|
|
1b44ca8ef4 | ||
|
|
d7f4707044 | ||
|
|
270a41443c | ||
|
|
9d39ac83f9 | ||
|
|
ce393da6fd | ||
|
|
2f86774006 | ||
|
|
8666512d99 | ||
|
|
e2c63c41e7 | ||
|
|
c827e73dac | ||
|
|
f9685717b8 | ||
|
|
47a3ba9bd5 | ||
|
|
af3e6f792c | ||
|
|
fc56a1c170 | ||
|
|
84fb908261 | ||
|
|
cb4a38d7a7 | ||
|
|
9518c38bb8 | ||
|
|
5e37d6ea7e | ||
|
|
b70f7bd866 | ||
|
|
77e11fe8fe | ||
|
|
93f35b037d | ||
|
|
d056cb22ef | ||
|
|
4883876dc5 | ||
|
|
863b5e2e8e | ||
|
|
0f8e073d10 | ||
|
|
54e76b2534 | ||
|
|
0579db1162 | ||
|
|
7f20118d48 | ||
|
|
89d0f90e27 | ||
|
|
41c84b4652 | ||
|
|
0ae9283fba | ||
|
|
f1813c35ed | ||
|
|
0c5978715e | ||
|
|
5c1a6b7d6d | ||
|
|
84c439b774 | ||
|
|
655759a5fc | ||
|
|
6c85902ce8 | ||
|
|
ef0c2086eb | ||
|
|
ffb148aaa9 | ||
|
|
b8ed41fa82 | ||
|
|
fbd03287ea | ||
|
|
7919433288 | ||
|
|
bf281f6ea9 | ||
|
|
3568be84c8 | ||
|
|
8d2ab3de42 | ||
|
|
641897713f | ||
|
|
4c4cbaef9f | ||
|
|
aef224732c | ||
|
|
b0c1be7338 | ||
|
|
14a3a6073e | ||
|
|
d7ae95684c | ||
|
|
8b39b3b41a | ||
|
|
fc7c2117e9 | ||
|
|
0876d7825c | ||
|
|
3953366a9e | ||
|
|
d7f5ef6564 | ||
|
|
63cf681369 | ||
|
|
962de13965 | ||
|
|
7211ff22df | ||
|
|
003d7f0915 | ||
|
|
f019452207 | ||
|
|
c323a2393a | ||
|
|
85be3c7692 | ||
|
|
5f3ebc26e0 | ||
|
|
d282966aa1 | ||
|
|
71e132ce0f | ||
|
|
d6d84e8f5e | ||
|
|
a0f1c8fc7c | ||
|
|
4fbfddaa93 | ||
|
|
bc7793def1 | ||
|
|
fdc7f58bb4 | ||
|
|
6c597ad165 | ||
|
|
48ec69c4f5 | ||
|
|
4e16b19ae6 | ||
|
|
1ea3d55167 | ||
|
|
b4a446dba0 | ||
|
|
7181bd1c9b | ||
|
|
9e8ac3b09b | ||
|
|
e24e1fc1f0 | ||
|
|
f28b48a473 | ||
|
|
4f58537949 | ||
|
|
0512f65c8f | ||
|
|
641b18fe13 | ||
|
|
c680327ec3 | ||
|
|
e5d2eb9f3d | ||
|
|
da25f4104c | ||
|
|
871dc81da3 | ||
|
|
947bdeed3e | ||
|
|
1285e8ffef | ||
|
|
a8947c3b96 | ||
|
|
565d116955 | ||
|
|
1fe9f43690 | ||
|
|
4a522fd10f | ||
|
|
5e349590fd | ||
|
|
625c0ad578 | ||
|
|
d3a7bec674 | ||
|
|
652facba9f | ||
|
|
b1ef7506ea | ||
|
|
c95d7d465a | ||
|
|
3800a16f3e | ||
|
|
d70a0c8c24 | ||
|
|
e999b35c42 | ||
|
|
70919638ba | ||
|
|
6ea48cd73e | ||
|
|
63ca8e4134 | ||
|
|
725cc469cf | ||
|
|
553e81f888 | ||
|
|
665a4d83e3 | ||
|
|
018514d657 | ||
|
|
73ece87e68 | ||
|
|
90f63774f4 | ||
|
|
17aecc17d2 | ||
|
|
9157f53d43 | ||
|
|
9bb696aa6e | ||
|
|
32da686724 | ||
|
|
cee81e9df6 | ||
|
|
b544922da1 | ||
|
|
b3c2f35358 | ||
|
|
7d767f8f63 | ||
|
|
834e6c692c | ||
|
|
fabdab78ef | ||
|
|
71d428433f | ||
|
|
2f689fffbe | ||
|
|
3119d5ed22 | ||
|
|
aab27e9b93 | ||
|
|
b60a30cbd4 | ||
|
|
a313109b15 | ||
|
|
88acd95a72 | ||
|
|
7b36630f47 | ||
|
|
cc5f329d33 | ||
|
|
c3fbb07535 | ||
|
|
31c1e1684d | ||
|
|
083e56d97d | ||
|
|
098a407e25 | ||
|
|
6347db56c5 | ||
|
|
e660879a00 | ||
|
|
5635f5fb49 | ||
|
|
8d043e6f85 | ||
|
|
31602c4b28 | ||
|
|
57cd8adc2d | ||
|
|
c1e20fe7a0 | ||
|
|
b1f5529aa4 | ||
|
|
0497a4ba96 | ||
|
|
275e02a8cf | ||
|
|
887a09d052 | ||
|
|
02af117f51 | ||
|
|
6e2de1b4b0 | ||
|
|
47f743e623 | ||
|
|
4f0fa57a1b | ||
|
|
3d5f301a07 | ||
|
|
f9c991e660 | ||
|
|
6e3f4a7a6e | ||
|
|
07139820cb | ||
|
|
350699eda8 | ||
|
|
764356bf47 | ||
|
|
ea683344f5 | ||
|
|
10a7544d68 | ||
|
|
d3eea5e694 | ||
|
|
8fd9fea113 | ||
|
|
bff13e168a | ||
|
|
470a4b7746 | ||
|
|
38c2ea7025 | ||
|
|
774a3da7f4 | ||
|
|
5f3b4575de | ||
|
|
5895654538 | ||
|
|
59f9967dba | ||
|
|
058475c131 | ||
|
|
b402d9ba6d | ||
|
|
3685cb5517 | ||
|
|
5db478a4a0 | ||
|
|
059347eec3 | ||
|
|
e8dbfa42cf | ||
|
|
3d12e040ed | ||
|
|
fceca3bcae | ||
|
|
4e2cf62e89 | ||
|
|
5e17d72922 | ||
|
|
67df298f21 | ||
|
|
353a9a55c7 | ||
|
|
0ac3598ca5 | ||
|
|
06f06173b0 | ||
|
|
fcd03fb1c2 | ||
|
|
da5e6883d4 | ||
|
|
ef05df9224 | ||
|
|
8b8c0e325f | ||
|
|
5bb06fdb50 | ||
|
|
993fa9290d | ||
|
|
5924571904 | ||
|
|
9cc4520a34 | ||
|
|
62987196cb | ||
|
|
cfa21af432 | ||
|
|
6f1c7ee733 | ||
|
|
2cab6982c1 | ||
|
|
3ede367df4 | ||
|
|
bcd2a8f211 | ||
|
|
ee15db4c7c | ||
|
|
f6bf0ad21f | ||
|
|
ad0e43dc52 | ||
|
|
5287e5c111 | ||
|
|
e19a57c50a | ||
|
|
113d62a95f | ||
|
|
b5899c193a | ||
|
|
817b397d20 | ||
|
|
8b41810189 | ||
|
|
f25ab7c6da | ||
|
|
f03b40aa50 | ||
|
|
9dd4c7aaa3 | ||
|
|
b61fdaf721 | ||
|
|
1603106cb4 | ||
|
|
1454000b91 | ||
|
|
b2e63d5e47 | ||
|
|
e7ede6af4a | ||
|
|
5503d4efb4 | ||
|
|
d4af743805 | ||
|
|
8b395c934c | ||
|
|
ae0855614b | ||
|
|
169cd1a466 | ||
|
|
54640dbca0 | ||
|
|
82f81752e4 | ||
|
|
8b6cc0e323 | ||
|
|
c0996f5fb1 | ||
|
|
3998796bf0 | ||
|
|
eab82f3efa | ||
|
|
9e3d90896b | ||
|
|
e66a1002ee | ||
|
|
82160e2072 | ||
|
|
e814f28039 | ||
|
|
03e58523b2 | ||
|
|
341ef411a4 | ||
|
|
8d19555cf1 | ||
|
|
d23fd0515d | ||
|
|
b9483c28b0 | ||
|
|
6f9fc0c3f8 | ||
|
|
766a088749 | ||
|
|
2b539cab85 | ||
|
|
2fb67a3648 | ||
|
|
64c5e3994e | ||
|
|
7b792926eb | ||
|
|
c067788428 | ||
|
|
b7071a48c2 | ||
|
|
dee4b72303 | ||
|
|
5994a77b84 | ||
|
|
f93506fe2c | ||
|
|
7c86e38b81 | ||
|
|
1c374fba7d | ||
|
|
2cc9e2ca0b | ||
|
|
335dfd564a | ||
|
|
5380d57ce8 | ||
|
|
a01f80db5b | ||
|
|
d7eba47adb | ||
|
|
5fffdec69d | ||
|
|
358ef76529 | ||
|
|
bb628c52ad | ||
|
|
d2e0b26287 | ||
|
|
f2d46baf09 | ||
|
|
c6fdadd7f2 | ||
|
|
cc8b115c6a | ||
|
|
82d05e0a10 | ||
|
|
9978b3f9ad | ||
|
|
4f4af058b3 | ||
|
|
b372cebf8d | ||
|
|
3df8e2beb1 | ||
|
|
c45fbcf2ee | ||
|
|
5efa50788f | ||
|
|
3abbe87e10 | ||
|
|
f26bdb3e96 | ||
|
|
4be4e3db7f | ||
|
|
4ea92f0dcb | ||
|
|
a0cfbb93e9 | ||
|
|
08a784d50c | ||
|
|
9ee18d02c8 | ||
|
|
4fd190e4c8 | ||
|
|
a11e33458f | ||
|
|
84fdfbb898 | ||
|
|
f4a252a331 | ||
|
|
d4fe60756b | ||
|
|
f4ab979b59 | ||
|
|
3d3d79b6b3 | ||
|
|
e06d4d7734 | ||
|
|
ab18a4a440 | ||
|
|
7438062b97 | ||
|
|
4510cd11db | ||
|
|
74f2509482 | ||
|
|
f84e42ed15 | ||
|
|
94b4dabee2 | ||
|
|
94d44e8791 | ||
|
|
d24166bd68 | ||
|
|
62f82e7a7e | ||
|
|
7a21a45781 | ||
|
|
91ec0a4482 | ||
|
|
c8f4320b58 | ||
|
|
71a725c5f8 | ||
|
|
96572fe3d4 | ||
|
|
554a9586c6 | ||
|
|
f41c8cf4f2 | ||
|
|
f2f42c2c8a | ||
|
|
70f8ec78de | ||
|
|
e2c398ade2 | ||
|
|
7365e4a63f | ||
|
|
d4fc4bcd61 | ||
|
|
4237b9ed5c | ||
|
|
2dd2541550 | ||
|
|
edda5e5420 | ||
|
|
721674f0cd | ||
|
|
f97ca9c42f | ||
|
|
8f883d8d43 | ||
|
|
5bcf704b76 | ||
|
|
2818bb5833 | ||
|
|
e8b79cde4a | ||
|
|
81c14ce942 | ||
|
|
eacbeef660 | ||
|
|
02e3f45422 | ||
|
|
00afc87af1 | ||
|
|
e48bebb761 | ||
|
|
4c5ec2fb3a | ||
|
|
cad8710ac7 | ||
|
|
96fd07d0f3 | ||
|
|
692007072d |
2
.github/ISSUE_TEMPLATE.md
vendored
2
.github/ISSUE_TEMPLATE.md
vendored
@@ -14,13 +14,11 @@
|
||||
<!-- Briefly describe the problem. -->
|
||||
|
||||
##### ENVIRONMENT
|
||||
<!--
|
||||
* AWX version: X.Y.Z
|
||||
* AWX install method: openshift, minishift, docker on linux, docker for mac, boot2docker
|
||||
* Ansible version: X.Y.Z
|
||||
* Operating System:
|
||||
* Web Browser:
|
||||
-->
|
||||
|
||||
##### STEPS TO REPRODUCE
|
||||
|
||||
|
||||
2
.gitignore
vendored
2
.gitignore
vendored
@@ -49,7 +49,7 @@ __pycache__
|
||||
/.istanbul.yml
|
||||
**/node_modules/**
|
||||
/tmp
|
||||
npm-debug.log
|
||||
**/npm-debug.log*
|
||||
|
||||
# UI build flag files
|
||||
awx/ui/.deps_built
|
||||
|
||||
@@ -57,7 +57,7 @@ For Linux platforms, refer to the following from Docker:
|
||||
|
||||
> https://docs.docker.com/engine/installation/linux/docker-ce/fedora/
|
||||
|
||||
**Centos**
|
||||
**CentOS**
|
||||
|
||||
> https://docs.docker.com/engine/installation/linux/docker-ce/centos/
|
||||
|
||||
@@ -217,7 +217,7 @@ If you want to start and use the development environment, you'll first need to b
|
||||
(container)# /bootstrap_development.sh
|
||||
```
|
||||
|
||||
The above will do all the setup tasks, including running database migrations, so it amy take a couple minutes.
|
||||
The above will do all the setup tasks, including running database migrations, so it may take a couple minutes.
|
||||
|
||||
Now you can start each service individually, or start all services in a pre-configured tmux session like so:
|
||||
|
||||
@@ -281,7 +281,7 @@ For feature work, take a look at the current [Enhancements](https://github.com/a
|
||||
|
||||
If it has someone assigned to it then that person is the person responsible for working the enhancement. If you feel like you could contribute then reach out to that person.
|
||||
|
||||
Fixing bugs, adding translations, and updating the documentation are always appreciated, so reviewing the backlog of issues is always a good place to start.
|
||||
Fixing bugs, adding translations, and updating the documentation are always appreciated, so reviewing the backlog of issues is always a good place to start. For extra information on debugging tools, see [Debugging](https://github.com/ansible/awx/blob/devel/docs/debugging.md).
|
||||
|
||||
**NOTE**
|
||||
|
||||
@@ -293,7 +293,7 @@ Fixing bugs, adding translations, and updating the documentation are always appr
|
||||
|
||||
## Submitting Pull Requests
|
||||
|
||||
Fixes and Features for AWX will go through the Github pull request process. Submit your pull request (PR) agains the `devel` branch.
|
||||
Fixes and Features for AWX will go through the Github pull request process. Submit your pull request (PR) against the `devel` branch.
|
||||
|
||||
Here are a few things you can do to help the visibility of your change, and increase the likelihood that it will be accepted:
|
||||
|
||||
@@ -312,7 +312,7 @@ It's generally a good idea to discuss features with us first by engaging us in t
|
||||
We like to keep our commit history clean, and will require resubmission of pull requests that contain merge commits. Use `git pull --rebase`, rather than
|
||||
`git pull`, and `git rebase`, rather than `git merge`.
|
||||
|
||||
Sometimes it might take us a while to fully review your PR. We try to keep the `devel` branch in good working order, and so we review requests carefuly. Please be patient.
|
||||
Sometimes it might take us a while to fully review your PR. We try to keep the `devel` branch in good working order, and so we review requests carefully. Please be patient.
|
||||
|
||||
All submitted PRs will have the linter and unit tests run against them, and the status reported in the PR.
|
||||
|
||||
|
||||
147
INSTALL.md
147
INSTALL.md
@@ -8,28 +8,35 @@ This document provides a guide for installing AWX.
|
||||
- [Clone the repo](#clone-the-repo)
|
||||
- [AWX branding](#awx-branding)
|
||||
- [Prerequisites](#prerequisites)
|
||||
- [System Requirements](#system-requirements)
|
||||
- [AWX Tunables](#awx-tunables)
|
||||
- [Choose a deployment platform](#choose-a-deployment-platform)
|
||||
- [Official vs Building Images](#official-vs-building-images)
|
||||
- [OpenShift](#openshift)
|
||||
- [Prerequisites](#prerequisites)
|
||||
- [Prerequisites](#prerequisites-1)
|
||||
- [Deploying to Minishift](#deploying-to-minishift)
|
||||
- [Pre-build steps](#pre-build-steps)
|
||||
- [PostgreSQL](#postgresql)
|
||||
- [Start the build](#start-the-build)
|
||||
- [Post build](#post-build)
|
||||
- [Accessing AWX](#accessing-awx)
|
||||
- [Docker](#docker)
|
||||
- [Kubernetes](#kubernetes)
|
||||
- [Prerequisites](#prerequisites-2)
|
||||
- [Pre-build steps](#pre-build-steps-1)
|
||||
- [Start the build](#start-the-build-1)
|
||||
- [Accessing AWX](#accessing-awx-1)
|
||||
- [SSL Termination](#ssl-termination)
|
||||
- [Docker or Docker Compose](#docker-or-docker-compose)
|
||||
- [Prerequisites](#prerequisites-3)
|
||||
- [Pre-build steps](#pre-build-steps-2)
|
||||
- [Deploying to a remote host](#deploying-to-a-remote-host)
|
||||
- [Inventory variables](#inventory-variables)
|
||||
- [Docker registry](#docker-registry)
|
||||
- [PostgreSQL](#postgresql-1)
|
||||
- [Proxy settings](#proxy-settings)
|
||||
- [Start the build](#start-the-build-1)
|
||||
- [Start the build](#start-the-build-2)
|
||||
- [Post build](#post-build-1)
|
||||
- [Accessing AWX](#accessing-awx-1)
|
||||
- [Accessing AWX](#accessing-awx-2)
|
||||
|
||||
## Getting started
|
||||
|
||||
@@ -49,11 +56,20 @@ To install the assets, clone the `awx-logos` repo so that it is next to your `aw
|
||||
|
||||
Before you can run a deployment, you'll need the following installed in your local environment:
|
||||
|
||||
- [Ansible](http://docs.ansible.com/ansible/latest/intro_installation.html) Requires Version 2.3+
|
||||
- [Ansible](http://docs.ansible.com/ansible/latest/intro_installation.html) Requires Version 2.4+
|
||||
- [Docker](https://docs.docker.com/engine/installation/)
|
||||
- [docker-py](https://github.com/docker/docker-py) Python module
|
||||
- [GNU Make](https://www.gnu.org/software/make/)
|
||||
- [Git](https://git-scm.com/)
|
||||
- [Git](https://git-scm.com/) Requires Version 1.8.4+
|
||||
|
||||
### System Requirements
|
||||
|
||||
The system that runs the AWX service will need to satisfy the following requirements
|
||||
|
||||
- At least 4GB of memory
|
||||
- At least 2 cpu cores
|
||||
- At least 20GB of space
|
||||
- Running Docker, Openshift, or Kubernetes
|
||||
|
||||
### AWX Tunables
|
||||
|
||||
@@ -61,11 +77,14 @@ Before you can run a deployment, you'll need the following installed in your loc
|
||||
|
||||
### Choose a deployment platform
|
||||
|
||||
We currently support running AWX as a containerized application using Docker images deployed to either an OpenShift cluster, or a standalone Docker daemon. The remainder of this document will walk you through the process of building the images, and deploying them to either platform.
|
||||
We currently support running AWX as a containerized application using Docker images deployed to either an OpenShift cluster, docker-compose or a standalone Docker daemon. The remainder of this document will walk you through the process of building the images, and deploying them to either platform.
|
||||
|
||||
The [installer](./installer) directory contains an [inventory](./installer/inventory) file, and a playbook, [install.yml](./installer/install.yml). You'll begin by setting variables in the inventory file according to the platform you wish to use, and then you'll start the image build and deployment process by running the playbook.
|
||||
|
||||
In the sections below, you'll find deployment details and instructions for each platform. To deploy to Docker, view the [Docker section](#docker), and for OpenShift, view the [OpenShift section](#openshift).
|
||||
In the sections below, you'll find deployment details and instructions for each platform:
|
||||
- [Docker and Docker Compose](#docker-and-docker-compose)
|
||||
- [OpenShift](#openshift)
|
||||
- [Kubernetes](#kubernetes).
|
||||
|
||||
### Official vs Building Images
|
||||
|
||||
@@ -123,10 +142,6 @@ Before starting the build process, review the [inventory](./installer/inventory)
|
||||
|
||||
> Name of the OpenShift project that will be created, and used as the namespace for the AWX app. Defaults to *awx*.
|
||||
|
||||
*awx_node_port*
|
||||
|
||||
> The web server port running inside the AWX pod. Defaults to *30083*.
|
||||
|
||||
*openshift_user*
|
||||
|
||||
> Username of the OpenShift user that will create the project, and deploy the application. Defaults to *developer*.
|
||||
@@ -134,7 +149,7 @@ Before starting the build process, review the [inventory](./installer/inventory)
|
||||
*docker_registry*
|
||||
|
||||
> IP address and port, or URL, for accessing a registry that the OpenShift cluster can access. Defaults to *172.30.1.1:5000*, the internal registry delivered with Minishift. This is not needed if you are using official hosted images.
|
||||
n
|
||||
|
||||
*docker_registry_repository*
|
||||
|
||||
> Namespace to use when pushing and pulling images to and from the registry. Generally this will match the project name. It defaults to *awx*. This is not needed if you are using official hosted images.
|
||||
@@ -261,16 +276,88 @@ The above example is taken from a Minishift instance. From a web browser, use `h
|
||||
|
||||
Once you access the AWX server, you will be prompted with a login dialog. The default administrator username is `admin`, and the password is `password`.
|
||||
|
||||
## Docker
|
||||
## Kubernetes
|
||||
|
||||
### Prerequisites
|
||||
|
||||
You will need the following installed on the host where AWX will be deployed:
|
||||
A Kubernetes deployment will require you to have access to a Kubernetes cluster as well as the following tools:
|
||||
|
||||
- [Docker](https://docs.docker.com/engine/installation/)
|
||||
- [docker-py](https://github.com/docker/docker-py) Python module
|
||||
- [kubectl](https://kubernetes.io/docs/tasks/tools/install-kubectl/)
|
||||
- [helm](https://docs.helm.sh/using_helm/#quickstart-guide)
|
||||
|
||||
Note: After installing Docker, the Docker service must be started.
|
||||
The installation program will reference `kubectl` directly. `helm` is only necessary if you are letting the installer configure PostgreSQL for you.
|
||||
|
||||
### Pre-build steps
|
||||
|
||||
Before starting the build process, review the [inventory](./installer/inventory) file, and uncomment and provide values for the following variables found in the `[all:vars]` section uncommenting when necessary. Make sure the openshift and standalone docker sections are commented out:
|
||||
|
||||
*kubernetes_context*
|
||||
|
||||
> Prior to running the installer, make sure you've configured the context for the cluster you'll be installing to. This is how the installer knows which cluster to connect to and what authentication to use
|
||||
|
||||
*awx_kubernetes_namespace*
|
||||
|
||||
> Name of the Kubernetes namespace where the AWX resources will be installed. This will be created if it doesn't exist
|
||||
|
||||
*docker_registry_*
|
||||
|
||||
> These settings should be used if building your own base images. You'll need access to an external registry and are responsible for making sure your kube cluster can talk to it and use it. If these are undefined and the dockerhub_ configuration settings are uncommented then the images will be pulled from dockerhub instead
|
||||
|
||||
### Start the build
|
||||
|
||||
After making changes to the `inventory` file use `ansible-playbook` to begin the install
|
||||
|
||||
```bash
|
||||
$ ansible-playbook -i inventory install.yml
|
||||
```
|
||||
|
||||
### Post build
|
||||
|
||||
After the playbook run completes, check the status of the deployment by running `kubectl get pods --namespace awx` (replace awx with the namespace you used):
|
||||
|
||||
```bash
|
||||
# View the running pods, it may take a few minutes for everything to be marked in the Running state
|
||||
$ kubectl get pods --namespace awx
|
||||
NAME READY STATUS RESTARTS AGE
|
||||
awx-2558692395-2r8ss 4/4 Running 0 29s
|
||||
awx-postgresql-355348841-kltkn 1/1 Running 0 1m
|
||||
```
|
||||
|
||||
### Accessing AWX
|
||||
|
||||
The AWX web interface is running in the AWX pod behind the `awx-web-svc` service:
|
||||
|
||||
```bash
|
||||
# View available services
|
||||
$ kubectl get svc --namespace awx
|
||||
NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE
|
||||
awx-postgresql ClusterIP 10.7.250.208 <none> 5432/TCP 2m
|
||||
awx-web-svc NodePort 10.7.241.35 <none> 80:30177/TCP 1m
|
||||
```
|
||||
|
||||
The deployment process also creates an `Ingress` named `awx-web-svc`. Some Kubernetes cloud providers will automatically handle routing configuration when an Ingress is created; others may require that you configure it more explicitly. You can see what Kubernetes knows about things with:
|
||||
|
||||
```bash
|
||||
kubectl get ing --namespace awx
|
||||
NAME HOSTS ADDRESS PORTS AGE
|
||||
awx-web-svc * 35.227.x.y 80 3m
|
||||
```
|
||||
|
||||
If your provider is able to allocate an IP Address from the Ingress controller then you can navigate to the address and access the AWX interface. For some providers it can take a few minutes to allocate and make this accessible. For other providers it may require you to manually intervene.
|
||||
|
||||
### SSL Termination
|
||||
|
||||
Unlike Openshift's `Route` the Kubernetes `Ingress` doesn't yet handle SSL termination. As such the default configuration will only expose AWX through HTTP on port 80. You are responsible for configuring SSL support until support is added (either to Kubernetes or AWX itself).
|
||||
|
||||
|
||||
## Docker or Docker-Compose
|
||||
|
||||
### Prerequisites
|
||||
|
||||
- [Docker](https://docs.docker.com/engine/installation/) on the host where AWX will be deployed. After installing Docker, the Docker service must be started (depending on your OS, you may have to add the local user that uses Docker to the ``docker`` group, refer to the documentation for details)
|
||||
- [docker-py](https://github.com/docker/docker-py) Python module.
|
||||
|
||||
If you're installing using Docker Compose, you'll need [Docker Compose](https://docs.docker.com/compose/install/).
|
||||
|
||||
### Pre-build steps
|
||||
|
||||
@@ -313,6 +400,13 @@ Before starting the build process, review the [inventory](./installer/inventory)
|
||||
|
||||
> Provide a port number that can be mapped from the Docker daemon host to the web server running inside the AWX container. Defaults to *80*.
|
||||
|
||||
*use_docker_compose*
|
||||
|
||||
> Switch to ``true`` to use Docker Compose instead of the standalone Docker install.
|
||||
|
||||
*docker_compose_dir*
|
||||
|
||||
When using docker-compose, the `docker-compose.yml` file will be created there (default `/var/lib/awx`).
|
||||
|
||||
#### Docker registry
|
||||
|
||||
@@ -330,6 +424,10 @@ If you wish to tag and push built images to a Docker registry, set the following
|
||||
|
||||
> Username of the user that will push images to the registry. Defaults to *developer*.
|
||||
|
||||
*docker_remove_local_images*
|
||||
|
||||
> Due to the way that the docker_image module behaves, images will not be pushed to a remote repository if they are present locally. Set this to delete local versions of the images that will be pushed to the remote. This will fail if containers are currently running from those images.
|
||||
|
||||
**Note**
|
||||
|
||||
> These settings are ignored if using official images
|
||||
@@ -390,6 +488,8 @@ e240ed8209cd awx_task:1.0.0.8 "/tini -- /bin/sh ..." 2 minutes ago
|
||||
97e196120ab3 postgres:9.6 "docker-entrypoint..." 2 minutes ago Up 2 minutes 5432/tcp postgres
|
||||
```
|
||||
|
||||
If you're deploying using Docker Compose, container names will be prefixed by the name of the folder where the docker-compose.yml file is created (by default, `awx`).
|
||||
|
||||
Immediately after the containers start, the *awx_task* container will perform required setup tasks, including database migrations. These tasks need to complete before the web interface can be accessed. To monitor the progress, you can follow the container's STDOUT by running the following:
|
||||
|
||||
```bash
|
||||
@@ -452,3 +552,14 @@ Added instance awx to tower
|
||||
The AWX web server is accessible on the deployment host, using the *host_port* value set in the *inventory* file. The default URL is [http://localhost](http://localhost).
|
||||
|
||||
You will prompted with a login dialog. The default administrator username is `admin`, and the password is `password`.
|
||||
|
||||
### Maintenance using docker-compose
|
||||
|
||||
After the installation, maintenance operations with docker-compose can be done by using the `docker-compose.yml` file created at the location pointed by `docker_compose_dir`.
|
||||
|
||||
Among the possible operations, you may:
|
||||
|
||||
- Stop AWX : `docker-compose stop`
|
||||
- Upgrade AWX : `docker-compose pull && docker-compose up --force-recreate`
|
||||
|
||||
See the [docker-compose documentation](https://docs.docker.com/compose/) for details.
|
||||
|
||||
@@ -1,4 +1,6 @@
|
||||
recursive-include awx *.py
|
||||
recursive-include awx *.po
|
||||
recursive-include awx *.mo
|
||||
recursive-include awx/static *
|
||||
recursive-include awx/templates *.html
|
||||
recursive-include awx/api/templates *.md *.html
|
||||
|
||||
44
Makefile
44
Makefile
@@ -12,10 +12,10 @@ MANAGEMENT_COMMAND ?= awx-manage
|
||||
IMAGE_REPOSITORY_AUTH ?=
|
||||
IMAGE_REPOSITORY_BASE ?= https://gcr.io
|
||||
|
||||
VERSION=$(shell git describe --long)
|
||||
VERSION3=$(shell git describe --long | sed 's/\-g.*//')
|
||||
VERSION3DOT=$(shell git describe --long | sed 's/\-g.*//' | sed 's/\-/\./')
|
||||
RELEASE_VERSION=$(shell git describe --long | sed 's@\([0-9.]\{1,\}\).*@\1@')
|
||||
VERSION=$(shell git describe --long --first-parent)
|
||||
VERSION3=$(shell git describe --long --first-parent | sed 's/\-g.*//')
|
||||
VERSION3DOT=$(shell git describe --long --first-parent | sed 's/\-g.*//' | sed 's/\-/\./')
|
||||
RELEASE_VERSION=$(shell git describe --long --first-parent | sed 's@\([0-9.]\{1,\}\).*@\1@')
|
||||
|
||||
# NOTE: This defaults the container image version to the branch that's active
|
||||
COMPOSE_TAG ?= $(GIT_BRANCH)
|
||||
@@ -83,7 +83,9 @@ I18N_FLAG_FILE = .i18n_built
|
||||
clean-ui:
|
||||
rm -rf awx/ui/static/
|
||||
rm -rf awx/ui/node_modules/
|
||||
rm -rf awx/ui/coverage/
|
||||
rm -rf awx/ui/test/unit/reports/
|
||||
rm -rf awx/ui/test/spec/reports/
|
||||
rm -rf awx/ui/test/e2e/reports/
|
||||
rm -rf awx/ui/client/languages/
|
||||
rm -f $(UI_DEPS_FLAG_FILE)
|
||||
rm -f $(UI_RELEASE_FLAG_FILE)
|
||||
@@ -201,8 +203,11 @@ develop:
|
||||
fi
|
||||
|
||||
version_file:
|
||||
mkdir -p /var/lib/awx/
|
||||
python -c "import awx as awx; print awx.__version__" > /var/lib/awx/.awx_version
|
||||
mkdir -p /var/lib/awx/; \
|
||||
if [ "$(VENV_BASE)" ]; then \
|
||||
. $(VENV_BASE)/awx/bin/activate; \
|
||||
fi; \
|
||||
python -c "import awx as awx; print awx.__version__" > /var/lib/awx/.awx_version; \
|
||||
|
||||
# Do any one-time init tasks.
|
||||
comma := ,
|
||||
@@ -282,7 +287,7 @@ flower:
|
||||
@if [ "$(VENV_BASE)" ]; then \
|
||||
. $(VENV_BASE)/awx/bin/activate; \
|
||||
fi; \
|
||||
$(PYTHON) manage.py celery flower --address=0.0.0.0 --port=5555 --broker=amqp://guest:guest@$(RABBITMQ_HOST):5672//
|
||||
celery flower --address=0.0.0.0 --port=5555 --broker=amqp://guest:guest@$(RABBITMQ_HOST):5672//
|
||||
|
||||
collectstatic:
|
||||
@if [ "$(VENV_BASE)" ]; then \
|
||||
@@ -294,7 +299,7 @@ uwsgi: collectstatic
|
||||
@if [ "$(VENV_BASE)" ]; then \
|
||||
. $(VENV_BASE)/awx/bin/activate; \
|
||||
fi; \
|
||||
uwsgi -b 32768 --socket 127.0.0.1:8050 --module=awx.wsgi:application --home=/venv/awx --chdir=/awx_devel/ --vacuum --processes=5 --harakiri=120 --master --no-orphans --py-autoreload 1 --max-requests=1000 --stats /tmp/stats.socket --master-fifo=/awxfifo --lazy-apps --logformat "%(addr) %(method) %(uri) - %(proto) %(status)"
|
||||
uwsgi -b 32768 --socket 127.0.0.1:8050 --module=awx.wsgi:application --home=/venv/awx --chdir=/awx_devel/ --vacuum --processes=5 --harakiri=120 --master --no-orphans --py-autoreload 1 --max-requests=1000 --stats /tmp/stats.socket --master-fifo=/awxfifo --lazy-apps --logformat "%(addr) %(method) %(uri) - %(proto) %(status)" --hook-accepting1-once="exec:/bin/sh -c '[ -f /tmp/celery_pid ] && kill -1 `cat /tmp/celery_pid`'"
|
||||
|
||||
daphne:
|
||||
@if [ "$(VENV_BASE)" ]; then \
|
||||
@@ -317,11 +322,11 @@ runserver:
|
||||
|
||||
# Run to start the background celery worker for development.
|
||||
celeryd:
|
||||
rm -f /tmp/celery_pid
|
||||
@if [ "$(VENV_BASE)" ]; then \
|
||||
. $(VENV_BASE)/awx/bin/activate; \
|
||||
fi; \
|
||||
$(PYTHON) manage.py celeryd -l DEBUG -B -Ofair --autoreload --autoscale=100,4 --schedule=$(CELERY_SCHEDULE_FILE) -Q tower_scheduler,tower_broadcast_all,$(COMPOSE_HOST),$(AWX_GROUP_QUEUES) -n celery@$(COMPOSE_HOST)
|
||||
#$(PYTHON) manage.py celery multi show projects jobs default -l DEBUG -Q:projects projects -Q:jobs jobs -Q:default default -c:projects 1 -c:jobs 3 -c:default 3 -Ofair -B --schedule=$(CELERY_SCHEDULE_FILE)
|
||||
celery worker -A awx -l DEBUG -B -Ofair --autoscale=100,4 --schedule=$(CELERY_SCHEDULE_FILE) -Q tower_scheduler,tower_broadcast_all,$(COMPOSE_HOST),$(AWX_GROUP_QUEUES) -n celery@$(COMPOSE_HOST) --pidfile /tmp/celery_pid
|
||||
|
||||
# Run to start the zeromq callback receiver
|
||||
receiver:
|
||||
@@ -330,18 +335,18 @@ receiver:
|
||||
fi; \
|
||||
$(PYTHON) manage.py run_callback_receiver
|
||||
|
||||
socketservice:
|
||||
@if [ "$(VENV_BASE)" ]; then \
|
||||
. $(VENV_BASE)/awx/bin/activate; \
|
||||
fi; \
|
||||
$(PYTHON) manage.py run_socketio_service
|
||||
|
||||
nginx:
|
||||
nginx -g "daemon off;"
|
||||
|
||||
rdb:
|
||||
$(PYTHON) tools/rdb.py
|
||||
|
||||
jupyter:
|
||||
@if [ "$(VENV_BASE)" ]; then \
|
||||
. $(VENV_BASE)/awx/bin/activate; \
|
||||
fi; \
|
||||
$(MANAGEMENT_COMMAND) shell_plus --notebook
|
||||
|
||||
reports:
|
||||
mkdir -p $@
|
||||
|
||||
@@ -495,12 +500,14 @@ ui: clean-ui ui-devel
|
||||
|
||||
ui-test-ci: $(UI_DEPS_FLAG_FILE)
|
||||
$(NPM_BIN) --prefix awx/ui run test:ci
|
||||
$(NPM_BIN) --prefix awx/ui run unit
|
||||
|
||||
testjs_ci:
|
||||
echo "Update UI unittests later" #ui-test-ci
|
||||
|
||||
jshint: $(UI_DEPS_FLAG_FILE)
|
||||
$(NPM_BIN) run --prefix awx/ui jshint
|
||||
$(NPM_BIN) run --prefix awx/ui lint
|
||||
|
||||
# END UI TASKS
|
||||
# --------------------------------------
|
||||
@@ -545,6 +552,7 @@ docker-isolated:
|
||||
TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose -f tools/docker-compose.yml -f tools/docker-isolated-override.yml create
|
||||
docker start tools_awx_1
|
||||
docker start tools_isolated_1
|
||||
echo "__version__ = '`python setup.py --version`'" | docker exec -i tools_isolated_1 /bin/bash -c "cat > /venv/awx/lib/python2.7/site-packages/awx.py"
|
||||
if [ "`docker exec -i -t tools_isolated_1 cat /root/.ssh/authorized_keys`" == "`docker exec -t tools_awx_1 cat /root/.ssh/id_rsa.pub`" ]; then \
|
||||
echo "SSH keys already copied to isolated instance"; \
|
||||
else \
|
||||
@@ -600,7 +608,7 @@ clean-elk:
|
||||
docker rm tools_kibana_1
|
||||
|
||||
psql-container:
|
||||
docker run -it --net tools_default --rm postgres:9.4.1 sh -c 'exec psql -h "postgres" -p "5432" -U postgres'
|
||||
docker run -it --net tools_default --rm postgres:9.6 sh -c 'exec psql -h "postgres" -p "5432" -U postgres'
|
||||
|
||||
VERSION:
|
||||
@echo $(VERSION_TARGET) > $@
|
||||
|
||||
@@ -1,15 +1,17 @@
|
||||
# Copyright (c) 2015 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
import os
|
||||
import sys
|
||||
import warnings
|
||||
|
||||
from pkg_resources import get_distribution
|
||||
from .celery import app as celery_app
|
||||
|
||||
__version__ = get_distribution('awx').version
|
||||
|
||||
__all__ = ['__version__']
|
||||
__all__ = ['__version__', 'celery_app']
|
||||
|
||||
# Check for the presence/absence of "devonly" module to determine if running
|
||||
# from a source code checkout or release packaage.
|
||||
|
||||
@@ -17,7 +17,7 @@ from rest_framework import exceptions
|
||||
from rest_framework import HTTP_HEADER_ENCODING
|
||||
|
||||
# AWX
|
||||
from awx.main.models import UnifiedJob, AuthToken
|
||||
from awx.main.models import AuthToken
|
||||
|
||||
logger = logging.getLogger('awx.api.authentication')
|
||||
|
||||
@@ -137,29 +137,3 @@ class LoggedBasicAuthentication(authentication.BasicAuthentication):
|
||||
if not settings.AUTH_BASIC_ENABLED:
|
||||
return
|
||||
return super(LoggedBasicAuthentication, self).authenticate_header(request)
|
||||
|
||||
|
||||
class TaskAuthentication(authentication.BaseAuthentication):
|
||||
'''
|
||||
Custom authentication used for views accessed by the inventory and callback
|
||||
scripts when running a task.
|
||||
'''
|
||||
|
||||
model = None
|
||||
|
||||
def authenticate(self, request):
|
||||
auth = authentication.get_authorization_header(request).split()
|
||||
if len(auth) != 2 or auth[0].lower() != 'token' or '-' not in auth[1]:
|
||||
return None
|
||||
pk, key = auth[1].split('-', 1)
|
||||
try:
|
||||
unified_job = UnifiedJob.objects.get(pk=pk, status='running')
|
||||
except UnifiedJob.DoesNotExist:
|
||||
return None
|
||||
token = unified_job.task_auth_token
|
||||
if auth[1] != token:
|
||||
raise exceptions.AuthenticationFailed(_('Invalid task token'))
|
||||
return (None, token)
|
||||
|
||||
def authenticate_header(self, request):
|
||||
return 'Token'
|
||||
|
||||
@@ -22,6 +22,7 @@ from rest_framework.filters import BaseFilterBackend
|
||||
|
||||
# AWX
|
||||
from awx.main.utils import get_type_for_model, to_python_boolean
|
||||
from awx.main.utils.db import get_all_field_names
|
||||
from awx.main.models.credential import CredentialType
|
||||
from awx.main.models.rbac import RoleAncestorEntry
|
||||
|
||||
@@ -70,7 +71,7 @@ class TypeFilterBackend(BaseFilterBackend):
|
||||
types_map[ct_type] = ct.pk
|
||||
model = queryset.model
|
||||
model_type = get_type_for_model(model)
|
||||
if 'polymorphic_ctype' in model._meta.get_all_field_names():
|
||||
if 'polymorphic_ctype' in get_all_field_names(model):
|
||||
types_pks = set([v for k,v in types_map.items() if k in types])
|
||||
queryset = queryset.filter(polymorphic_ctype_id__in=types_pks)
|
||||
elif model_type in types:
|
||||
@@ -119,7 +120,7 @@ class FieldLookupBackend(BaseFilterBackend):
|
||||
'last_updated': 'last_job_run',
|
||||
}.get(name, name)
|
||||
|
||||
if name == 'type' and 'polymorphic_ctype' in model._meta.get_all_field_names():
|
||||
if name == 'type' and 'polymorphic_ctype' in get_all_field_names(model):
|
||||
name = 'polymorphic_ctype'
|
||||
new_parts.append('polymorphic_ctype__model')
|
||||
else:
|
||||
@@ -136,7 +137,7 @@ class FieldLookupBackend(BaseFilterBackend):
|
||||
new_parts.pop()
|
||||
new_parts.append(name_alt)
|
||||
else:
|
||||
field = model._meta.get_field_by_name(name)[0]
|
||||
field = model._meta.get_field(name)
|
||||
if isinstance(field, ForeignObjectRel) and getattr(field.field, '__prevent_search__', False):
|
||||
raise PermissionDenied(_('Filtering on %s is not allowed.' % name))
|
||||
elif getattr(field, '__prevent_search__', False):
|
||||
@@ -165,7 +166,13 @@ class FieldLookupBackend(BaseFilterBackend):
|
||||
elif isinstance(field, models.BooleanField):
|
||||
return to_python_boolean(value)
|
||||
elif isinstance(field, (ForeignObjectRel, ManyToManyField, GenericForeignKey, ForeignKey)):
|
||||
return self.to_python_related(value)
|
||||
try:
|
||||
return self.to_python_related(value)
|
||||
except ValueError:
|
||||
raise ParseError(_('Invalid {field_name} id: {field_id}').format(
|
||||
field_name=getattr(field, 'name', 'related field'),
|
||||
field_id=value)
|
||||
)
|
||||
else:
|
||||
return field.to_python(value)
|
||||
|
||||
@@ -242,11 +249,10 @@ class FieldLookupBackend(BaseFilterBackend):
|
||||
# Search across related objects.
|
||||
if key.endswith('__search'):
|
||||
for value in values:
|
||||
for search_term in force_text(value).replace(',', ' ').split():
|
||||
search_value, new_keys = self.value_to_python(queryset.model, key, search_term)
|
||||
assert isinstance(new_keys, list)
|
||||
for new_key in new_keys:
|
||||
search_filters.append((new_key, search_value))
|
||||
search_value, new_keys = self.value_to_python(queryset.model, key, force_text(value))
|
||||
assert isinstance(new_keys, list)
|
||||
for new_key in new_keys:
|
||||
search_filters.append((new_key, search_value))
|
||||
continue
|
||||
|
||||
# Custom chain__ and or__ filters, mutually exclusive (both can
|
||||
@@ -268,8 +274,10 @@ class FieldLookupBackend(BaseFilterBackend):
|
||||
|
||||
# Make legacy v1 Job/Template fields work for backwards compatability
|
||||
# TODO: remove after API v1 deprecation period
|
||||
if queryset.model._meta.object_name in ('JobTemplate', 'Job') and key in ('cloud_credential', 'network_credential'):
|
||||
key = 'extra_credentials'
|
||||
if queryset.model._meta.object_name in ('JobTemplate', 'Job') and key in (
|
||||
'credential', 'vault_credential', 'cloud_credential', 'network_credential'
|
||||
):
|
||||
key = 'credentials'
|
||||
|
||||
# Make legacy v1 Credential fields work for backwards compatability
|
||||
# TODO: remove after API v1 deprecation period
|
||||
@@ -375,7 +383,7 @@ class OrderByBackend(BaseFilterBackend):
|
||||
# given the limited number of views with multiple types,
|
||||
# sorting on polymorphic_ctype.model is effectively the same.
|
||||
new_order_by = []
|
||||
if 'polymorphic_ctype' in queryset.model._meta.get_all_field_names():
|
||||
if 'polymorphic_ctype' in get_all_field_names(queryset.model):
|
||||
for field in order_by:
|
||||
if field == 'type':
|
||||
new_order_by.append('polymorphic_ctype__model')
|
||||
|
||||
@@ -21,7 +21,7 @@ from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
# Django REST Framework
|
||||
from rest_framework.authentication import get_authorization_header
|
||||
from rest_framework.exceptions import PermissionDenied
|
||||
from rest_framework.exceptions import PermissionDenied, AuthenticationFailed
|
||||
from rest_framework import generics
|
||||
from rest_framework.response import Response
|
||||
from rest_framework import status
|
||||
@@ -30,16 +30,19 @@ from rest_framework import views
|
||||
# AWX
|
||||
from awx.api.filters import FieldLookupBackend
|
||||
from awx.main.models import * # noqa
|
||||
from awx.main.access import access_registry
|
||||
from awx.main.utils import * # noqa
|
||||
from awx.main.utils.db import get_all_field_names
|
||||
from awx.api.serializers import ResourceAccessListElementSerializer
|
||||
from awx.api.versioning import URLPathVersioning, get_request_version
|
||||
from awx.api.metadata import SublistAttachDetatchMetadata
|
||||
|
||||
__all__ = ['APIView', 'GenericAPIView', 'ListAPIView', 'SimpleListAPIView',
|
||||
'ListCreateAPIView', 'SubListAPIView', 'SubListCreateAPIView',
|
||||
'SubListDestroyAPIView',
|
||||
'SubListCreateAttachDetachAPIView', 'RetrieveAPIView',
|
||||
'RetrieveUpdateAPIView', 'RetrieveDestroyAPIView',
|
||||
'RetrieveUpdateDestroyAPIView', 'DestroyAPIView',
|
||||
'RetrieveUpdateDestroyAPIView',
|
||||
'SubDetailAPIView',
|
||||
'ResourceAccessList',
|
||||
'ParentMixin',
|
||||
@@ -114,6 +117,10 @@ class APIView(views.APIView):
|
||||
|
||||
drf_request = super(APIView, self).initialize_request(request, *args, **kwargs)
|
||||
request.drf_request = drf_request
|
||||
try:
|
||||
request.drf_request_user = getattr(drf_request, 'user', False)
|
||||
except AuthenticationFailed:
|
||||
request.drf_request_user = None
|
||||
return drf_request
|
||||
|
||||
def finalize_response(self, request, response, *args, **kwargs):
|
||||
@@ -139,7 +146,6 @@ class APIView(views.APIView):
|
||||
response['X-API-Query-Count'] = len(q_times)
|
||||
response['X-API-Query-Time'] = '%0.3fs' % sum(q_times)
|
||||
|
||||
analytics_logger.info("api response", extra=dict(python_objects=dict(request=request, response=response)))
|
||||
return response
|
||||
|
||||
def get_authenticate_header(self, request):
|
||||
@@ -188,6 +194,7 @@ class APIView(views.APIView):
|
||||
'new_in_300': getattr(self, 'new_in_300', False),
|
||||
'new_in_310': getattr(self, 'new_in_310', False),
|
||||
'new_in_320': getattr(self, 'new_in_320', False),
|
||||
'new_in_330': getattr(self, 'new_in_330', False),
|
||||
'new_in_api_v2': getattr(self, 'new_in_api_v2', False),
|
||||
'deprecated': getattr(self, 'deprecated', False),
|
||||
}
|
||||
@@ -267,12 +274,17 @@ class GenericAPIView(generics.GenericAPIView, APIView):
|
||||
return serializer
|
||||
|
||||
def get_queryset(self):
|
||||
#if hasattr(self.request.user, 'get_queryset'):
|
||||
# return self.request.user.get_queryset(self.model)
|
||||
if self.queryset is not None:
|
||||
return self.queryset._clone()
|
||||
elif self.model is not None:
|
||||
return self.model._default_manager.all()
|
||||
qs = self.model._default_manager
|
||||
if self.model in access_registry:
|
||||
access_class = access_registry[self.model]
|
||||
if access_class.select_related:
|
||||
qs = qs.select_related(*access_class.select_related)
|
||||
if access_class.prefetch_related:
|
||||
qs = qs.prefetch_related(*access_class.prefetch_related)
|
||||
return qs
|
||||
else:
|
||||
return super(GenericAPIView, self).get_queryset()
|
||||
|
||||
@@ -321,8 +333,7 @@ class ListAPIView(generics.ListAPIView, GenericAPIView):
|
||||
return page
|
||||
|
||||
def get_description_context(self):
|
||||
opts = self.model._meta
|
||||
if 'username' in opts.get_all_field_names():
|
||||
if 'username' in get_all_field_names(self.model):
|
||||
order_field = 'username'
|
||||
else:
|
||||
order_field = 'name'
|
||||
@@ -441,6 +452,41 @@ class SubListAPIView(ParentMixin, ListAPIView):
|
||||
return qs & sublist_qs
|
||||
|
||||
|
||||
class DestroyAPIView(generics.DestroyAPIView):
|
||||
|
||||
def has_delete_permission(self, obj):
|
||||
return self.request.user.can_access(self.model, 'delete', obj)
|
||||
|
||||
def perform_destroy(self, instance, check_permission=True):
|
||||
if check_permission and not self.has_delete_permission(instance):
|
||||
raise PermissionDenied()
|
||||
super(DestroyAPIView, self).perform_destroy(instance)
|
||||
|
||||
|
||||
class SubListDestroyAPIView(DestroyAPIView, SubListAPIView):
|
||||
"""
|
||||
Concrete view for deleting everything related by `relationship`.
|
||||
"""
|
||||
check_sub_obj_permission = True
|
||||
|
||||
def destroy(self, request, *args, **kwargs):
|
||||
instance_list = self.get_queryset()
|
||||
if (not self.check_sub_obj_permission and
|
||||
not request.user.can_access(self.parent_model, 'delete', self.get_parent_object())):
|
||||
raise PermissionDenied()
|
||||
self.perform_list_destroy(instance_list)
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
|
||||
def perform_list_destroy(self, instance_list):
|
||||
if self.check_sub_obj_permission:
|
||||
# Check permissions for all before deleting, avoiding half-deleted lists
|
||||
for instance in instance_list:
|
||||
if self.has_delete_permission(instance):
|
||||
raise PermissionDenied()
|
||||
for instance in instance_list:
|
||||
self.perform_destroy(instance, check_permission=False)
|
||||
|
||||
|
||||
class SubListCreateAPIView(SubListAPIView, ListCreateAPIView):
|
||||
# Base class for a sublist view that allows for creating subobjects
|
||||
# associated with the parent object.
|
||||
@@ -679,22 +725,11 @@ class RetrieveUpdateAPIView(RetrieveAPIView, generics.RetrieveUpdateAPIView):
|
||||
pass
|
||||
|
||||
|
||||
class RetrieveDestroyAPIView(RetrieveAPIView, generics.RetrieveDestroyAPIView):
|
||||
|
||||
def destroy(self, request, *args, **kwargs):
|
||||
# somewhat lame that delete has to call it's own permissions check
|
||||
obj = self.get_object()
|
||||
if not request.user.can_access(self.model, 'delete', obj):
|
||||
raise PermissionDenied()
|
||||
obj.delete()
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
|
||||
|
||||
class RetrieveUpdateDestroyAPIView(RetrieveUpdateAPIView, RetrieveDestroyAPIView):
|
||||
class RetrieveDestroyAPIView(RetrieveAPIView, DestroyAPIView):
|
||||
pass
|
||||
|
||||
|
||||
class DestroyAPIView(GenericAPIView, generics.DestroyAPIView):
|
||||
class RetrieveUpdateDestroyAPIView(RetrieveUpdateAPIView, DestroyAPIView):
|
||||
pass
|
||||
|
||||
|
||||
|
||||
@@ -52,14 +52,18 @@ class ModelAccessPermission(permissions.BasePermission):
|
||||
if not check_user_access(request.user, view.model, 'add', {view.parent_key: parent_obj}):
|
||||
return False
|
||||
return True
|
||||
elif getattr(view, 'is_job_start', False):
|
||||
elif hasattr(view, 'obj_permission_type'):
|
||||
# Generic object-centric view permission check without object not needed
|
||||
if not obj:
|
||||
return True
|
||||
return check_user_access(request.user, view.model, 'start', obj)
|
||||
elif getattr(view, 'is_job_cancel', False):
|
||||
if not obj:
|
||||
return True
|
||||
return check_user_access(request.user, view.model, 'cancel', obj)
|
||||
# Permission check that happens when get_object() is called
|
||||
extra_kwargs = {}
|
||||
if view.obj_permission_type == 'admin':
|
||||
extra_kwargs['data'] = {}
|
||||
return check_user_access(
|
||||
request.user, view.model, view.obj_permission_type, obj,
|
||||
**extra_kwargs
|
||||
)
|
||||
else:
|
||||
if obj:
|
||||
return True
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,9 +1,9 @@
|
||||
The resulting data structure contains:
|
||||
|
||||
{
|
||||
"count": 99,
|
||||
"next": null,
|
||||
"previous": null,
|
||||
"count": 99,
|
||||
"next": null,
|
||||
"previous": null,
|
||||
"results": [
|
||||
...
|
||||
]
|
||||
@@ -60,6 +60,10 @@ _Added in AWX 1.4_
|
||||
|
||||
?related__search=findme
|
||||
|
||||
Note: If you want to provide more than one search terms, please use multiple
|
||||
search fields with the same key, like `?related__search=foo&related__search=bar`,
|
||||
All search terms with the same key will be ORed together.
|
||||
|
||||
## Filtering
|
||||
|
||||
Any additional query string parameters may be used to filter the list of
|
||||
@@ -70,7 +74,7 @@ in the specified value should be url-encoded. For example:
|
||||
?field=value%20xyz
|
||||
|
||||
Fields may also span relations, only for fields and relationships defined in
|
||||
the database:
|
||||
the database:
|
||||
|
||||
?other__field=value
|
||||
|
||||
|
||||
@@ -10,4 +10,5 @@
|
||||
{% if new_in_300 %}> _Added in Ansible Tower 3.0.0_{% endif %}
|
||||
{% if new_in_310 %}> _New in Ansible Tower 3.1.0_{% endif %}
|
||||
{% if new_in_320 %}> _New in Ansible Tower 3.2.0_{% endif %}
|
||||
{% if new_in_330 %}> _New in Ansible Tower 3.3.0_{% endif %}
|
||||
{% endif %}
|
||||
|
||||
12
awx/api/templates/api/job_create_schedule.md
Normal file
12
awx/api/templates/api/job_create_schedule.md
Normal file
@@ -0,0 +1,12 @@
|
||||
Create a schedule based on a job:
|
||||
|
||||
Make a POST request to this endpoint to create a schedule that launches
|
||||
the job template that launched this job, and uses the same
|
||||
parameters that the job was launched with. These parameters include all
|
||||
"prompted" resources such as `extra_vars`, `inventory`, `limit`, etc.
|
||||
|
||||
Jobs that were launched with user-provided passwords cannot have a schedule
|
||||
created from them.
|
||||
|
||||
Make a GET request for information about what those prompts are and
|
||||
whether or not a schedule can be created.
|
||||
@@ -26,9 +26,6 @@ The response will include the following fields:
|
||||
job_template (array, read-only)
|
||||
* `survey_enabled`: Flag indicating whether the job_template has an enabled
|
||||
survey (boolean, read-only)
|
||||
* `credential_needed_to_start`: Flag indicating the presence of a credential
|
||||
associated with the job template. If not then one should be supplied when
|
||||
launching the job (boolean, read-only)
|
||||
* `inventory_needed_to_start`: Flag indicating the presence of an inventory
|
||||
associated with the job template. If not then one should be supplied when
|
||||
launching the job (boolean, read-only)
|
||||
@@ -36,9 +33,8 @@ The response will include the following fields:
|
||||
Make a POST request to this resource to launch the job_template. If any
|
||||
passwords, inventory, or extra variables (extra_vars) are required, they must
|
||||
be passed via POST data, with extra_vars given as a YAML or JSON string and
|
||||
escaped parentheses. If `credential_needed_to_start` is `True` then the
|
||||
`credential` field is required and if the `inventory_needed_to_start` is
|
||||
`True` then the `inventory` is required as well.
|
||||
escaped parentheses. If the `inventory_needed_to_start` is `True` then the
|
||||
`inventory` is required.
|
||||
|
||||
If successful, the response status code will be 201. If any required passwords
|
||||
are not provided, a 400 status code will be returned. If the job cannot be
|
||||
|
||||
6
awx/api/templates/api/sub_list_destroy_api_view.md
Normal file
6
awx/api/templates/api/sub_list_destroy_api_view.md
Normal file
@@ -0,0 +1,6 @@
|
||||
{% include "api/sub_list_create_api_view.md" %}
|
||||
|
||||
# Delete all {{ model_verbose_name_plural }} of this {{ parent_model_verbose_name|title }}:
|
||||
|
||||
Make a DELETE request to this resource to delete all {{ model_verbose_name_plural }} show in the list.
|
||||
The {{ parent_model_verbose_name|title }} will not be deleted by this request.
|
||||
420
awx/api/urls.py
420
awx/api/urls.py
@@ -1,420 +0,0 @@
|
||||
# Copyright (c) 2015 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
# noqa
|
||||
|
||||
from django.conf.urls import include, patterns, url as original_url
|
||||
|
||||
def url(regex, view, kwargs=None, name=None, prefix=''):
|
||||
# Set default name from view name (if a string).
|
||||
if isinstance(view, basestring) and name is None:
|
||||
name = view
|
||||
return original_url(regex, view, kwargs, name, prefix)
|
||||
|
||||
organization_urls = patterns('awx.api.views',
|
||||
url(r'^$', 'organization_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', 'organization_detail'),
|
||||
url(r'^(?P<pk>[0-9]+)/users/$', 'organization_users_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/admins/$', 'organization_admins_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/inventories/$', 'organization_inventories_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/projects/$', 'organization_projects_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/workflow_job_templates/$', 'organization_workflow_job_templates_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/teams/$', 'organization_teams_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/credentials/$', 'organization_credential_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/activity_stream/$', 'organization_activity_stream_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/notification_templates/$', 'organization_notification_templates_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/notification_templates_any/$', 'organization_notification_templates_any_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', 'organization_notification_templates_error_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', 'organization_notification_templates_success_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/instance_groups/$', 'organization_instance_groups_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/object_roles/$', 'organization_object_roles_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/access_list/$', 'organization_access_list'),
|
||||
)
|
||||
|
||||
user_urls = patterns('awx.api.views',
|
||||
url(r'^$', 'user_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', 'user_detail'),
|
||||
url(r'^(?P<pk>[0-9]+)/teams/$', 'user_teams_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/organizations/$', 'user_organizations_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/admin_of_organizations/$', 'user_admin_of_organizations_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/projects/$', 'user_projects_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/credentials/$', 'user_credentials_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/roles/$', 'user_roles_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/activity_stream/$', 'user_activity_stream_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/access_list/$', 'user_access_list'),
|
||||
|
||||
)
|
||||
|
||||
project_urls = patterns('awx.api.views',
|
||||
url(r'^$', 'project_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', 'project_detail'),
|
||||
url(r'^(?P<pk>[0-9]+)/playbooks/$', 'project_playbooks'),
|
||||
url(r'^(?P<pk>[0-9]+)/inventories/$', 'project_inventories'),
|
||||
url(r'^(?P<pk>[0-9]+)/scm_inventory_sources/$', 'project_scm_inventory_sources'),
|
||||
url(r'^(?P<pk>[0-9]+)/teams/$', 'project_teams_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/update/$', 'project_update_view'),
|
||||
url(r'^(?P<pk>[0-9]+)/project_updates/$', 'project_updates_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/activity_stream/$', 'project_activity_stream_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/schedules/$', 'project_schedules_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/notification_templates_any/$', 'project_notification_templates_any_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', 'project_notification_templates_error_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', 'project_notification_templates_success_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/object_roles/$', 'project_object_roles_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/access_list/$', 'project_access_list'),
|
||||
)
|
||||
|
||||
project_update_urls = patterns('awx.api.views',
|
||||
url(r'^$', 'project_update_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', 'project_update_detail'),
|
||||
url(r'^(?P<pk>[0-9]+)/cancel/$', 'project_update_cancel'),
|
||||
url(r'^(?P<pk>[0-9]+)/stdout/$', 'project_update_stdout'),
|
||||
url(r'^(?P<pk>[0-9]+)/scm_inventory_updates/$', 'project_update_scm_inventory_updates'),
|
||||
url(r'^(?P<pk>[0-9]+)/notifications/$', 'project_update_notifications_list'),
|
||||
)
|
||||
|
||||
team_urls = patterns('awx.api.views',
|
||||
url(r'^$', 'team_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', 'team_detail'),
|
||||
url(r'^(?P<pk>[0-9]+)/projects/$', 'team_projects_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/users/$', 'team_users_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/credentials/$', 'team_credentials_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/roles/$', 'team_roles_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/object_roles/$', 'team_object_roles_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/activity_stream/$', 'team_activity_stream_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/access_list/$', 'team_access_list'),
|
||||
)
|
||||
|
||||
inventory_urls = patterns('awx.api.views',
|
||||
url(r'^$', 'inventory_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', 'inventory_detail'),
|
||||
url(r'^(?P<pk>[0-9]+)/hosts/$', 'inventory_hosts_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/groups/$', 'inventory_groups_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/root_groups/$', 'inventory_root_groups_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/variable_data/$', 'inventory_variable_data'),
|
||||
url(r'^(?P<pk>[0-9]+)/script/$', 'inventory_script_view'),
|
||||
url(r'^(?P<pk>[0-9]+)/tree/$', 'inventory_tree_view'),
|
||||
url(r'^(?P<pk>[0-9]+)/inventory_sources/$', 'inventory_inventory_sources_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/update_inventory_sources/$', 'inventory_inventory_sources_update'),
|
||||
url(r'^(?P<pk>[0-9]+)/activity_stream/$', 'inventory_activity_stream_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/job_templates/$', 'inventory_job_template_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/ad_hoc_commands/$', 'inventory_ad_hoc_commands_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/access_list/$', 'inventory_access_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/object_roles/$', 'inventory_object_roles_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/instance_groups/$', 'inventory_instance_groups_list'),
|
||||
#url(r'^(?P<pk>[0-9]+)/single_fact/$', 'inventory_single_fact_view'),
|
||||
)
|
||||
|
||||
host_urls = patterns('awx.api.views',
|
||||
url(r'^$', 'host_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', 'host_detail'),
|
||||
url(r'^(?P<pk>[0-9]+)/variable_data/$', 'host_variable_data'),
|
||||
url(r'^(?P<pk>[0-9]+)/groups/$', 'host_groups_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/all_groups/$', 'host_all_groups_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/job_events/', 'host_job_events_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/job_host_summaries/$', 'host_job_host_summaries_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/activity_stream/$', 'host_activity_stream_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/inventory_sources/$', 'host_inventory_sources_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/smart_inventories/$', 'host_smart_inventories_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/ad_hoc_commands/$', 'host_ad_hoc_commands_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/ad_hoc_command_events/$', 'host_ad_hoc_command_events_list'),
|
||||
#url(r'^(?P<pk>[0-9]+)/single_fact/$', 'host_single_fact_view'),
|
||||
url(r'^(?P<pk>[0-9]+)/fact_versions/$', 'host_fact_versions_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/fact_view/$', 'host_fact_compare_view'),
|
||||
url(r'^(?P<pk>[0-9]+)/insights/$', 'host_insights'),
|
||||
)
|
||||
|
||||
group_urls = patterns('awx.api.views',
|
||||
url(r'^$', 'group_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', 'group_detail'),
|
||||
url(r'^(?P<pk>[0-9]+)/children/$', 'group_children_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/hosts/$', 'group_hosts_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/all_hosts/$', 'group_all_hosts_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/variable_data/$', 'group_variable_data'),
|
||||
url(r'^(?P<pk>[0-9]+)/job_events/$', 'group_job_events_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/job_host_summaries/$', 'group_job_host_summaries_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/potential_children/$', 'group_potential_children_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/activity_stream/$', 'group_activity_stream_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/inventory_sources/$', 'group_inventory_sources_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/ad_hoc_commands/$', 'group_ad_hoc_commands_list'),
|
||||
#url(r'^(?P<pk>[0-9]+)/single_fact/$', 'group_single_fact_view'),
|
||||
)
|
||||
|
||||
inventory_source_urls = patterns('awx.api.views',
|
||||
url(r'^$', 'inventory_source_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', 'inventory_source_detail'),
|
||||
url(r'^(?P<pk>[0-9]+)/update/$', 'inventory_source_update_view'),
|
||||
url(r'^(?P<pk>[0-9]+)/inventory_updates/$', 'inventory_source_updates_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/activity_stream/$', 'inventory_source_activity_stream_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/schedules/$', 'inventory_source_schedules_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/groups/$', 'inventory_source_groups_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/hosts/$', 'inventory_source_hosts_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/notification_templates_any/$', 'inventory_source_notification_templates_any_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', 'inventory_source_notification_templates_error_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', 'inventory_source_notification_templates_success_list'),
|
||||
)
|
||||
|
||||
inventory_update_urls = patterns('awx.api.views',
|
||||
url(r'^$', 'inventory_update_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', 'inventory_update_detail'),
|
||||
url(r'^(?P<pk>[0-9]+)/cancel/$', 'inventory_update_cancel'),
|
||||
url(r'^(?P<pk>[0-9]+)/stdout/$', 'inventory_update_stdout'),
|
||||
url(r'^(?P<pk>[0-9]+)/notifications/$', 'inventory_update_notifications_list'),
|
||||
)
|
||||
|
||||
inventory_script_urls = patterns('awx.api.views',
|
||||
url(r'^$', 'inventory_script_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', 'inventory_script_detail'),
|
||||
url(r'^(?P<pk>[0-9]+)/object_roles/$', 'inventory_script_object_roles_list'),
|
||||
)
|
||||
|
||||
credential_type_urls = patterns('awx.api.views',
|
||||
url(r'^$', 'credential_type_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', 'credential_type_detail'),
|
||||
url(r'^(?P<pk>[0-9]+)/credentials/$', 'credential_type_credential_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/activity_stream/$', 'credential_type_activity_stream_list'),
|
||||
)
|
||||
|
||||
credential_urls = patterns('awx.api.views',
|
||||
url(r'^$', 'credential_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/activity_stream/$', 'credential_activity_stream_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', 'credential_detail'),
|
||||
url(r'^(?P<pk>[0-9]+)/access_list/$', 'credential_access_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/object_roles/$', 'credential_object_roles_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/owner_users/$', 'credential_owner_users_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/owner_teams/$', 'credential_owner_teams_list'),
|
||||
# See also credentials resources on users/teams.
|
||||
)
|
||||
|
||||
role_urls = patterns('awx.api.views',
|
||||
url(r'^$', 'role_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', 'role_detail'),
|
||||
url(r'^(?P<pk>[0-9]+)/users/$', 'role_users_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/teams/$', 'role_teams_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/parents/$', 'role_parents_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/children/$', 'role_children_list'),
|
||||
)
|
||||
|
||||
job_template_urls = patterns('awx.api.views',
|
||||
url(r'^$', 'job_template_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', 'job_template_detail'),
|
||||
url(r'^(?P<pk>[0-9]+)/launch/$', 'job_template_launch'),
|
||||
url(r'^(?P<pk>[0-9]+)/jobs/$', 'job_template_jobs_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/callback/$', 'job_template_callback'),
|
||||
url(r'^(?P<pk>[0-9]+)/schedules/$', 'job_template_schedules_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/survey_spec/$', 'job_template_survey_spec'),
|
||||
url(r'^(?P<pk>[0-9]+)/activity_stream/$', 'job_template_activity_stream_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/notification_templates_any/$', 'job_template_notification_templates_any_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', 'job_template_notification_templates_error_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', 'job_template_notification_templates_success_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/instance_groups/$', 'job_template_instance_groups_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/access_list/$', 'job_template_access_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/object_roles/$', 'job_template_object_roles_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/labels/$', 'job_template_label_list'),
|
||||
)
|
||||
|
||||
job_urls = patterns('awx.api.views',
|
||||
url(r'^$', 'job_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', 'job_detail'),
|
||||
url(r'^(?P<pk>[0-9]+)/start/$', 'job_start'), # TODO: remove in 3.3
|
||||
url(r'^(?P<pk>[0-9]+)/cancel/$', 'job_cancel'),
|
||||
url(r'^(?P<pk>[0-9]+)/relaunch/$', 'job_relaunch'),
|
||||
url(r'^(?P<pk>[0-9]+)/job_host_summaries/$', 'job_job_host_summaries_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/job_events/$', 'job_job_events_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/activity_stream/$', 'job_activity_stream_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/stdout/$', 'job_stdout'),
|
||||
url(r'^(?P<pk>[0-9]+)/notifications/$', 'job_notifications_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/labels/$', 'job_label_list'),
|
||||
)
|
||||
|
||||
job_host_summary_urls = patterns('awx.api.views',
|
||||
url(r'^(?P<pk>[0-9]+)/$', 'job_host_summary_detail'),
|
||||
)
|
||||
|
||||
job_event_urls = patterns('awx.api.views',
|
||||
url(r'^$', 'job_event_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', 'job_event_detail'),
|
||||
url(r'^(?P<pk>[0-9]+)/children/$', 'job_event_children_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/hosts/$', 'job_event_hosts_list'),
|
||||
)
|
||||
|
||||
ad_hoc_command_urls = patterns('awx.api.views',
|
||||
url(r'^$', 'ad_hoc_command_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', 'ad_hoc_command_detail'),
|
||||
url(r'^(?P<pk>[0-9]+)/cancel/$', 'ad_hoc_command_cancel'),
|
||||
url(r'^(?P<pk>[0-9]+)/relaunch/$', 'ad_hoc_command_relaunch'),
|
||||
url(r'^(?P<pk>[0-9]+)/events/$', 'ad_hoc_command_ad_hoc_command_events_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/activity_stream/$', 'ad_hoc_command_activity_stream_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/notifications/$', 'ad_hoc_command_notifications_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/stdout/$', 'ad_hoc_command_stdout'),
|
||||
)
|
||||
|
||||
ad_hoc_command_event_urls = patterns('awx.api.views',
|
||||
url(r'^$', 'ad_hoc_command_event_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', 'ad_hoc_command_event_detail'),
|
||||
)
|
||||
|
||||
system_job_template_urls = patterns('awx.api.views',
|
||||
url(r'^$', 'system_job_template_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', 'system_job_template_detail'),
|
||||
url(r'^(?P<pk>[0-9]+)/launch/$', 'system_job_template_launch'),
|
||||
url(r'^(?P<pk>[0-9]+)/jobs/$', 'system_job_template_jobs_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/schedules/$', 'system_job_template_schedules_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/notification_templates_any/$', 'system_job_template_notification_templates_any_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', 'system_job_template_notification_templates_error_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', 'system_job_template_notification_templates_success_list'),
|
||||
)
|
||||
|
||||
system_job_urls = patterns('awx.api.views',
|
||||
url(r'^$', 'system_job_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', 'system_job_detail'),
|
||||
url(r'^(?P<pk>[0-9]+)/cancel/$', 'system_job_cancel'),
|
||||
url(r'^(?P<pk>[0-9]+)/notifications/$', 'system_job_notifications_list'),
|
||||
)
|
||||
|
||||
workflow_job_template_urls = patterns('awx.api.views',
|
||||
url(r'^$', 'workflow_job_template_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', 'workflow_job_template_detail'),
|
||||
url(r'^(?P<pk>[0-9]+)/workflow_jobs/$', 'workflow_job_template_jobs_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/launch/$', 'workflow_job_template_launch'),
|
||||
url(r'^(?P<pk>[0-9]+)/copy/$', 'workflow_job_template_copy'),
|
||||
url(r'^(?P<pk>[0-9]+)/schedules/$', 'workflow_job_template_schedules_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/survey_spec/$', 'workflow_job_template_survey_spec'),
|
||||
url(r'^(?P<pk>[0-9]+)/workflow_nodes/$', 'workflow_job_template_workflow_nodes_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/activity_stream/$', 'workflow_job_template_activity_stream_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/notification_templates_any/$', 'workflow_job_template_notification_templates_any_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', 'workflow_job_template_notification_templates_error_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', 'workflow_job_template_notification_templates_success_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/access_list/$', 'workflow_job_template_access_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/object_roles/$', 'workflow_job_template_object_roles_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/labels/$', 'workflow_job_template_label_list'),
|
||||
)
|
||||
|
||||
workflow_job_urls = patterns('awx.api.views',
|
||||
url(r'^$', 'workflow_job_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', 'workflow_job_detail'),
|
||||
url(r'^(?P<pk>[0-9]+)/workflow_nodes/$', 'workflow_job_workflow_nodes_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/labels/$', 'workflow_job_label_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/cancel/$', 'workflow_job_cancel'),
|
||||
url(r'^(?P<pk>[0-9]+)/relaunch/$', 'workflow_job_relaunch'),
|
||||
url(r'^(?P<pk>[0-9]+)/notifications/$', 'workflow_job_notifications_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/activity_stream/$', 'workflow_job_activity_stream_list'),
|
||||
)
|
||||
|
||||
|
||||
notification_template_urls = patterns('awx.api.views',
|
||||
url(r'^$', 'notification_template_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', 'notification_template_detail'),
|
||||
url(r'^(?P<pk>[0-9]+)/test/$', 'notification_template_test'),
|
||||
url(r'^(?P<pk>[0-9]+)/notifications/$', 'notification_template_notification_list'),
|
||||
)
|
||||
|
||||
notification_urls = patterns('awx.api.views',
|
||||
url(r'^$', 'notification_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', 'notification_detail'),
|
||||
)
|
||||
|
||||
label_urls = patterns('awx.api.views',
|
||||
url(r'^$', 'label_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', 'label_detail'),
|
||||
)
|
||||
|
||||
workflow_job_template_node_urls = patterns('awx.api.views',
|
||||
url(r'^$', 'workflow_job_template_node_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', 'workflow_job_template_node_detail'),
|
||||
url(r'^(?P<pk>[0-9]+)/success_nodes/$', 'workflow_job_template_node_success_nodes_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/failure_nodes/$', 'workflow_job_template_node_failure_nodes_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/always_nodes/$', 'workflow_job_template_node_always_nodes_list'),
|
||||
)
|
||||
|
||||
workflow_job_node_urls = patterns('awx.api.views',
|
||||
url(r'^$', 'workflow_job_node_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', 'workflow_job_node_detail'),
|
||||
url(r'^(?P<pk>[0-9]+)/success_nodes/$', 'workflow_job_node_success_nodes_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/failure_nodes/$', 'workflow_job_node_failure_nodes_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/always_nodes/$', 'workflow_job_node_always_nodes_list'),
|
||||
)
|
||||
|
||||
schedule_urls = patterns('awx.api.views',
|
||||
url(r'^$', 'schedule_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', 'schedule_detail'),
|
||||
url(r'^(?P<pk>[0-9]+)/jobs/$', 'schedule_unified_jobs_list'),
|
||||
)
|
||||
|
||||
activity_stream_urls = patterns('awx.api.views',
|
||||
url(r'^$', 'activity_stream_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', 'activity_stream_detail'),
|
||||
)
|
||||
|
||||
instance_urls = patterns('awx.api.views',
|
||||
url(r'^$', 'instance_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', 'instance_detail'),
|
||||
url(r'^(?P<pk>[0-9]+)/jobs/$', 'instance_unified_jobs_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/instance_groups/$', 'instance_instance_groups_list'),
|
||||
)
|
||||
|
||||
instance_group_urls = patterns('awx.api.views',
|
||||
url(r'^$', 'instance_group_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', 'instance_group_detail'),
|
||||
url(r'^(?P<pk>[0-9]+)/jobs/$', 'instance_group_unified_jobs_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/instances/$', 'instance_group_instance_list'),
|
||||
)
|
||||
|
||||
v1_urls = patterns('awx.api.views',
|
||||
url(r'^$', 'api_v1_root_view'),
|
||||
url(r'^ping/$', 'api_v1_ping_view'),
|
||||
url(r'^config/$', 'api_v1_config_view'),
|
||||
url(r'^auth/$', 'auth_view'),
|
||||
url(r'^authtoken/$', 'auth_token_view'),
|
||||
url(r'^me/$', 'user_me_list'),
|
||||
url(r'^dashboard/$', 'dashboard_view'),
|
||||
url(r'^dashboard/graphs/jobs/$','dashboard_jobs_graph_view'),
|
||||
url(r'^settings/', include('awx.conf.urls')),
|
||||
url(r'^instances/', include(instance_urls)),
|
||||
url(r'^instance_groups/', include(instance_group_urls)),
|
||||
url(r'^schedules/', include(schedule_urls)),
|
||||
url(r'^organizations/', include(organization_urls)),
|
||||
url(r'^users/', include(user_urls)),
|
||||
url(r'^projects/', include(project_urls)),
|
||||
url(r'^project_updates/', include(project_update_urls)),
|
||||
url(r'^teams/', include(team_urls)),
|
||||
url(r'^inventories/', include(inventory_urls)),
|
||||
url(r'^hosts/', include(host_urls)),
|
||||
url(r'^groups/', include(group_urls)),
|
||||
url(r'^inventory_sources/', include(inventory_source_urls)),
|
||||
url(r'^inventory_updates/', include(inventory_update_urls)),
|
||||
url(r'^inventory_scripts/', include(inventory_script_urls)),
|
||||
url(r'^credentials/', include(credential_urls)),
|
||||
url(r'^roles/', include(role_urls)),
|
||||
url(r'^job_templates/', include(job_template_urls)),
|
||||
url(r'^jobs/', include(job_urls)),
|
||||
url(r'^job_host_summaries/', include(job_host_summary_urls)),
|
||||
url(r'^job_events/', include(job_event_urls)),
|
||||
url(r'^ad_hoc_commands/', include(ad_hoc_command_urls)),
|
||||
url(r'^ad_hoc_command_events/', include(ad_hoc_command_event_urls)),
|
||||
url(r'^system_job_templates/', include(system_job_template_urls)),
|
||||
url(r'^system_jobs/', include(system_job_urls)),
|
||||
url(r'^notification_templates/', include(notification_template_urls)),
|
||||
url(r'^notifications/', include(notification_urls)),
|
||||
url(r'^workflow_job_templates/',include(workflow_job_template_urls)),
|
||||
url(r'^workflow_jobs/' ,include(workflow_job_urls)),
|
||||
url(r'^labels/', include(label_urls)),
|
||||
url(r'^workflow_job_template_nodes/', include(workflow_job_template_node_urls)),
|
||||
url(r'^workflow_job_nodes/', include(workflow_job_node_urls)),
|
||||
url(r'^unified_job_templates/$','unified_job_template_list'),
|
||||
url(r'^unified_jobs/$', 'unified_job_list'),
|
||||
url(r'^activity_stream/', include(activity_stream_urls)),
|
||||
)
|
||||
|
||||
v2_urls = patterns('awx.api.views',
|
||||
url(r'^$', 'api_v2_root_view'),
|
||||
url(r'^credential_types/', include(credential_type_urls)),
|
||||
url(r'^hosts/(?P<pk>[0-9]+)/ansible_facts/$', 'host_ansible_facts_detail'),
|
||||
url(r'^jobs/(?P<pk>[0-9]+)/extra_credentials/$', 'job_extra_credentials_list'),
|
||||
url(r'^job_templates/(?P<pk>[0-9]+)/extra_credentials/$', 'job_template_extra_credentials_list'),
|
||||
)
|
||||
|
||||
urlpatterns = patterns('awx.api.views',
|
||||
url(r'^$', 'api_root_view'),
|
||||
url(r'^(?P<version>(v2))/', include(v2_urls)),
|
||||
url(r'^(?P<version>(v1|v2))/', include(v1_urls))
|
||||
)
|
||||
0
awx/api/urls/Pipfile
Normal file
0
awx/api/urls/Pipfile
Normal file
7
awx/api/urls/__init__.py
Normal file
7
awx/api/urls/__init__.py
Normal file
@@ -0,0 +1,7 @@
|
||||
# Copyright (c) 2017 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
from .urls import urlpatterns
|
||||
|
||||
__all__ = ['urlpatterns']
|
||||
17
awx/api/urls/activity_stream.py
Normal file
17
awx/api/urls/activity_stream.py
Normal file
@@ -0,0 +1,17 @@
|
||||
# Copyright (c) 2017 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
from django.conf.urls import url
|
||||
|
||||
from awx.api.views import (
|
||||
ActivityStreamList,
|
||||
ActivityStreamDetail,
|
||||
)
|
||||
|
||||
|
||||
urls = [
|
||||
url(r'^$', ActivityStreamList.as_view(), name='activity_stream_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', ActivityStreamDetail.as_view(), name='activity_stream_detail'),
|
||||
]
|
||||
|
||||
__all__ = ['urls']
|
||||
29
awx/api/urls/ad_hoc_command.py
Normal file
29
awx/api/urls/ad_hoc_command.py
Normal file
@@ -0,0 +1,29 @@
|
||||
# Copyright (c) 2017 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
from django.conf.urls import url
|
||||
|
||||
from awx.api.views import (
|
||||
AdHocCommandList,
|
||||
AdHocCommandDetail,
|
||||
AdHocCommandCancel,
|
||||
AdHocCommandRelaunch,
|
||||
AdHocCommandAdHocCommandEventsList,
|
||||
AdHocCommandActivityStreamList,
|
||||
AdHocCommandNotificationsList,
|
||||
AdHocCommandStdout,
|
||||
)
|
||||
|
||||
|
||||
urls = [
|
||||
url(r'^$', AdHocCommandList.as_view(), name='ad_hoc_command_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', AdHocCommandDetail.as_view(), name='ad_hoc_command_detail'),
|
||||
url(r'^(?P<pk>[0-9]+)/cancel/$', AdHocCommandCancel.as_view(), name='ad_hoc_command_cancel'),
|
||||
url(r'^(?P<pk>[0-9]+)/relaunch/$', AdHocCommandRelaunch.as_view(), name='ad_hoc_command_relaunch'),
|
||||
url(r'^(?P<pk>[0-9]+)/events/$', AdHocCommandAdHocCommandEventsList.as_view(), name='ad_hoc_command_ad_hoc_command_events_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/activity_stream/$', AdHocCommandActivityStreamList.as_view(), name='ad_hoc_command_activity_stream_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/notifications/$', AdHocCommandNotificationsList.as_view(), name='ad_hoc_command_notifications_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/stdout/$', AdHocCommandStdout.as_view(), name='ad_hoc_command_stdout'),
|
||||
]
|
||||
|
||||
__all__ = ['urls']
|
||||
17
awx/api/urls/ad_hoc_command_event.py
Normal file
17
awx/api/urls/ad_hoc_command_event.py
Normal file
@@ -0,0 +1,17 @@
|
||||
# Copyright (c) 2017 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
from django.conf.urls import url
|
||||
|
||||
from awx.api.views import (
|
||||
AdHocCommandEventList,
|
||||
AdHocCommandEventDetail,
|
||||
)
|
||||
|
||||
|
||||
urls = [
|
||||
url(r'^$', AdHocCommandEventList.as_view(), name='ad_hoc_command_event_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', AdHocCommandEventDetail.as_view(), name='ad_hoc_command_event_detail'),
|
||||
]
|
||||
|
||||
__all__ = ['urls']
|
||||
27
awx/api/urls/credential.py
Normal file
27
awx/api/urls/credential.py
Normal file
@@ -0,0 +1,27 @@
|
||||
# Copyright (c) 2017 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
from django.conf.urls import url
|
||||
|
||||
from awx.api.views import (
|
||||
CredentialList,
|
||||
CredentialActivityStreamList,
|
||||
CredentialDetail,
|
||||
CredentialAccessList,
|
||||
CredentialObjectRolesList,
|
||||
CredentialOwnerUsersList,
|
||||
CredentialOwnerTeamsList,
|
||||
)
|
||||
|
||||
|
||||
urls = [
|
||||
url(r'^$', CredentialList.as_view(), name='credential_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/activity_stream/$', CredentialActivityStreamList.as_view(), name='credential_activity_stream_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', CredentialDetail.as_view(), name='credential_detail'),
|
||||
url(r'^(?P<pk>[0-9]+)/access_list/$', CredentialAccessList.as_view(), name='credential_access_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/object_roles/$', CredentialObjectRolesList.as_view(), name='credential_object_roles_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/owner_users/$', CredentialOwnerUsersList.as_view(), name='credential_owner_users_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/owner_teams/$', CredentialOwnerTeamsList.as_view(), name='credential_owner_teams_list'),
|
||||
]
|
||||
|
||||
__all__ = ['urls']
|
||||
21
awx/api/urls/credential_type.py
Normal file
21
awx/api/urls/credential_type.py
Normal file
@@ -0,0 +1,21 @@
|
||||
# Copyright (c) 2017 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
from django.conf.urls import url
|
||||
|
||||
from awx.api.views import (
|
||||
CredentialTypeList,
|
||||
CredentialTypeDetail,
|
||||
CredentialTypeCredentialList,
|
||||
CredentialTypeActivityStreamList,
|
||||
)
|
||||
|
||||
|
||||
urls = [
|
||||
url(r'^$', CredentialTypeList.as_view(), name='credential_type_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', CredentialTypeDetail.as_view(), name='credential_type_detail'),
|
||||
url(r'^(?P<pk>[0-9]+)/credentials/$', CredentialTypeCredentialList.as_view(), name='credential_type_credential_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/activity_stream/$', CredentialTypeActivityStreamList.as_view(), name='credential_type_activity_stream_list'),
|
||||
]
|
||||
|
||||
__all__ = ['urls']
|
||||
37
awx/api/urls/group.py
Normal file
37
awx/api/urls/group.py
Normal file
@@ -0,0 +1,37 @@
|
||||
# Copyright (c) 2017 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
from django.conf.urls import url
|
||||
|
||||
from awx.api.views import (
|
||||
GroupList,
|
||||
GroupDetail,
|
||||
GroupChildrenList,
|
||||
GroupHostsList,
|
||||
GroupAllHostsList,
|
||||
GroupVariableData,
|
||||
GroupJobEventsList,
|
||||
GroupJobHostSummariesList,
|
||||
GroupPotentialChildrenList,
|
||||
GroupActivityStreamList,
|
||||
GroupInventorySourcesList,
|
||||
GroupAdHocCommandsList,
|
||||
)
|
||||
|
||||
|
||||
urls = [
|
||||
url(r'^$', GroupList.as_view(), name='group_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', GroupDetail.as_view(), name='group_detail'),
|
||||
url(r'^(?P<pk>[0-9]+)/children/$', GroupChildrenList.as_view(), name='group_children_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/hosts/$', GroupHostsList.as_view(), name='group_hosts_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/all_hosts/$', GroupAllHostsList.as_view(), name='group_all_hosts_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/variable_data/$', GroupVariableData.as_view(), name='group_variable_data'),
|
||||
url(r'^(?P<pk>[0-9]+)/job_events/$', GroupJobEventsList.as_view(), name='group_job_events_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/job_host_summaries/$', GroupJobHostSummariesList.as_view(), name='group_job_host_summaries_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/potential_children/$', GroupPotentialChildrenList.as_view(), name='group_potential_children_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/activity_stream/$', GroupActivityStreamList.as_view(), name='group_activity_stream_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/inventory_sources/$', GroupInventorySourcesList.as_view(), name='group_inventory_sources_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/ad_hoc_commands/$', GroupAdHocCommandsList.as_view(), name='group_ad_hoc_commands_list'),
|
||||
]
|
||||
|
||||
__all__ = ['urls']
|
||||
43
awx/api/urls/host.py
Normal file
43
awx/api/urls/host.py
Normal file
@@ -0,0 +1,43 @@
|
||||
# Copyright (c) 2017 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
from django.conf.urls import url
|
||||
|
||||
from awx.api.views import (
|
||||
HostList,
|
||||
HostDetail,
|
||||
HostVariableData,
|
||||
HostGroupsList,
|
||||
HostAllGroupsList,
|
||||
HostJobEventsList,
|
||||
HostJobHostSummariesList,
|
||||
HostActivityStreamList,
|
||||
HostInventorySourcesList,
|
||||
HostSmartInventoriesList,
|
||||
HostAdHocCommandsList,
|
||||
HostAdHocCommandEventsList,
|
||||
HostFactVersionsList,
|
||||
HostFactCompareView,
|
||||
HostInsights,
|
||||
)
|
||||
|
||||
|
||||
urls = [
|
||||
url(r'^$', HostList.as_view(), name='host_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', HostDetail.as_view(), name='host_detail'),
|
||||
url(r'^(?P<pk>[0-9]+)/variable_data/$', HostVariableData.as_view(), name='host_variable_data'),
|
||||
url(r'^(?P<pk>[0-9]+)/groups/$', HostGroupsList.as_view(), name='host_groups_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/all_groups/$', HostAllGroupsList.as_view(), name='host_all_groups_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/job_events/', HostJobEventsList.as_view(), name='host_job_events_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/job_host_summaries/$', HostJobHostSummariesList.as_view(), name='host_job_host_summaries_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/activity_stream/$', HostActivityStreamList.as_view(), name='host_activity_stream_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/inventory_sources/$', HostInventorySourcesList.as_view(), name='host_inventory_sources_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/smart_inventories/$', HostSmartInventoriesList.as_view(), name='host_smart_inventories_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/ad_hoc_commands/$', HostAdHocCommandsList.as_view(), name='host_ad_hoc_commands_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/ad_hoc_command_events/$', HostAdHocCommandEventsList.as_view(), name='host_ad_hoc_command_events_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/fact_versions/$', HostFactVersionsList.as_view(), name='host_fact_versions_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/fact_view/$', HostFactCompareView.as_view(), name='host_fact_compare_view'),
|
||||
url(r'^(?P<pk>[0-9]+)/insights/$', HostInsights.as_view(), name='host_insights'),
|
||||
]
|
||||
|
||||
__all__ = ['urls']
|
||||
22
awx/api/urls/instance.py
Normal file
22
awx/api/urls/instance.py
Normal file
@@ -0,0 +1,22 @@
|
||||
# Copyright (c) 2017 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
from django.conf.urls import url
|
||||
|
||||
from awx.api.views import (
|
||||
InstanceList,
|
||||
InstanceDetail,
|
||||
InstanceUnifiedJobsList,
|
||||
InstanceInstanceGroupsList,
|
||||
)
|
||||
|
||||
|
||||
urls = [
|
||||
url(r'^$', InstanceList.as_view(), name='instance_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', InstanceDetail.as_view(), name='instance_detail'),
|
||||
url(r'^(?P<pk>[0-9]+)/jobs/$', InstanceUnifiedJobsList.as_view(), name='instance_unified_jobs_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/instance_groups/$', InstanceInstanceGroupsList.as_view(),
|
||||
name='instance_instance_groups_list'),
|
||||
]
|
||||
|
||||
__all__ = ['urls']
|
||||
21
awx/api/urls/instance_group.py
Normal file
21
awx/api/urls/instance_group.py
Normal file
@@ -0,0 +1,21 @@
|
||||
# Copyright (c) 2017 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
from django.conf.urls import url
|
||||
|
||||
from awx.api.views import (
|
||||
InstanceGroupList,
|
||||
InstanceGroupDetail,
|
||||
InstanceGroupUnifiedJobsList,
|
||||
InstanceGroupInstanceList,
|
||||
)
|
||||
|
||||
|
||||
urls = [
|
||||
url(r'^$', InstanceGroupList.as_view(), name='instance_group_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', InstanceGroupDetail.as_view(), name='instance_group_detail'),
|
||||
url(r'^(?P<pk>[0-9]+)/jobs/$', InstanceGroupUnifiedJobsList.as_view(), name='instance_group_unified_jobs_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/instances/$', InstanceGroupInstanceList.as_view(), name='instance_group_instance_list'),
|
||||
]
|
||||
|
||||
__all__ = ['urls']
|
||||
45
awx/api/urls/inventory.py
Normal file
45
awx/api/urls/inventory.py
Normal file
@@ -0,0 +1,45 @@
|
||||
# Copyright (c) 2017 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
from django.conf.urls import url
|
||||
|
||||
from awx.api.views import (
|
||||
InventoryList,
|
||||
InventoryDetail,
|
||||
InventoryHostsList,
|
||||
InventoryGroupsList,
|
||||
InventoryRootGroupsList,
|
||||
InventoryVariableData,
|
||||
InventoryScriptView,
|
||||
InventoryTreeView,
|
||||
InventoryInventorySourcesList,
|
||||
InventoryInventorySourcesUpdate,
|
||||
InventoryActivityStreamList,
|
||||
InventoryJobTemplateList,
|
||||
InventoryAdHocCommandsList,
|
||||
InventoryAccessList,
|
||||
InventoryObjectRolesList,
|
||||
InventoryInstanceGroupsList,
|
||||
)
|
||||
|
||||
|
||||
urls = [
|
||||
url(r'^$', InventoryList.as_view(), name='inventory_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', InventoryDetail.as_view(), name='inventory_detail'),
|
||||
url(r'^(?P<pk>[0-9]+)/hosts/$', InventoryHostsList.as_view(), name='inventory_hosts_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/groups/$', InventoryGroupsList.as_view(), name='inventory_groups_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/root_groups/$', InventoryRootGroupsList.as_view(), name='inventory_root_groups_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/variable_data/$', InventoryVariableData.as_view(), name='inventory_variable_data'),
|
||||
url(r'^(?P<pk>[0-9]+)/script/$', InventoryScriptView.as_view(), name='inventory_script_view'),
|
||||
url(r'^(?P<pk>[0-9]+)/tree/$', InventoryTreeView.as_view(), name='inventory_tree_view'),
|
||||
url(r'^(?P<pk>[0-9]+)/inventory_sources/$', InventoryInventorySourcesList.as_view(), name='inventory_inventory_sources_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/update_inventory_sources/$', InventoryInventorySourcesUpdate.as_view(), name='inventory_inventory_sources_update'),
|
||||
url(r'^(?P<pk>[0-9]+)/activity_stream/$', InventoryActivityStreamList.as_view(), name='inventory_activity_stream_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/job_templates/$', InventoryJobTemplateList.as_view(), name='inventory_job_template_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/ad_hoc_commands/$', InventoryAdHocCommandsList.as_view(), name='inventory_ad_hoc_commands_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/access_list/$', InventoryAccessList.as_view(), name='inventory_access_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/object_roles/$', InventoryObjectRolesList.as_view(), name='inventory_object_roles_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/instance_groups/$', InventoryInstanceGroupsList.as_view(), name='inventory_instance_groups_list'),
|
||||
]
|
||||
|
||||
__all__ = ['urls']
|
||||
19
awx/api/urls/inventory_script.py
Normal file
19
awx/api/urls/inventory_script.py
Normal file
@@ -0,0 +1,19 @@
|
||||
# Copyright (c) 2017 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
from django.conf.urls import url
|
||||
|
||||
from awx.api.views import (
|
||||
InventoryScriptList,
|
||||
InventoryScriptDetail,
|
||||
InventoryScriptObjectRolesList,
|
||||
)
|
||||
|
||||
|
||||
urls = [
|
||||
url(r'^$', InventoryScriptList.as_view(), name='inventory_script_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', InventoryScriptDetail.as_view(), name='inventory_script_detail'),
|
||||
url(r'^(?P<pk>[0-9]+)/object_roles/$', InventoryScriptObjectRolesList.as_view(), name='inventory_script_object_roles_list'),
|
||||
]
|
||||
|
||||
__all__ = ['urls']
|
||||
38
awx/api/urls/inventory_source.py
Normal file
38
awx/api/urls/inventory_source.py
Normal file
@@ -0,0 +1,38 @@
|
||||
# Copyright (c) 2017 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
from django.conf.urls import url
|
||||
|
||||
from awx.api.views import (
|
||||
InventorySourceList,
|
||||
InventorySourceDetail,
|
||||
InventorySourceUpdateView,
|
||||
InventorySourceUpdatesList,
|
||||
InventorySourceActivityStreamList,
|
||||
InventorySourceSchedulesList,
|
||||
InventorySourceGroupsList,
|
||||
InventorySourceHostsList,
|
||||
InventorySourceNotificationTemplatesAnyList,
|
||||
InventorySourceNotificationTemplatesErrorList,
|
||||
InventorySourceNotificationTemplatesSuccessList,
|
||||
)
|
||||
|
||||
|
||||
urls = [
|
||||
url(r'^$', InventorySourceList.as_view(), name='inventory_source_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', InventorySourceDetail.as_view(), name='inventory_source_detail'),
|
||||
url(r'^(?P<pk>[0-9]+)/update/$', InventorySourceUpdateView.as_view(), name='inventory_source_update_view'),
|
||||
url(r'^(?P<pk>[0-9]+)/inventory_updates/$', InventorySourceUpdatesList.as_view(), name='inventory_source_updates_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/activity_stream/$', InventorySourceActivityStreamList.as_view(), name='inventory_source_activity_stream_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/schedules/$', InventorySourceSchedulesList.as_view(), name='inventory_source_schedules_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/groups/$', InventorySourceGroupsList.as_view(), name='inventory_source_groups_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/hosts/$', InventorySourceHostsList.as_view(), name='inventory_source_hosts_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/notification_templates_any/$', InventorySourceNotificationTemplatesAnyList.as_view(),
|
||||
name='inventory_source_notification_templates_any_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', InventorySourceNotificationTemplatesErrorList.as_view(),
|
||||
name='inventory_source_notification_templates_error_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', InventorySourceNotificationTemplatesSuccessList.as_view(),
|
||||
name='inventory_source_notification_templates_success_list'),
|
||||
]
|
||||
|
||||
__all__ = ['urls']
|
||||
25
awx/api/urls/inventory_update.py
Normal file
25
awx/api/urls/inventory_update.py
Normal file
@@ -0,0 +1,25 @@
|
||||
# Copyright (c) 2017 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
from django.conf.urls import url
|
||||
|
||||
from awx.api.views import (
|
||||
InventoryUpdateList,
|
||||
InventoryUpdateDetail,
|
||||
InventoryUpdateCancel,
|
||||
InventoryUpdateStdout,
|
||||
InventoryUpdateNotificationsList,
|
||||
InventoryUpdateEventsList,
|
||||
)
|
||||
|
||||
|
||||
urls = [
|
||||
url(r'^$', InventoryUpdateList.as_view(), name='inventory_update_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', InventoryUpdateDetail.as_view(), name='inventory_update_detail'),
|
||||
url(r'^(?P<pk>[0-9]+)/cancel/$', InventoryUpdateCancel.as_view(), name='inventory_update_cancel'),
|
||||
url(r'^(?P<pk>[0-9]+)/stdout/$', InventoryUpdateStdout.as_view(), name='inventory_update_stdout'),
|
||||
url(r'^(?P<pk>[0-9]+)/notifications/$', InventoryUpdateNotificationsList.as_view(), name='inventory_update_notifications_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/events/$', InventoryUpdateEventsList.as_view(), name='inventory_update_events_list'),
|
||||
]
|
||||
|
||||
__all__ = ['urls']
|
||||
39
awx/api/urls/job.py
Normal file
39
awx/api/urls/job.py
Normal file
@@ -0,0 +1,39 @@
|
||||
# Copyright (c) 2017 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
from django.conf.urls import url
|
||||
|
||||
from awx.api.views import (
|
||||
JobList,
|
||||
JobDetail,
|
||||
JobStart,
|
||||
JobCancel,
|
||||
JobRelaunch,
|
||||
JobCreateSchedule,
|
||||
JobJobHostSummariesList,
|
||||
JobJobEventsList,
|
||||
JobActivityStreamList,
|
||||
JobStdout,
|
||||
JobNotificationsList,
|
||||
JobLabelList,
|
||||
JobHostSummaryDetail,
|
||||
)
|
||||
|
||||
|
||||
urls = [
|
||||
url(r'^$', JobList.as_view(), name='job_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', JobDetail.as_view(), name='job_detail'),
|
||||
url(r'^(?P<pk>[0-9]+)/start/$', JobStart.as_view(), name='job_start'), # Todo: Remove In 3.3
|
||||
url(r'^(?P<pk>[0-9]+)/cancel/$', JobCancel.as_view(), name='job_cancel'),
|
||||
url(r'^(?P<pk>[0-9]+)/relaunch/$', JobRelaunch.as_view(), name='job_relaunch'),
|
||||
url(r'^(?P<pk>[0-9]+)/create_schedule/$', JobCreateSchedule.as_view(), name='job_create_schedule'),
|
||||
url(r'^(?P<pk>[0-9]+)/job_host_summaries/$', JobJobHostSummariesList.as_view(), name='job_job_host_summaries_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/job_events/$', JobJobEventsList.as_view(), name='job_job_events_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/activity_stream/$', JobActivityStreamList.as_view(), name='job_activity_stream_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/stdout/$', JobStdout.as_view(), name='job_stdout'),
|
||||
url(r'^(?P<pk>[0-9]+)/notifications/$', JobNotificationsList.as_view(), name='job_notifications_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/labels/$', JobLabelList.as_view(), name='job_label_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', JobHostSummaryDetail.as_view(), name='job_host_summary_detail'),
|
||||
]
|
||||
|
||||
__all__ = ['urls']
|
||||
21
awx/api/urls/job_event.py
Normal file
21
awx/api/urls/job_event.py
Normal file
@@ -0,0 +1,21 @@
|
||||
# Copyright (c) 2017 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
from django.conf.urls import url
|
||||
|
||||
from awx.api.views import (
|
||||
JobEventList,
|
||||
JobEventDetail,
|
||||
JobEventChildrenList,
|
||||
JobEventHostsList,
|
||||
)
|
||||
|
||||
|
||||
urls = [
|
||||
url(r'^$', JobEventList.as_view(), name='job_event_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', JobEventDetail.as_view(), name='job_event_detail'),
|
||||
url(r'^(?P<pk>[0-9]+)/children/$', JobEventChildrenList.as_view(), name='job_event_children_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/hosts/$', JobEventHostsList.as_view(), name='job_event_hosts_list'),
|
||||
]
|
||||
|
||||
__all__ = ['urls']
|
||||
15
awx/api/urls/job_host_summary.py
Normal file
15
awx/api/urls/job_host_summary.py
Normal file
@@ -0,0 +1,15 @@
|
||||
# Copyright (c) 2017 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
from django.conf.urls import url
|
||||
|
||||
from awx.api.views import (
|
||||
JobHostSummaryDetail,
|
||||
)
|
||||
|
||||
|
||||
urls = [
|
||||
url(r'^(?P<pk>[0-9]+)/$', JobHostSummaryDetail.as_view(), name='job_host_summary_detail'),
|
||||
]
|
||||
|
||||
__all__ = ['urls']
|
||||
46
awx/api/urls/job_template.py
Normal file
46
awx/api/urls/job_template.py
Normal file
@@ -0,0 +1,46 @@
|
||||
# Copyright (c) 2017 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
from django.conf.urls import url
|
||||
|
||||
from awx.api.views import (
|
||||
JobTemplateList,
|
||||
JobTemplateDetail,
|
||||
JobTemplateLaunch,
|
||||
JobTemplateJobsList,
|
||||
JobTemplateCallback,
|
||||
JobTemplateSchedulesList,
|
||||
JobTemplateSurveySpec,
|
||||
JobTemplateActivityStreamList,
|
||||
JobTemplateNotificationTemplatesAnyList,
|
||||
JobTemplateNotificationTemplatesErrorList,
|
||||
JobTemplateNotificationTemplatesSuccessList,
|
||||
JobTemplateInstanceGroupsList,
|
||||
JobTemplateAccessList,
|
||||
JobTemplateObjectRolesList,
|
||||
JobTemplateLabelList,
|
||||
)
|
||||
|
||||
|
||||
urls = [
|
||||
url(r'^$', JobTemplateList.as_view(), name='job_template_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', JobTemplateDetail.as_view(), name='job_template_detail'),
|
||||
url(r'^(?P<pk>[0-9]+)/launch/$', JobTemplateLaunch.as_view(), name='job_template_launch'),
|
||||
url(r'^(?P<pk>[0-9]+)/jobs/$', JobTemplateJobsList.as_view(), name='job_template_jobs_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/callback/$', JobTemplateCallback.as_view(), name='job_template_callback'),
|
||||
url(r'^(?P<pk>[0-9]+)/schedules/$', JobTemplateSchedulesList.as_view(), name='job_template_schedules_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/survey_spec/$', JobTemplateSurveySpec.as_view(), name='job_template_survey_spec'),
|
||||
url(r'^(?P<pk>[0-9]+)/activity_stream/$', JobTemplateActivityStreamList.as_view(), name='job_template_activity_stream_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/notification_templates_any/$', JobTemplateNotificationTemplatesAnyList.as_view(),
|
||||
name='job_template_notification_templates_any_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', JobTemplateNotificationTemplatesErrorList.as_view(),
|
||||
name='job_template_notification_templates_error_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', JobTemplateNotificationTemplatesSuccessList.as_view(),
|
||||
name='job_template_notification_templates_success_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/instance_groups/$', JobTemplateInstanceGroupsList.as_view(), name='job_template_instance_groups_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/access_list/$', JobTemplateAccessList.as_view(), name='job_template_access_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/object_roles/$', JobTemplateObjectRolesList.as_view(), name='job_template_object_roles_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/labels/$', JobTemplateLabelList.as_view(), name='job_template_label_list'),
|
||||
]
|
||||
|
||||
__all__ = ['urls']
|
||||
17
awx/api/urls/label.py
Normal file
17
awx/api/urls/label.py
Normal file
@@ -0,0 +1,17 @@
|
||||
# Copyright (c) 2017 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
from django.conf.urls import url
|
||||
|
||||
from awx.api.views import (
|
||||
LabelList,
|
||||
LabelDetail,
|
||||
)
|
||||
|
||||
|
||||
urls = [
|
||||
url(r'^$', LabelList.as_view(), name='label_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', LabelDetail.as_view(), name='label_detail'),
|
||||
]
|
||||
|
||||
__all__ = ['urls']
|
||||
17
awx/api/urls/notification.py
Normal file
17
awx/api/urls/notification.py
Normal file
@@ -0,0 +1,17 @@
|
||||
# Copyright (c) 2017 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
from django.conf.urls import url
|
||||
|
||||
from awx.api.views import (
|
||||
NotificationList,
|
||||
NotificationDetail,
|
||||
)
|
||||
|
||||
|
||||
urls = [
|
||||
url(r'^$', NotificationList.as_view(), name='notification_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', NotificationDetail.as_view(), name='notification_detail'),
|
||||
]
|
||||
|
||||
__all__ = ['urls']
|
||||
21
awx/api/urls/notification_template.py
Normal file
21
awx/api/urls/notification_template.py
Normal file
@@ -0,0 +1,21 @@
|
||||
# Copyright (c) 2017 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
from django.conf.urls import url
|
||||
|
||||
from awx.api.views import (
|
||||
NotificationTemplateList,
|
||||
NotificationTemplateDetail,
|
||||
NotificationTemplateTest,
|
||||
NotificationTemplateNotificationList,
|
||||
)
|
||||
|
||||
|
||||
urls = [
|
||||
url(r'^$', NotificationTemplateList.as_view(), name='notification_template_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', NotificationTemplateDetail.as_view(), name='notification_template_detail'),
|
||||
url(r'^(?P<pk>[0-9]+)/test/$', NotificationTemplateTest.as_view(), name='notification_template_test'),
|
||||
url(r'^(?P<pk>[0-9]+)/notifications/$', NotificationTemplateNotificationList.as_view(), name='notification_template_notification_list'),
|
||||
]
|
||||
|
||||
__all__ = ['urls']
|
||||
50
awx/api/urls/organization.py
Normal file
50
awx/api/urls/organization.py
Normal file
@@ -0,0 +1,50 @@
|
||||
# Copyright (c) 2017 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
from django.conf.urls import url
|
||||
|
||||
from awx.api.views import (
|
||||
OrganizationList,
|
||||
OrganizationDetail,
|
||||
OrganizationUsersList,
|
||||
OrganizationAdminsList,
|
||||
OrganizationInventoriesList,
|
||||
OrganizationProjectsList,
|
||||
OrganizationWorkflowJobTemplatesList,
|
||||
OrganizationTeamsList,
|
||||
OrganizationCredentialList,
|
||||
OrganizationActivityStreamList,
|
||||
OrganizationNotificationTemplatesList,
|
||||
OrganizationNotificationTemplatesAnyList,
|
||||
OrganizationNotificationTemplatesErrorList,
|
||||
OrganizationNotificationTemplatesSuccessList,
|
||||
OrganizationInstanceGroupsList,
|
||||
OrganizationObjectRolesList,
|
||||
OrganizationAccessList,
|
||||
)
|
||||
|
||||
|
||||
urls = [
|
||||
url(r'^$', OrganizationList.as_view(), name='organization_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', OrganizationDetail.as_view(), name='organization_detail'),
|
||||
url(r'^(?P<pk>[0-9]+)/users/$', OrganizationUsersList.as_view(), name='organization_users_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/admins/$', OrganizationAdminsList.as_view(), name='organization_admins_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/inventories/$', OrganizationInventoriesList.as_view(), name='organization_inventories_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/projects/$', OrganizationProjectsList.as_view(), name='organization_projects_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/workflow_job_templates/$', OrganizationWorkflowJobTemplatesList.as_view(), name='organization_workflow_job_templates_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/teams/$', OrganizationTeamsList.as_view(), name='organization_teams_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/credentials/$', OrganizationCredentialList.as_view(), name='organization_credential_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/activity_stream/$', OrganizationActivityStreamList.as_view(), name='organization_activity_stream_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/notification_templates/$', OrganizationNotificationTemplatesList.as_view(), name='organization_notification_templates_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/notification_templates_any/$', OrganizationNotificationTemplatesAnyList.as_view(),
|
||||
name='organization_notification_templates_any_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', OrganizationNotificationTemplatesErrorList.as_view(),
|
||||
name='organization_notification_templates_error_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', OrganizationNotificationTemplatesSuccessList.as_view(),
|
||||
name='organization_notification_templates_success_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/instance_groups/$', OrganizationInstanceGroupsList.as_view(), name='organization_instance_groups_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/object_roles/$', OrganizationObjectRolesList.as_view(), name='organization_object_roles_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/access_list/$', OrganizationAccessList.as_view(), name='organization_access_list'),
|
||||
]
|
||||
|
||||
__all__ = ['urls']
|
||||
44
awx/api/urls/project.py
Normal file
44
awx/api/urls/project.py
Normal file
@@ -0,0 +1,44 @@
|
||||
# Copyright (c) 2017 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
from django.conf.urls import url
|
||||
|
||||
from awx.api.views import (
|
||||
ProjectList,
|
||||
ProjectDetail,
|
||||
ProjectPlaybooks,
|
||||
ProjectInventories,
|
||||
ProjectScmInventorySources,
|
||||
ProjectTeamsList,
|
||||
ProjectUpdateView,
|
||||
ProjectUpdatesList,
|
||||
ProjectActivityStreamList,
|
||||
ProjectSchedulesList,
|
||||
ProjectNotificationTemplatesAnyList,
|
||||
ProjectNotificationTemplatesErrorList,
|
||||
ProjectNotificationTemplatesSuccessList,
|
||||
ProjectObjectRolesList,
|
||||
ProjectAccessList,
|
||||
)
|
||||
|
||||
|
||||
urls = [
|
||||
url(r'^$', ProjectList.as_view(), name='project_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', ProjectDetail.as_view(), name='project_detail'),
|
||||
url(r'^(?P<pk>[0-9]+)/playbooks/$', ProjectPlaybooks.as_view(), name='project_playbooks'),
|
||||
url(r'^(?P<pk>[0-9]+)/inventories/$', ProjectInventories.as_view(), name='project_inventories'),
|
||||
url(r'^(?P<pk>[0-9]+)/scm_inventory_sources/$', ProjectScmInventorySources.as_view(), name='project_scm_inventory_sources'),
|
||||
url(r'^(?P<pk>[0-9]+)/teams/$', ProjectTeamsList.as_view(), name='project_teams_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/update/$', ProjectUpdateView.as_view(), name='project_update_view'),
|
||||
url(r'^(?P<pk>[0-9]+)/project_updates/$', ProjectUpdatesList.as_view(), name='project_updates_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/activity_stream/$', ProjectActivityStreamList.as_view(), name='project_activity_stream_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/schedules/$', ProjectSchedulesList.as_view(), name='project_schedules_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/notification_templates_any/$', ProjectNotificationTemplatesAnyList.as_view(), name='project_notification_templates_any_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', ProjectNotificationTemplatesErrorList.as_view(), name='project_notification_templates_error_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', ProjectNotificationTemplatesSuccessList.as_view(),
|
||||
name='project_notification_templates_success_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/object_roles/$', ProjectObjectRolesList.as_view(), name='project_object_roles_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/access_list/$', ProjectAccessList.as_view(), name='project_access_list'),
|
||||
]
|
||||
|
||||
__all__ = ['urls']
|
||||
27
awx/api/urls/project_update.py
Normal file
27
awx/api/urls/project_update.py
Normal file
@@ -0,0 +1,27 @@
|
||||
# Copyright (c) 2017 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
from django.conf.urls import url
|
||||
|
||||
from awx.api.views import (
|
||||
ProjectUpdateList,
|
||||
ProjectUpdateDetail,
|
||||
ProjectUpdateCancel,
|
||||
ProjectUpdateStdout,
|
||||
ProjectUpdateScmInventoryUpdates,
|
||||
ProjectUpdateNotificationsList,
|
||||
ProjectUpdateEventsList,
|
||||
)
|
||||
|
||||
|
||||
urls = [
|
||||
url(r'^$', ProjectUpdateList.as_view(), name='project_update_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', ProjectUpdateDetail.as_view(), name='project_update_detail'),
|
||||
url(r'^(?P<pk>[0-9]+)/cancel/$', ProjectUpdateCancel.as_view(), name='project_update_cancel'),
|
||||
url(r'^(?P<pk>[0-9]+)/stdout/$', ProjectUpdateStdout.as_view(), name='project_update_stdout'),
|
||||
url(r'^(?P<pk>[0-9]+)/scm_inventory_updates/$', ProjectUpdateScmInventoryUpdates.as_view(), name='project_update_scm_inventory_updates'),
|
||||
url(r'^(?P<pk>[0-9]+)/notifications/$', ProjectUpdateNotificationsList.as_view(), name='project_update_notifications_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/events/$', ProjectUpdateEventsList.as_view(), name='project_update_events_list'),
|
||||
]
|
||||
|
||||
__all__ = ['urls']
|
||||
25
awx/api/urls/role.py
Normal file
25
awx/api/urls/role.py
Normal file
@@ -0,0 +1,25 @@
|
||||
# Copyright (c) 2017 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
from django.conf.urls import url
|
||||
|
||||
from awx.api.views import (
|
||||
RoleList,
|
||||
RoleDetail,
|
||||
RoleUsersList,
|
||||
RoleTeamsList,
|
||||
RoleParentsList,
|
||||
RoleChildrenList,
|
||||
)
|
||||
|
||||
|
||||
urls = [
|
||||
url(r'^$', RoleList.as_view(), name='role_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', RoleDetail.as_view(), name='role_detail'),
|
||||
url(r'^(?P<pk>[0-9]+)/users/$', RoleUsersList.as_view(), name='role_users_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/teams/$', RoleTeamsList.as_view(), name='role_teams_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/parents/$', RoleParentsList.as_view(), name='role_parents_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/children/$', RoleChildrenList.as_view(), name='role_children_list'),
|
||||
]
|
||||
|
||||
__all__ = ['urls']
|
||||
21
awx/api/urls/schedule.py
Normal file
21
awx/api/urls/schedule.py
Normal file
@@ -0,0 +1,21 @@
|
||||
# Copyright (c) 2017 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
from django.conf.urls import url
|
||||
|
||||
from awx.api.views import (
|
||||
ScheduleList,
|
||||
ScheduleDetail,
|
||||
ScheduleUnifiedJobsList,
|
||||
ScheduleCredentialsList,
|
||||
)
|
||||
|
||||
|
||||
urls = [
|
||||
url(r'^$', ScheduleList.as_view(), name='schedule_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', ScheduleDetail.as_view(), name='schedule_detail'),
|
||||
url(r'^(?P<pk>[0-9]+)/jobs/$', ScheduleUnifiedJobsList.as_view(), name='schedule_unified_jobs_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/credentials/$', ScheduleCredentialsList.as_view(), name='schedule_credentials_list'),
|
||||
]
|
||||
|
||||
__all__ = ['urls']
|
||||
23
awx/api/urls/system_job.py
Normal file
23
awx/api/urls/system_job.py
Normal file
@@ -0,0 +1,23 @@
|
||||
# Copyright (c) 2017 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
from django.conf.urls import url
|
||||
|
||||
from awx.api.views import (
|
||||
SystemJobList,
|
||||
SystemJobDetail,
|
||||
SystemJobCancel,
|
||||
SystemJobNotificationsList,
|
||||
SystemJobEventsList
|
||||
)
|
||||
|
||||
|
||||
urls = [
|
||||
url(r'^$', SystemJobList.as_view(), name='system_job_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', SystemJobDetail.as_view(), name='system_job_detail'),
|
||||
url(r'^(?P<pk>[0-9]+)/cancel/$', SystemJobCancel.as_view(), name='system_job_cancel'),
|
||||
url(r'^(?P<pk>[0-9]+)/notifications/$', SystemJobNotificationsList.as_view(), name='system_job_notifications_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/events/$', SystemJobEventsList.as_view(), name='system_job_events_list'),
|
||||
]
|
||||
|
||||
__all__ = ['urls']
|
||||
32
awx/api/urls/system_job_template.py
Normal file
32
awx/api/urls/system_job_template.py
Normal file
@@ -0,0 +1,32 @@
|
||||
# Copyright (c) 2017 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
from django.conf.urls import url
|
||||
|
||||
from awx.api.views import (
|
||||
SystemJobTemplateList,
|
||||
SystemJobTemplateDetail,
|
||||
SystemJobTemplateLaunch,
|
||||
SystemJobTemplateJobsList,
|
||||
SystemJobTemplateSchedulesList,
|
||||
SystemJobTemplateNotificationTemplatesAnyList,
|
||||
SystemJobTemplateNotificationTemplatesErrorList,
|
||||
SystemJobTemplateNotificationTemplatesSuccessList,
|
||||
)
|
||||
|
||||
|
||||
urls = [
|
||||
url(r'^$', SystemJobTemplateList.as_view(), name='system_job_template_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', SystemJobTemplateDetail.as_view(), name='system_job_template_detail'),
|
||||
url(r'^(?P<pk>[0-9]+)/launch/$', SystemJobTemplateLaunch.as_view(), name='system_job_template_launch'),
|
||||
url(r'^(?P<pk>[0-9]+)/jobs/$', SystemJobTemplateJobsList.as_view(), name='system_job_template_jobs_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/schedules/$', SystemJobTemplateSchedulesList.as_view(), name='system_job_template_schedules_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/notification_templates_any/$', SystemJobTemplateNotificationTemplatesAnyList.as_view(),
|
||||
name='system_job_template_notification_templates_any_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', SystemJobTemplateNotificationTemplatesErrorList.as_view(),
|
||||
name='system_job_template_notification_templates_error_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', SystemJobTemplateNotificationTemplatesSuccessList.as_view(),
|
||||
name='system_job_template_notification_templates_success_list'),
|
||||
]
|
||||
|
||||
__all__ = ['urls']
|
||||
31
awx/api/urls/team.py
Normal file
31
awx/api/urls/team.py
Normal file
@@ -0,0 +1,31 @@
|
||||
# Copyright (c) 2017 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
from django.conf.urls import url
|
||||
|
||||
from awx.api.views import (
|
||||
TeamList,
|
||||
TeamDetail,
|
||||
TeamProjectsList,
|
||||
TeamUsersList,
|
||||
TeamCredentialsList,
|
||||
TeamRolesList,
|
||||
TeamObjectRolesList,
|
||||
TeamActivityStreamList,
|
||||
TeamAccessList,
|
||||
)
|
||||
|
||||
|
||||
urls = [
|
||||
url(r'^$', TeamList.as_view(), name='team_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', TeamDetail.as_view(), name='team_detail'),
|
||||
url(r'^(?P<pk>[0-9]+)/projects/$', TeamProjectsList.as_view(), name='team_projects_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/users/$', TeamUsersList.as_view(), name='team_users_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/credentials/$', TeamCredentialsList.as_view(), name='team_credentials_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/roles/$', TeamRolesList.as_view(), name='team_roles_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/object_roles/$', TeamObjectRolesList.as_view(), name='team_object_roles_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/activity_stream/$', TeamActivityStreamList.as_view(), name='team_activity_stream_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/access_list/$', TeamAccessList.as_view(), name='team_access_list'),
|
||||
]
|
||||
|
||||
__all__ = ['urls']
|
||||
127
awx/api/urls/urls.py
Normal file
127
awx/api/urls/urls.py
Normal file
@@ -0,0 +1,127 @@
|
||||
# Copyright (c) 2015 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
from django.conf.urls import include, url
|
||||
|
||||
from awx.api.views import (
|
||||
ApiRootView,
|
||||
ApiV1RootView,
|
||||
ApiV2RootView,
|
||||
ApiV1PingView,
|
||||
ApiV1ConfigView,
|
||||
AuthView,
|
||||
AuthTokenView,
|
||||
UserMeList,
|
||||
DashboardView,
|
||||
DashboardJobsGraphView,
|
||||
UnifiedJobTemplateList,
|
||||
UnifiedJobList,
|
||||
HostAnsibleFactsDetail,
|
||||
JobCredentialsList,
|
||||
JobExtraCredentialsList,
|
||||
JobTemplateCredentialsList,
|
||||
JobTemplateExtraCredentialsList,
|
||||
SchedulePreview,
|
||||
ScheduleZoneInfo,
|
||||
)
|
||||
|
||||
from .organization import urls as organization_urls
|
||||
from .user import urls as user_urls
|
||||
from .project import urls as project_urls
|
||||
from .project_update import urls as project_update_urls
|
||||
from .inventory import urls as inventory_urls
|
||||
from .team import urls as team_urls
|
||||
from .host import urls as host_urls
|
||||
from .group import urls as group_urls
|
||||
from .inventory_source import urls as inventory_source_urls
|
||||
from .inventory_update import urls as inventory_update_urls
|
||||
from .inventory_script import urls as inventory_script_urls
|
||||
from .credential_type import urls as credential_type_urls
|
||||
from .credential import urls as credential_urls
|
||||
from .role import urls as role_urls
|
||||
from .job_template import urls as job_template_urls
|
||||
from .job import urls as job_urls
|
||||
from .job_host_summary import urls as job_host_summary_urls
|
||||
from .job_event import urls as job_event_urls
|
||||
from .ad_hoc_command import urls as ad_hoc_command_urls
|
||||
from .ad_hoc_command_event import urls as ad_hoc_command_event_urls
|
||||
from .system_job_template import urls as system_job_template_urls
|
||||
from .system_job import urls as system_job_urls
|
||||
from .workflow_job_template import urls as workflow_job_template_urls
|
||||
from .workflow_job import urls as workflow_job_urls
|
||||
from .notification_template import urls as notification_template_urls
|
||||
from .notification import urls as notification_urls
|
||||
from .label import urls as label_urls
|
||||
from .workflow_job_template_node import urls as workflow_job_template_node_urls
|
||||
from .workflow_job_node import urls as workflow_job_node_urls
|
||||
from .schedule import urls as schedule_urls
|
||||
from .activity_stream import urls as activity_stream_urls
|
||||
from .instance import urls as instance_urls
|
||||
from .instance_group import urls as instance_group_urls
|
||||
|
||||
|
||||
v1_urls = [
|
||||
url(r'^$', ApiV1RootView.as_view(), name='api_v1_root_view'),
|
||||
url(r'^ping/$', ApiV1PingView.as_view(), name='api_v1_ping_view'),
|
||||
url(r'^config/$', ApiV1ConfigView.as_view(), name='api_v1_config_view'),
|
||||
url(r'^auth/$', AuthView.as_view()),
|
||||
url(r'^authtoken/$', AuthTokenView.as_view(), name='auth_token_view'),
|
||||
url(r'^me/$', UserMeList.as_view(), name='user_me_list'),
|
||||
url(r'^dashboard/$', DashboardView.as_view(), name='dashboard_view'),
|
||||
url(r'^dashboard/graphs/jobs/$', DashboardJobsGraphView.as_view(), name='dashboard_jobs_graph_view'),
|
||||
url(r'^settings/', include('awx.conf.urls')),
|
||||
url(r'^instances/', include(instance_urls)),
|
||||
url(r'^instance_groups/', include(instance_group_urls)),
|
||||
url(r'^schedules/', include(schedule_urls)),
|
||||
url(r'^organizations/', include(organization_urls)),
|
||||
url(r'^users/', include(user_urls)),
|
||||
url(r'^projects/', include(project_urls)),
|
||||
url(r'^project_updates/', include(project_update_urls)),
|
||||
url(r'^teams/', include(team_urls)),
|
||||
url(r'^inventories/', include(inventory_urls)),
|
||||
url(r'^hosts/', include(host_urls)),
|
||||
url(r'^groups/', include(group_urls)),
|
||||
url(r'^inventory_sources/', include(inventory_source_urls)),
|
||||
url(r'^inventory_updates/', include(inventory_update_urls)),
|
||||
url(r'^inventory_scripts/', include(inventory_script_urls)),
|
||||
url(r'^credentials/', include(credential_urls)),
|
||||
url(r'^roles/', include(role_urls)),
|
||||
url(r'^job_templates/', include(job_template_urls)),
|
||||
url(r'^jobs/', include(job_urls)),
|
||||
url(r'^job_host_summaries/', include(job_host_summary_urls)),
|
||||
url(r'^job_events/', include(job_event_urls)),
|
||||
url(r'^ad_hoc_commands/', include(ad_hoc_command_urls)),
|
||||
url(r'^ad_hoc_command_events/', include(ad_hoc_command_event_urls)),
|
||||
url(r'^system_job_templates/', include(system_job_template_urls)),
|
||||
url(r'^system_jobs/', include(system_job_urls)),
|
||||
url(r'^notification_templates/', include(notification_template_urls)),
|
||||
url(r'^notifications/', include(notification_urls)),
|
||||
url(r'^workflow_job_templates/', include(workflow_job_template_urls)),
|
||||
url(r'^workflow_jobs/', include(workflow_job_urls)),
|
||||
url(r'^labels/', include(label_urls)),
|
||||
url(r'^workflow_job_template_nodes/', include(workflow_job_template_node_urls)),
|
||||
url(r'^workflow_job_nodes/', include(workflow_job_node_urls)),
|
||||
url(r'^unified_job_templates/$', UnifiedJobTemplateList.as_view(), name='unified_job_template_list'),
|
||||
url(r'^unified_jobs/$', UnifiedJobList.as_view(), name='unified_job_list'),
|
||||
url(r'^activity_stream/', include(activity_stream_urls)),
|
||||
]
|
||||
|
||||
v2_urls = [
|
||||
url(r'^$', ApiV2RootView.as_view(), name='api_v2_root_view'),
|
||||
url(r'^credential_types/', include(credential_type_urls)),
|
||||
url(r'^hosts/(?P<pk>[0-9]+)/ansible_facts/$', HostAnsibleFactsDetail.as_view(), name='host_ansible_facts_detail'),
|
||||
url(r'^jobs/(?P<pk>[0-9]+)/extra_credentials/$', JobExtraCredentialsList.as_view(), name='job_extra_credentials_list'),
|
||||
url(r'^jobs/(?P<pk>[0-9]+)/credentials/$', JobCredentialsList.as_view(), name='job_credentials_list'),
|
||||
url(r'^job_templates/(?P<pk>[0-9]+)/extra_credentials/$', JobTemplateExtraCredentialsList.as_view(), name='job_template_extra_credentials_list'),
|
||||
url(r'^job_templates/(?P<pk>[0-9]+)/credentials/$', JobTemplateCredentialsList.as_view(), name='job_template_credentials_list'),
|
||||
url(r'^schedules/preview/$', SchedulePreview.as_view(), name='schedule_rrule'),
|
||||
url(r'^schedules/zoneinfo/$', ScheduleZoneInfo.as_view(), name='schedule_zoneinfo'),
|
||||
]
|
||||
|
||||
app_name = 'api'
|
||||
urlpatterns = [
|
||||
url(r'^$', ApiRootView.as_view(), name='api_root_view'),
|
||||
url(r'^(?P<version>(v2))/', include(v2_urls)),
|
||||
url(r'^(?P<version>(v1|v2))/', include(v1_urls))
|
||||
]
|
||||
33
awx/api/urls/user.py
Normal file
33
awx/api/urls/user.py
Normal file
@@ -0,0 +1,33 @@
|
||||
# Copyright (c) 2017 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
from django.conf.urls import url
|
||||
|
||||
from awx.api.views import (
|
||||
UserList,
|
||||
UserDetail,
|
||||
UserTeamsList,
|
||||
UserOrganizationsList,
|
||||
UserAdminOfOrganizationsList,
|
||||
UserProjectsList,
|
||||
UserCredentialsList,
|
||||
UserRolesList,
|
||||
UserActivityStreamList,
|
||||
UserAccessList,
|
||||
)
|
||||
|
||||
|
||||
urls = [
|
||||
url(r'^$', UserList.as_view(), name='user_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', UserDetail.as_view(), name='user_detail'),
|
||||
url(r'^(?P<pk>[0-9]+)/teams/$', UserTeamsList.as_view(), name='user_teams_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/organizations/$', UserOrganizationsList.as_view(), name='user_organizations_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/admin_of_organizations/$', UserAdminOfOrganizationsList.as_view(), name='user_admin_of_organizations_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/projects/$', UserProjectsList.as_view(), name='user_projects_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/credentials/$', UserCredentialsList.as_view(), name='user_credentials_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/roles/$', UserRolesList.as_view(), name='user_roles_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/activity_stream/$', UserActivityStreamList.as_view(), name='user_activity_stream_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/access_list/$', UserAccessList.as_view(), name='user_access_list'),
|
||||
]
|
||||
|
||||
__all__ = ['urls']
|
||||
29
awx/api/urls/workflow_job.py
Normal file
29
awx/api/urls/workflow_job.py
Normal file
@@ -0,0 +1,29 @@
|
||||
# Copyright (c) 2017 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
from django.conf.urls import url
|
||||
|
||||
from awx.api.views import (
|
||||
WorkflowJobList,
|
||||
WorkflowJobDetail,
|
||||
WorkflowJobWorkflowNodesList,
|
||||
WorkflowJobLabelList,
|
||||
WorkflowJobCancel,
|
||||
WorkflowJobRelaunch,
|
||||
WorkflowJobNotificationsList,
|
||||
WorkflowJobActivityStreamList,
|
||||
)
|
||||
|
||||
|
||||
urls = [
|
||||
url(r'^$', WorkflowJobList.as_view(), name='workflow_job_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', WorkflowJobDetail.as_view(), name='workflow_job_detail'),
|
||||
url(r'^(?P<pk>[0-9]+)/workflow_nodes/$', WorkflowJobWorkflowNodesList.as_view(), name='workflow_job_workflow_nodes_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/labels/$', WorkflowJobLabelList.as_view(), name='workflow_job_label_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/cancel/$', WorkflowJobCancel.as_view(), name='workflow_job_cancel'),
|
||||
url(r'^(?P<pk>[0-9]+)/relaunch/$', WorkflowJobRelaunch.as_view(), name='workflow_job_relaunch'),
|
||||
url(r'^(?P<pk>[0-9]+)/notifications/$', WorkflowJobNotificationsList.as_view(), name='workflow_job_notifications_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/activity_stream/$', WorkflowJobActivityStreamList.as_view(), name='workflow_job_activity_stream_list'),
|
||||
]
|
||||
|
||||
__all__ = ['urls']
|
||||
25
awx/api/urls/workflow_job_node.py
Normal file
25
awx/api/urls/workflow_job_node.py
Normal file
@@ -0,0 +1,25 @@
|
||||
# Copyright (c) 2017 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
from django.conf.urls import url
|
||||
|
||||
from awx.api.views import (
|
||||
WorkflowJobNodeList,
|
||||
WorkflowJobNodeDetail,
|
||||
WorkflowJobNodeSuccessNodesList,
|
||||
WorkflowJobNodeFailureNodesList,
|
||||
WorkflowJobNodeAlwaysNodesList,
|
||||
WorkflowJobNodeCredentialsList,
|
||||
)
|
||||
|
||||
|
||||
urls = [
|
||||
url(r'^$', WorkflowJobNodeList.as_view(), name='workflow_job_node_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', WorkflowJobNodeDetail.as_view(), name='workflow_job_node_detail'),
|
||||
url(r'^(?P<pk>[0-9]+)/success_nodes/$', WorkflowJobNodeSuccessNodesList.as_view(), name='workflow_job_node_success_nodes_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/failure_nodes/$', WorkflowJobNodeFailureNodesList.as_view(), name='workflow_job_node_failure_nodes_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/always_nodes/$', WorkflowJobNodeAlwaysNodesList.as_view(), name='workflow_job_node_always_nodes_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/credentials/$', WorkflowJobNodeCredentialsList.as_view(), name='workflow_job_node_credentials_list'),
|
||||
]
|
||||
|
||||
__all__ = ['urls']
|
||||
46
awx/api/urls/workflow_job_template.py
Normal file
46
awx/api/urls/workflow_job_template.py
Normal file
@@ -0,0 +1,46 @@
|
||||
# Copyright (c) 2017 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
from django.conf.urls import url
|
||||
|
||||
from awx.api.views import (
|
||||
WorkflowJobTemplateList,
|
||||
WorkflowJobTemplateDetail,
|
||||
WorkflowJobTemplateJobsList,
|
||||
WorkflowJobTemplateLaunch,
|
||||
WorkflowJobTemplateCopy,
|
||||
WorkflowJobTemplateSchedulesList,
|
||||
WorkflowJobTemplateSurveySpec,
|
||||
WorkflowJobTemplateWorkflowNodesList,
|
||||
WorkflowJobTemplateActivityStreamList,
|
||||
WorkflowJobTemplateNotificationTemplatesAnyList,
|
||||
WorkflowJobTemplateNotificationTemplatesErrorList,
|
||||
WorkflowJobTemplateNotificationTemplatesSuccessList,
|
||||
WorkflowJobTemplateAccessList,
|
||||
WorkflowJobTemplateObjectRolesList,
|
||||
WorkflowJobTemplateLabelList,
|
||||
)
|
||||
|
||||
|
||||
urls = [
|
||||
url(r'^$', WorkflowJobTemplateList.as_view(), name='workflow_job_template_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', WorkflowJobTemplateDetail.as_view(), name='workflow_job_template_detail'),
|
||||
url(r'^(?P<pk>[0-9]+)/workflow_jobs/$', WorkflowJobTemplateJobsList.as_view(), name='workflow_job_template_jobs_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/launch/$', WorkflowJobTemplateLaunch.as_view(), name='workflow_job_template_launch'),
|
||||
url(r'^(?P<pk>[0-9]+)/copy/$', WorkflowJobTemplateCopy.as_view(), name='workflow_job_template_copy'),
|
||||
url(r'^(?P<pk>[0-9]+)/schedules/$', WorkflowJobTemplateSchedulesList.as_view(), name='workflow_job_template_schedules_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/survey_spec/$', WorkflowJobTemplateSurveySpec.as_view(), name='workflow_job_template_survey_spec'),
|
||||
url(r'^(?P<pk>[0-9]+)/workflow_nodes/$', WorkflowJobTemplateWorkflowNodesList.as_view(), name='workflow_job_template_workflow_nodes_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/activity_stream/$', WorkflowJobTemplateActivityStreamList.as_view(), name='workflow_job_template_activity_stream_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/notification_templates_any/$', WorkflowJobTemplateNotificationTemplatesAnyList.as_view(),
|
||||
name='workflow_job_template_notification_templates_any_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', WorkflowJobTemplateNotificationTemplatesErrorList.as_view(),
|
||||
name='workflow_job_template_notification_templates_error_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', WorkflowJobTemplateNotificationTemplatesSuccessList.as_view(),
|
||||
name='workflow_job_template_notification_templates_success_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/access_list/$', WorkflowJobTemplateAccessList.as_view(), name='workflow_job_template_access_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/object_roles/$', WorkflowJobTemplateObjectRolesList.as_view(), name='workflow_job_template_object_roles_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/labels/$', WorkflowJobTemplateLabelList.as_view(), name='workflow_job_template_label_list'),
|
||||
]
|
||||
|
||||
__all__ = ['urls']
|
||||
25
awx/api/urls/workflow_job_template_node.py
Normal file
25
awx/api/urls/workflow_job_template_node.py
Normal file
@@ -0,0 +1,25 @@
|
||||
# Copyright (c) 2017 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
from django.conf.urls import url
|
||||
|
||||
from awx.api.views import (
|
||||
WorkflowJobTemplateNodeList,
|
||||
WorkflowJobTemplateNodeDetail,
|
||||
WorkflowJobTemplateNodeSuccessNodesList,
|
||||
WorkflowJobTemplateNodeFailureNodesList,
|
||||
WorkflowJobTemplateNodeAlwaysNodesList,
|
||||
WorkflowJobTemplateNodeCredentialsList,
|
||||
)
|
||||
|
||||
|
||||
urls = [
|
||||
url(r'^$', WorkflowJobTemplateNodeList.as_view(), name='workflow_job_template_node_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', WorkflowJobTemplateNodeDetail.as_view(), name='workflow_job_template_node_detail'),
|
||||
url(r'^(?P<pk>[0-9]+)/success_nodes/$', WorkflowJobTemplateNodeSuccessNodesList.as_view(), name='workflow_job_template_node_success_nodes_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/failure_nodes/$', WorkflowJobTemplateNodeFailureNodesList.as_view(), name='workflow_job_template_node_failure_nodes_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/always_nodes/$', WorkflowJobTemplateNodeAlwaysNodesList.as_view(), name='workflow_job_template_node_always_nodes_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/credentials/$', WorkflowJobTemplateNodeCredentialsList.as_view(), name='workflow_job_template_node_credentials_list'),
|
||||
]
|
||||
|
||||
__all__ = ['urls']
|
||||
874
awx/api/views.py
874
awx/api/views.py
File diff suppressed because it is too large
Load Diff
23
awx/celery.py
Normal file
23
awx/celery.py
Normal file
@@ -0,0 +1,23 @@
|
||||
# Copyright (c) 2017 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
import os
|
||||
from celery import Celery
|
||||
|
||||
|
||||
try:
|
||||
import awx.devonly # noqa
|
||||
MODE = 'development'
|
||||
except ImportError: # pragma: no cover
|
||||
MODE = 'production'
|
||||
|
||||
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'awx.settings.%s' % MODE)
|
||||
|
||||
app = Celery('awx')
|
||||
app.config_from_object('django.conf:settings', namespace='CELERY')
|
||||
app.autodiscover_tasks()
|
||||
|
||||
if __name__ == '__main__':
|
||||
app.start()
|
||||
@@ -1,6 +1,7 @@
|
||||
# Python
|
||||
import logging
|
||||
import urlparse
|
||||
from collections import OrderedDict
|
||||
|
||||
# Django
|
||||
from django.core.validators import URLValidator
|
||||
@@ -53,6 +54,47 @@ class StringListField(ListField):
|
||||
return super(StringListField, self).to_representation(value)
|
||||
|
||||
|
||||
class StringListBooleanField(ListField):
|
||||
|
||||
default_error_messages = {
|
||||
'type_error': _('Expected None, True, False, a string or list of strings but got {input_type} instead.'),
|
||||
}
|
||||
child = CharField()
|
||||
|
||||
def to_representation(self, value):
|
||||
try:
|
||||
if isinstance(value, (list, tuple)):
|
||||
return super(StringListBooleanField, self).to_representation(value)
|
||||
elif value in NullBooleanField.TRUE_VALUES:
|
||||
return True
|
||||
elif value in NullBooleanField.FALSE_VALUES:
|
||||
return False
|
||||
elif value in NullBooleanField.NULL_VALUES:
|
||||
return None
|
||||
elif isinstance(value, basestring):
|
||||
return self.child.to_representation(value)
|
||||
except TypeError:
|
||||
pass
|
||||
|
||||
self.fail('type_error', input_type=type(value))
|
||||
|
||||
def to_internal_value(self, data):
|
||||
try:
|
||||
if isinstance(data, (list, tuple)):
|
||||
return super(StringListBooleanField, self).to_internal_value(data)
|
||||
elif data in NullBooleanField.TRUE_VALUES:
|
||||
return True
|
||||
elif data in NullBooleanField.FALSE_VALUES:
|
||||
return False
|
||||
elif data in NullBooleanField.NULL_VALUES:
|
||||
return None
|
||||
elif isinstance(data, basestring):
|
||||
return self.child.run_validation(data)
|
||||
except TypeError:
|
||||
pass
|
||||
self.fail('type_error', input_type=type(data))
|
||||
|
||||
|
||||
class URLField(CharField):
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
@@ -83,7 +125,7 @@ class URLField(CharField):
|
||||
else:
|
||||
netloc = '{}@{}' % (url_parts.username, netloc)
|
||||
value = urlparse.urlunsplit([url_parts.scheme, netloc, url_parts.path, url_parts.query, url_parts.fragment])
|
||||
except:
|
||||
except Exception:
|
||||
raise # If something fails here, just fall through and let the validators check it.
|
||||
super(URLField, self).run_validators(value)
|
||||
|
||||
@@ -98,5 +140,29 @@ class KeyValueField(DictField):
|
||||
ret = super(KeyValueField, self).to_internal_value(data)
|
||||
for value in data.values():
|
||||
if not isinstance(value, six.string_types + six.integer_types + (float,)):
|
||||
if isinstance(value, OrderedDict):
|
||||
value = dict(value)
|
||||
self.fail('invalid_child', input=value)
|
||||
return ret
|
||||
|
||||
|
||||
class ListTuplesField(ListField):
|
||||
default_error_messages = {
|
||||
'type_error': _('Expected a list of tuples of max length 2 but got {input_type} instead.'),
|
||||
}
|
||||
|
||||
def to_representation(self, value):
|
||||
if isinstance(value, (list, tuple)):
|
||||
return super(ListTuplesField, self).to_representation(value)
|
||||
else:
|
||||
self.fail('type_error', input_type=type(value))
|
||||
|
||||
def to_internal_value(self, data):
|
||||
if isinstance(data, list):
|
||||
for x in data:
|
||||
if not isinstance(x, (list, tuple)) or len(x) > 2:
|
||||
self.fail('type_error', input_type=type(x))
|
||||
|
||||
return super(ListTuplesField, self).to_internal_value(data)
|
||||
else:
|
||||
self.fail('type_error', input_type=type(data))
|
||||
|
||||
@@ -120,6 +120,9 @@ class SettingsRegistry(object):
|
||||
def is_setting_read_only(self, setting):
|
||||
return bool(self._registry.get(setting, {}).get('read_only', False))
|
||||
|
||||
def get_setting_category(self, setting):
|
||||
return self._registry.get(setting, {}).get('category_slug', None)
|
||||
|
||||
def get_setting_field(self, setting, mixin_class=None, for_user=False, **kwargs):
|
||||
from rest_framework.fields import empty
|
||||
field_kwargs = {}
|
||||
@@ -159,14 +162,14 @@ class SettingsRegistry(object):
|
||||
if category_slug == 'user' and for_user:
|
||||
try:
|
||||
field_instance.default = original_field_instance.to_representation(getattr(self.settings, setting))
|
||||
except:
|
||||
except Exception:
|
||||
logger.warning('Unable to retrieve default value for user setting "%s".', setting, exc_info=True)
|
||||
elif not field_instance.read_only or field_instance.default is empty or field_instance.defined_in_file:
|
||||
try:
|
||||
field_instance.default = original_field_instance.to_representation(self.settings._awx_conf_settings._get_default(setting))
|
||||
except AttributeError:
|
||||
pass
|
||||
except:
|
||||
except Exception:
|
||||
logger.warning('Unable to retrieve default value for setting "%s".', setting, exc_info=True)
|
||||
|
||||
# `PENDO_TRACKING_STATE` is disabled for the open source awx license
|
||||
|
||||
@@ -16,7 +16,7 @@ class SettingSerializer(BaseSerializer):
|
||||
class Meta:
|
||||
model = Setting
|
||||
fields = ('id', 'key', 'value')
|
||||
readonly_fields = ('id', 'key', 'value')
|
||||
read_only_fields = ('id', 'key', 'value')
|
||||
|
||||
def __init__(self, instance=None, data=serializers.empty, **kwargs):
|
||||
if instance is None and data is not serializers.empty and 'key' in data:
|
||||
@@ -87,8 +87,10 @@ class SettingSingletonSerializer(serializers.Serializer):
|
||||
if self.instance and not hasattr(self.instance, key):
|
||||
continue
|
||||
extra_kwargs = {}
|
||||
# Make LICENSE read-only here; update via /api/v1/config/ only.
|
||||
if key == 'LICENSE':
|
||||
# Make LICENSE and AWX_ISOLATED_KEY_GENERATION read-only here;
|
||||
# LICENSE is only updated via /api/v1/config/
|
||||
# AWX_ISOLATED_KEY_GENERATION is only set/unset via the setup playbook
|
||||
if key in ('LICENSE', 'AWX_ISOLATED_KEY_GENERATION'):
|
||||
extra_kwargs['read_only'] = True
|
||||
field = settings_registry.get_setting_field(key, mixin_class=SettingFieldMixin, for_user=bool(category_slug == 'user'), **extra_kwargs)
|
||||
fields[key] = field
|
||||
|
||||
@@ -9,10 +9,12 @@ import time
|
||||
import six
|
||||
|
||||
# Django
|
||||
from django.conf import LazySettings
|
||||
from django.conf import settings, UserSettingsHolder
|
||||
from django.core.cache import cache as django_cache
|
||||
from django.core.exceptions import ImproperlyConfigured
|
||||
from django.db import ProgrammingError, OperationalError
|
||||
from django.utils.functional import cached_property
|
||||
|
||||
# Django REST Framework
|
||||
from rest_framework.fields import empty, SkipField
|
||||
@@ -229,7 +231,8 @@ class SettingsWrapper(UserSettingsHolder):
|
||||
self.__dict__['cache'] = EncryptedCacheProxy(cache, registry)
|
||||
self.__dict__['registry'] = registry
|
||||
|
||||
def _get_supported_settings(self):
|
||||
@cached_property
|
||||
def all_supported_settings(self):
|
||||
return self.registry.get_registered_settings()
|
||||
|
||||
def _preload_cache(self):
|
||||
@@ -366,7 +369,7 @@ class SettingsWrapper(UserSettingsHolder):
|
||||
return internal_value
|
||||
else:
|
||||
return field.run_validation(value)
|
||||
except:
|
||||
except Exception:
|
||||
logger.warning(
|
||||
'The current value "%r" for setting "%s" is invalid.',
|
||||
value, name, exc_info=True)
|
||||
@@ -381,7 +384,7 @@ class SettingsWrapper(UserSettingsHolder):
|
||||
|
||||
def __getattr__(self, name):
|
||||
value = empty
|
||||
if name in self._get_supported_settings():
|
||||
if name in self.all_supported_settings:
|
||||
with _log_database_error():
|
||||
value = self._get_local(name)
|
||||
if value is not empty:
|
||||
@@ -413,7 +416,7 @@ class SettingsWrapper(UserSettingsHolder):
|
||||
# post_save handler will delete from cache when changed.
|
||||
|
||||
def __setattr__(self, name, value):
|
||||
if name in self._get_supported_settings():
|
||||
if name in self.all_supported_settings:
|
||||
with _log_database_error():
|
||||
self._set_local(name, value)
|
||||
else:
|
||||
@@ -429,7 +432,7 @@ class SettingsWrapper(UserSettingsHolder):
|
||||
# pre_delete handler will delete from cache.
|
||||
|
||||
def __delattr__(self, name):
|
||||
if name in self._get_supported_settings():
|
||||
if name in self.all_supported_settings:
|
||||
with _log_database_error():
|
||||
self._del_local(name)
|
||||
else:
|
||||
@@ -439,7 +442,7 @@ class SettingsWrapper(UserSettingsHolder):
|
||||
keys = []
|
||||
with _log_database_error():
|
||||
for setting in Setting.objects.filter(
|
||||
key__in=self._get_supported_settings(), user__isnull=True):
|
||||
key__in=self.all_supported_settings, user__isnull=True):
|
||||
# Skip returning settings that have been overridden but are
|
||||
# considered to be "not set".
|
||||
if setting.value is None and SETTING_CACHE_NOTSET == SETTING_CACHE_NONE:
|
||||
@@ -453,8 +456,24 @@ class SettingsWrapper(UserSettingsHolder):
|
||||
|
||||
def is_overridden(self, setting):
|
||||
set_locally = False
|
||||
if setting in self._get_supported_settings():
|
||||
if setting in self.all_supported_settings:
|
||||
with _log_database_error():
|
||||
set_locally = Setting.objects.filter(key=setting, user__isnull=True).exists()
|
||||
set_on_default = getattr(self.default_settings, 'is_overridden', lambda s: False)(setting)
|
||||
return (set_locally or set_on_default)
|
||||
|
||||
|
||||
def __getattr_without_cache__(self, name):
|
||||
# Django 1.10 added an optimization to settings lookup:
|
||||
# https://code.djangoproject.com/ticket/27625
|
||||
# https://github.com/django/django/commit/c1b221a9b913315998a1bcec2f29a9361a74d1ac
|
||||
# This change caches settings lookups on the __dict__ of the LazySettings
|
||||
# object, which is not okay to do in an environment where settings can
|
||||
# change in-process (the entire point of awx's custom settings implementation)
|
||||
# This restores the original behavior that *does not* cache.
|
||||
if self._wrapped is empty:
|
||||
self._setup(name)
|
||||
return getattr(self._wrapped, name)
|
||||
|
||||
|
||||
LazySettings.__getattr__ = __getattr_without_cache__
|
||||
|
||||
86
awx/conf/tests/unit/test_fields.py
Normal file
86
awx/conf/tests/unit/test_fields.py
Normal file
@@ -0,0 +1,86 @@
|
||||
import pytest
|
||||
|
||||
from rest_framework.fields import ValidationError
|
||||
from awx.conf.fields import StringListBooleanField, ListTuplesField
|
||||
|
||||
|
||||
class TestStringListBooleanField():
|
||||
|
||||
FIELD_VALUES = [
|
||||
("hello", "hello"),
|
||||
(("a", "b"), ["a", "b"]),
|
||||
(["a", "b", 1, 3.13, "foo", "bar", "foobar"], ["a", "b", "1", "3.13", "foo", "bar", "foobar"]),
|
||||
("True", True),
|
||||
("TRUE", True),
|
||||
("true", True),
|
||||
(True, True),
|
||||
("False", False),
|
||||
("FALSE", False),
|
||||
("false", False),
|
||||
(False, False),
|
||||
("", None),
|
||||
("null", None),
|
||||
("NULL", None),
|
||||
]
|
||||
|
||||
FIELD_VALUES_INVALID = [
|
||||
1.245,
|
||||
{"a": "b"},
|
||||
]
|
||||
|
||||
@pytest.mark.parametrize("value_in, value_known", FIELD_VALUES)
|
||||
def test_to_internal_value_valid(self, value_in, value_known):
|
||||
field = StringListBooleanField()
|
||||
v = field.to_internal_value(value_in)
|
||||
assert v == value_known
|
||||
|
||||
@pytest.mark.parametrize("value", FIELD_VALUES_INVALID)
|
||||
def test_to_internal_value_invalid(self, value):
|
||||
field = StringListBooleanField()
|
||||
with pytest.raises(ValidationError) as e:
|
||||
field.to_internal_value(value)
|
||||
assert e.value.detail[0] == "Expected None, True, False, a string or list " \
|
||||
"of strings but got {} instead.".format(type(value))
|
||||
|
||||
@pytest.mark.parametrize("value_in, value_known", FIELD_VALUES)
|
||||
def test_to_representation_valid(self, value_in, value_known):
|
||||
field = StringListBooleanField()
|
||||
v = field.to_representation(value_in)
|
||||
assert v == value_known
|
||||
|
||||
@pytest.mark.parametrize("value", FIELD_VALUES_INVALID)
|
||||
def test_to_representation_invalid(self, value):
|
||||
field = StringListBooleanField()
|
||||
with pytest.raises(ValidationError) as e:
|
||||
field.to_representation(value)
|
||||
assert e.value.detail[0] == "Expected None, True, False, a string or list " \
|
||||
"of strings but got {} instead.".format(type(value))
|
||||
|
||||
|
||||
class TestListTuplesField():
|
||||
|
||||
FIELD_VALUES = [
|
||||
([('a', 'b'), ('abc', '123')], [("a", "b"), ("abc", "123")]),
|
||||
]
|
||||
|
||||
FIELD_VALUES_INVALID = [
|
||||
("abc", type("abc")),
|
||||
([('a', 'b', 'c'), ('abc', '123', '456')], type(('a',))),
|
||||
(['a', 'b'], type('a')),
|
||||
(123, type(123)),
|
||||
]
|
||||
|
||||
@pytest.mark.parametrize("value_in, value_known", FIELD_VALUES)
|
||||
def test_to_internal_value_valid(self, value_in, value_known):
|
||||
field = ListTuplesField()
|
||||
v = field.to_internal_value(value_in)
|
||||
assert v == value_known
|
||||
|
||||
@pytest.mark.parametrize("value, t", FIELD_VALUES_INVALID)
|
||||
def test_to_internal_value_invalid(self, value, t):
|
||||
field = ListTuplesField()
|
||||
with pytest.raises(ValidationError) as e:
|
||||
field.to_internal_value(value)
|
||||
assert e.value.detail[0] == "Expected a list of tuples of max length 2 " \
|
||||
"but got {} instead.".format(t)
|
||||
|
||||
@@ -1,16 +1,17 @@
|
||||
# Copyright (c) 2016 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
# Django
|
||||
from django.conf.urls import patterns
|
||||
|
||||
# Tower
|
||||
from awx.api.urls import url
|
||||
|
||||
|
||||
urlpatterns = patterns(
|
||||
'awx.conf.views',
|
||||
url(r'^$', 'setting_category_list'),
|
||||
url(r'^(?P<category_slug>[a-z0-9-]+)/$', 'setting_singleton_detail'),
|
||||
url(r'^logging/test/$', 'setting_logging_test'),
|
||||
from django.conf.urls import url
|
||||
from awx.conf.views import (
|
||||
SettingCategoryList,
|
||||
SettingSingletonDetail,
|
||||
SettingLoggingTest,
|
||||
)
|
||||
|
||||
|
||||
urlpatterns = [
|
||||
url(r'^$', SettingCategoryList.as_view(), name='setting_category_list'),
|
||||
url(r'^(?P<category_slug>[a-z0-9-]+)/$', SettingSingletonDetail.as_view(), name='setting_singleton_detail'),
|
||||
url(r'^logging/test/$', SettingLoggingTest.as_view(), name='setting_logging_test'),
|
||||
]
|
||||
|
||||
@@ -6,10 +6,10 @@ import glob
|
||||
import os
|
||||
import shutil
|
||||
|
||||
# RedBaron
|
||||
from redbaron import RedBaron, indent
|
||||
# AWX
|
||||
from awx.conf.registry import settings_registry
|
||||
|
||||
__all__ = ['comment_assignments']
|
||||
__all__ = ['comment_assignments', 'conf_to_dict']
|
||||
|
||||
|
||||
def comment_assignments(patterns, assignment_names, dry_run=True, backup_suffix='.old'):
|
||||
@@ -30,6 +30,8 @@ def comment_assignments(patterns, assignment_names, dry_run=True, backup_suffix=
|
||||
|
||||
|
||||
def comment_assignments_in_file(filename, assignment_names, dry_run=True, backup_filename=None):
|
||||
from redbaron import RedBaron, indent
|
||||
|
||||
if isinstance(assignment_names, basestring):
|
||||
assignment_names = [assignment_names]
|
||||
else:
|
||||
@@ -103,6 +105,13 @@ def comment_assignments_in_file(filename, assignment_names, dry_run=True, backup
|
||||
return '\n'.join(diff_lines)
|
||||
|
||||
|
||||
def conf_to_dict(obj):
|
||||
return {
|
||||
'category': settings_registry.get_setting_category(obj.key),
|
||||
'name': obj.key,
|
||||
}
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
pattern = os.path.join(os.path.dirname(__file__), '..', 'settings', 'local_*.py')
|
||||
diffs = comment_assignments(pattern, ['AUTH_LDAP_ORGANIZATION_MAP'])
|
||||
|
||||
@@ -123,6 +123,8 @@ class EventContext(object):
|
||||
event_data['job_id'] = int(os.getenv('JOB_ID', '0'))
|
||||
if os.getenv('AD_HOC_COMMAND_ID', ''):
|
||||
event_data['ad_hoc_command_id'] = int(os.getenv('AD_HOC_COMMAND_ID', '0'))
|
||||
if os.getenv('PROJECT_UPDATE_ID', ''):
|
||||
event_data['project_update_id'] = int(os.getenv('PROJECT_UPDATE_ID', '0'))
|
||||
event_data.setdefault('pid', os.getpid())
|
||||
event_data.setdefault('uuid', str(uuid.uuid4()))
|
||||
event_data.setdefault('created', datetime.datetime.utcnow().isoformat())
|
||||
@@ -145,7 +147,7 @@ class EventContext(object):
|
||||
event_data['res'] = {}
|
||||
event_dict = dict(event=event, event_data=event_data)
|
||||
for key in event_data.keys():
|
||||
if key in ('job_id', 'ad_hoc_command_id', 'uuid', 'parent_uuid', 'created',):
|
||||
if key in ('job_id', 'ad_hoc_command_id', 'project_update_id', 'uuid', 'parent_uuid', 'created',):
|
||||
event_dict[key] = event_data.pop(key)
|
||||
elif key in ('verbosity', 'pid'):
|
||||
event_dict[key] = event_data[key]
|
||||
|
||||
@@ -16,7 +16,7 @@ class argv_placeholder(object):
|
||||
def __del__(self):
|
||||
try:
|
||||
argv_ready(sys.argv)
|
||||
except:
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
|
||||
@@ -1,3 +1,8 @@
|
||||
# Copyright (c) 2017 Ansible by Red Hat
|
||||
# All Rights Reserved
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
from collections import OrderedDict
|
||||
import json
|
||||
import mock
|
||||
@@ -23,9 +28,9 @@ with mock.patch.dict(os.environ, {'ANSIBLE_STDOUT_CALLBACK': CALLBACK,
|
||||
'ANSIBLE_CALLBACK_PLUGINS': PLUGINS}):
|
||||
from ansible.cli.playbook import PlaybookCLI
|
||||
from ansible.executor.playbook_executor import PlaybookExecutor
|
||||
from ansible.inventory import Inventory
|
||||
from ansible.inventory.manager import InventoryManager
|
||||
from ansible.parsing.dataloader import DataLoader
|
||||
from ansible.vars import VariableManager
|
||||
from ansible.vars.manager import VariableManager
|
||||
|
||||
# Add awx/lib to sys.path so we can use the plugin
|
||||
path = os.path.abspath(os.path.join(PLUGINS, '..', '..'))
|
||||
@@ -62,9 +67,8 @@ def executor(tmpdir_factory, request):
|
||||
cli.parse()
|
||||
options = cli.parser.parse_args(['-v'])[0]
|
||||
loader = DataLoader()
|
||||
variable_manager = VariableManager()
|
||||
inventory = Inventory(loader=loader, variable_manager=variable_manager,
|
||||
host_list=['localhost'])
|
||||
variable_manager = VariableManager(loader=loader)
|
||||
inventory = InventoryManager(loader=loader, sources='localhost,')
|
||||
variable_manager.set_inventory(inventory)
|
||||
|
||||
return PlaybookExecutor(playbooks=playbook_files, inventory=inventory,
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@@ -70,14 +70,9 @@ register(
|
||||
label=_('Remote Host Headers'),
|
||||
help_text=_('HTTP headers and meta keys to search to determine remote host '
|
||||
'name or IP. Add additional items to this list, such as '
|
||||
'"HTTP_X_FORWARDED_FOR", if behind a reverse proxy.\n\n'
|
||||
'Note: The headers will be searched in order and the first '
|
||||
'found remote host name or IP will be used.\n\n'
|
||||
'In the below example 8.8.8.7 would be the chosen IP address.\n'
|
||||
'X-Forwarded-For: 8.8.8.7, 192.168.2.1, 127.0.0.1\n'
|
||||
'Host: 127.0.0.1\n'
|
||||
'REMOTE_HOST_HEADERS = [\'HTTP_X_FORWARDED_FOR\', '
|
||||
'\'REMOTE_ADDR\', \'REMOTE_HOST\']'),
|
||||
'"HTTP_X_FORWARDED_FOR", if behind a reverse proxy. '
|
||||
'See the "Proxy Support" section of the Adminstrator guide for'
|
||||
'more details.'),
|
||||
category=_('System'),
|
||||
category_slug='system',
|
||||
)
|
||||
@@ -88,9 +83,7 @@ register(
|
||||
label=_('Proxy IP Whitelist'),
|
||||
help_text=_("If Tower is behind a reverse proxy/load balancer, use this setting "
|
||||
"to whitelist the proxy IP addresses from which Tower should trust "
|
||||
"custom REMOTE_HOST_HEADERS header values\n"
|
||||
"REMOTE_HOST_HEADERS = ['HTTP_X_FORWARDED_FOR', ''REMOTE_ADDR', 'REMOTE_HOST']\n"
|
||||
"PROXY_IP_WHITELIST = ['10.0.1.100', '10.0.1.101']\n"
|
||||
"custom REMOTE_HOST_HEADERS header values. "
|
||||
"If this setting is an empty list (the default), the headers specified by "
|
||||
"REMOTE_HOST_HEADERS will be trusted unconditionally')"),
|
||||
category=_('System'),
|
||||
@@ -105,7 +98,7 @@ def _load_default_license_from_file():
|
||||
license_data = json.load(open(license_file))
|
||||
logger.debug('Read license data from "%s".', license_file)
|
||||
return license_data
|
||||
except:
|
||||
except Exception:
|
||||
logger.warning('Could not read license from "%s".', license_file, exc_info=True)
|
||||
return {}
|
||||
|
||||
@@ -268,7 +261,8 @@ register(
|
||||
field_class=fields.IntegerField,
|
||||
min_value=0,
|
||||
label=_('Job Event Standard Output Maximum Display Size'),
|
||||
help_text=_(u'Maximum Size of Standard Output in bytes to display for a single job or ad hoc command event. `stdout` will end with `\u2026` when truncated.'),
|
||||
help_text=_(
|
||||
u'Maximum Size of Standard Output in bytes to display for a single job or ad hoc command event. `stdout` will end with `\u2026` when truncated.'),
|
||||
category=_('Jobs'),
|
||||
category_slug='jobs',
|
||||
)
|
||||
|
||||
@@ -5,7 +5,9 @@ import re
|
||||
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
CLOUD_PROVIDERS = ('azure_rm', 'ec2', 'gce', 'vmware', 'openstack', 'satellite6', 'cloudforms')
|
||||
CLOUD_PROVIDERS = ('azure_rm', 'ec2', 'gce', 'vmware', 'openstack', 'rhv', 'satellite6', 'cloudforms', 'tower')
|
||||
SCHEDULEABLE_PROVIDERS = CLOUD_PROVIDERS + ('custom', 'scm',)
|
||||
PRIVILEGE_ESCALATION_METHODS = [ ('sudo', _('Sudo')), ('su', _('Su')), ('pbrun', _('Pbrun')), ('pfexec', _('Pfexec')), ('dzdo', _('DZDO')), ('pmrun', _('Pmrun')), ('runas', _('Runas'))]
|
||||
PRIVILEGE_ESCALATION_METHODS = [
|
||||
('sudo', _('Sudo')), ('su', _('Su')), ('pbrun', _('Pbrun')), ('pfexec', _('Pfexec')),
|
||||
('dzdo', _('DZDO')), ('pmrun', _('Pmrun')), ('runas', _('Runas'))]
|
||||
ANSI_SGR_PATTERN = re.compile(r'\x1b\[[0-9;]*m')
|
||||
|
||||
@@ -24,7 +24,7 @@ def discard_groups(message):
|
||||
|
||||
@channel_session
|
||||
def ws_connect(message):
|
||||
connect_text = {'accept':False, 'user':None}
|
||||
message.reply_channel.send({"accept": True})
|
||||
|
||||
message.content['method'] = 'FAKE'
|
||||
request = AsgiRequest(message)
|
||||
@@ -35,11 +35,12 @@ def ws_connect(message):
|
||||
auth_token = AuthToken.objects.get(key=token)
|
||||
if auth_token.in_valid_tokens:
|
||||
message.channel_session['user_id'] = auth_token.user_id
|
||||
connect_text['accept'] = True
|
||||
connect_text['user'] = auth_token.user_id
|
||||
message.reply_channel.send({"text": json.dumps({"accept": True, "user": auth_token.user_id})})
|
||||
return None
|
||||
except AuthToken.DoesNotExist:
|
||||
logger.error("auth_token provided was invalid.")
|
||||
message.reply_channel.send({"text": json.dumps(connect_text)})
|
||||
message.reply_channel.send({"close": True})
|
||||
return None
|
||||
|
||||
|
||||
@channel_session
|
||||
@@ -81,7 +82,8 @@ def ws_receive(message):
|
||||
if access_cls is not None:
|
||||
user_access = access_cls(user)
|
||||
if not user_access.get_queryset().filter(pk=oid).exists():
|
||||
message.reply_channel.send({"text": json.dumps({"error": "access denied to channel {0} for resource id {1}".format(group_name, oid)})})
|
||||
message.reply_channel.send({"text": json.dumps(
|
||||
{"error": "access denied to channel {0} for resource id {1}".format(group_name, oid)})})
|
||||
continue
|
||||
current_groups.add(name)
|
||||
Group(name).add(message.reply_channel)
|
||||
|
||||
@@ -1,24 +1,36 @@
|
||||
class AwxTaskError(Exception):
|
||||
"""Base exception for errors in unified job runs"""
|
||||
def __init__(self, task, message=None):
|
||||
# Copyright (c) 2018 Ansible by Red Hat
|
||||
# All Rights Reserved.
|
||||
|
||||
# Celery does not respect exception type when using a serializer different than pickle;
|
||||
# and awx uses the json serializer
|
||||
# https://github.com/celery/celery/issues/3586
|
||||
|
||||
|
||||
class _AwxTaskError():
|
||||
def build_exception(self, task, message=None):
|
||||
if message is None:
|
||||
message = "Execution error running {}".format(task.log_format)
|
||||
super(AwxTaskError, self).__init__(message)
|
||||
self.task = task
|
||||
|
||||
|
||||
class TaskCancel(AwxTaskError):
|
||||
"""Canceled flag caused run_pexpect to kill the job run"""
|
||||
def __init__(self, task, rc):
|
||||
super(TaskCancel, self).__init__(
|
||||
task, message="{} was canceled (rc={})".format(task.log_format, rc))
|
||||
self.rc = rc
|
||||
e = Exception(message)
|
||||
e.task = task
|
||||
e.is_awx_task_error = True
|
||||
return e
|
||||
|
||||
def TaskCancel(self, task, rc):
|
||||
"""Canceled flag caused run_pexpect to kill the job run"""
|
||||
message="{} was canceled (rc={})".format(task.log_format, rc)
|
||||
e = self.build_exception(task, message)
|
||||
e.rc = rc
|
||||
e.awx_task_error_type = "TaskCancel"
|
||||
return e
|
||||
|
||||
def TaskError(self, task, rc):
|
||||
"""Userspace error (non-zero exit code) in run_pexpect subprocess"""
|
||||
message = "{} encountered an error (rc={}), please see task stdout for details.".format(task.log_format, rc)
|
||||
e = self.build_exception(task, message)
|
||||
e.rc = rc
|
||||
e.awx_task_error_type = "TaskError"
|
||||
return e
|
||||
|
||||
|
||||
class TaskError(AwxTaskError):
|
||||
"""Userspace error (non-zero exit code) in run_pexpect subprocess"""
|
||||
def __init__(self, task, rc):
|
||||
super(TaskError, self).__init__(
|
||||
task, message="%s encountered an error (rc=%s), please see task stdout for details.".format(task.log_format, rc))
|
||||
self.rc = rc
|
||||
AwxTaskError = _AwxTaskError()
|
||||
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
import base64
|
||||
import cStringIO
|
||||
import codecs
|
||||
import StringIO
|
||||
import json
|
||||
@@ -9,6 +8,7 @@ import stat
|
||||
import tempfile
|
||||
import time
|
||||
import logging
|
||||
from distutils.version import LooseVersion as Version
|
||||
|
||||
from django.conf import settings
|
||||
|
||||
@@ -142,7 +142,7 @@ class IsolatedManager(object):
|
||||
|
||||
# if an ssh private key fifo exists, read its contents and delete it
|
||||
if self.ssh_key_path:
|
||||
buff = cStringIO.StringIO()
|
||||
buff = StringIO.StringIO()
|
||||
with open(self.ssh_key_path, 'r') as fifo:
|
||||
for line in fifo:
|
||||
buff.write(line)
|
||||
@@ -182,7 +182,7 @@ class IsolatedManager(object):
|
||||
job_timeout=settings.AWX_ISOLATED_LAUNCH_TIMEOUT,
|
||||
pexpect_timeout=5
|
||||
)
|
||||
output = buff.getvalue()
|
||||
output = buff.getvalue().encode('utf-8')
|
||||
playbook_logger.info('Isolated job {} dispatch:\n{}'.format(self.instance.id, output))
|
||||
if status != 'successful':
|
||||
self.stdout_handle.write(output)
|
||||
@@ -282,7 +282,7 @@ class IsolatedManager(object):
|
||||
status = 'failed'
|
||||
output = ''
|
||||
rc = None
|
||||
buff = cStringIO.StringIO()
|
||||
buff = StringIO.StringIO()
|
||||
last_check = time.time()
|
||||
seek = 0
|
||||
job_timeout = remaining = self.job_timeout
|
||||
@@ -303,7 +303,7 @@ class IsolatedManager(object):
|
||||
time.sleep(1)
|
||||
continue
|
||||
|
||||
buff = cStringIO.StringIO()
|
||||
buff = StringIO.StringIO()
|
||||
logger.debug('Checking on isolated job {} with `check_isolated.yml`.'.format(self.instance.id))
|
||||
status, rc = IsolatedManager.run_pexpect(
|
||||
args, self.awx_playbook_path(), self.management_env, buff,
|
||||
@@ -313,7 +313,7 @@ class IsolatedManager(object):
|
||||
pexpect_timeout=5,
|
||||
proot_cmd=self.proot_cmd
|
||||
)
|
||||
output = buff.getvalue()
|
||||
output = buff.getvalue().encode('utf-8')
|
||||
playbook_logger.info('Isolated job {} check:\n{}'.format(self.instance.id, output))
|
||||
|
||||
path = self.path_to('artifacts', 'stdout')
|
||||
@@ -355,14 +355,14 @@ class IsolatedManager(object):
|
||||
}
|
||||
args = self._build_args('clean_isolated.yml', '%s,' % self.host, extra_vars)
|
||||
logger.debug('Cleaning up job {} on isolated host with `clean_isolated.yml` playbook.'.format(self.instance.id))
|
||||
buff = cStringIO.StringIO()
|
||||
buff = StringIO.StringIO()
|
||||
timeout = max(60, 2 * settings.AWX_ISOLATED_CONNECTION_TIMEOUT)
|
||||
status, rc = IsolatedManager.run_pexpect(
|
||||
args, self.awx_playbook_path(), self.management_env, buff,
|
||||
idle_timeout=timeout, job_timeout=timeout,
|
||||
pexpect_timeout=5
|
||||
)
|
||||
output = buff.getvalue()
|
||||
output = buff.getvalue().encode('utf-8')
|
||||
playbook_logger.info('Isolated job {} cleanup:\n{}'.format(self.instance.id, output))
|
||||
|
||||
if status != 'successful':
|
||||
@@ -370,7 +370,24 @@ class IsolatedManager(object):
|
||||
logger.warning('Isolated job {} cleanup error, output:\n{}'.format(self.instance.id, output))
|
||||
|
||||
@classmethod
|
||||
def health_check(cls, instance_qs):
|
||||
def update_capacity(cls, instance, task_result, awx_application_version):
|
||||
instance.version = task_result['version']
|
||||
|
||||
isolated_version = instance.version.split("-", 1)[0]
|
||||
cluster_version = awx_application_version.split("-", 1)[0]
|
||||
|
||||
if Version(cluster_version) > Version(isolated_version):
|
||||
err_template = "Isolated instance {} reports version {}, cluster node is at {}, setting capacity to zero."
|
||||
logger.error(err_template.format(instance.hostname, instance.version, awx_application_version))
|
||||
instance.capacity = 0
|
||||
else:
|
||||
if instance.capacity == 0 and task_result['capacity']:
|
||||
logger.warning('Isolated instance {} has re-joined.'.format(instance.hostname))
|
||||
instance.capacity = int(task_result['capacity'])
|
||||
instance.save(update_fields=['capacity', 'version', 'modified'])
|
||||
|
||||
@classmethod
|
||||
def health_check(cls, instance_qs, awx_application_version):
|
||||
'''
|
||||
:param instance_qs: List of Django objects representing the
|
||||
isolated instances to manage
|
||||
@@ -388,14 +405,14 @@ class IsolatedManager(object):
|
||||
env = cls._base_management_env()
|
||||
env['ANSIBLE_STDOUT_CALLBACK'] = 'json'
|
||||
|
||||
buff = cStringIO.StringIO()
|
||||
buff = StringIO.StringIO()
|
||||
timeout = max(60, 2 * settings.AWX_ISOLATED_CONNECTION_TIMEOUT)
|
||||
status, rc = IsolatedManager.run_pexpect(
|
||||
args, cls.awx_playbook_path(), env, buff,
|
||||
idle_timeout=timeout, job_timeout=timeout,
|
||||
pexpect_timeout=5
|
||||
)
|
||||
output = buff.getvalue()
|
||||
output = buff.getvalue().encode('utf-8')
|
||||
buff.close()
|
||||
|
||||
try:
|
||||
@@ -412,11 +429,7 @@ class IsolatedManager(object):
|
||||
except (KeyError, IndexError):
|
||||
task_result = {}
|
||||
if 'capacity' in task_result:
|
||||
instance.version = task_result['version']
|
||||
if instance.capacity == 0 and task_result['capacity']:
|
||||
logger.warning('Isolated instance {} has re-joined.'.format(instance.hostname))
|
||||
instance.capacity = int(task_result['capacity'])
|
||||
instance.save(update_fields=['capacity', 'version', 'modified'])
|
||||
cls.update_capacity(instance, task_result, awx_application_version)
|
||||
elif instance.capacity == 0:
|
||||
logger.debug('Isolated instance {} previously marked as lost, could not re-join.'.format(
|
||||
instance.hostname))
|
||||
@@ -431,7 +444,7 @@ class IsolatedManager(object):
|
||||
instance.hostname, instance.modified))
|
||||
|
||||
@staticmethod
|
||||
def wrap_stdout_handle(instance, private_data_dir, stdout_handle, event_data_key='job_id'):
|
||||
def get_stdout_handle(instance, private_data_dir, event_data_key='job_id'):
|
||||
dispatcher = CallbackQueueDispatcher()
|
||||
|
||||
def job_event_callback(event_data):
|
||||
@@ -449,7 +462,7 @@ class IsolatedManager(object):
|
||||
event_data.get('event', ''), event_data['uuid'], instance.id, event_data))
|
||||
dispatcher.dispatch(event_data)
|
||||
|
||||
return OutputEventFilter(stdout_handle, job_event_callback)
|
||||
return OutputEventFilter(job_event_callback)
|
||||
|
||||
def run(self, instance, host, private_data_dir, proot_temp_dir):
|
||||
"""
|
||||
|
||||
@@ -99,7 +99,6 @@ def run_pexpect(args, cwd, env, logfile,
|
||||
password_patterns = expect_passwords.keys()
|
||||
password_values = expect_passwords.values()
|
||||
|
||||
logfile_pos = logfile.tell()
|
||||
child = pexpect.spawn(
|
||||
args[0], args[1:], cwd=cwd, env=env, ignore_sighup=True,
|
||||
encoding='utf-8', echo=False,
|
||||
@@ -116,13 +115,11 @@ def run_pexpect(args, cwd, env, logfile,
|
||||
password = password_values[result_id]
|
||||
if password is not None:
|
||||
child.sendline(password)
|
||||
if logfile_pos != logfile.tell():
|
||||
logfile_pos = logfile.tell()
|
||||
last_stdout_update = time.time()
|
||||
if cancelled_callback:
|
||||
try:
|
||||
canceled = cancelled_callback()
|
||||
except:
|
||||
except Exception:
|
||||
logger.exception('Could not check cancel callback - canceling immediately')
|
||||
if isinstance(extra_update_fields, dict):
|
||||
extra_update_fields['job_explanation'] = "System error during job execution, check system logs"
|
||||
@@ -271,12 +268,8 @@ def __run__(private_data_dir):
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
__version__ = '3.2.0'
|
||||
try:
|
||||
import awx
|
||||
__version__ = awx.__version__
|
||||
except ImportError:
|
||||
pass # in devel, `awx` isn't an installed package
|
||||
import awx
|
||||
__version__ = awx.__version__
|
||||
parser = argparse.ArgumentParser(description='manage a daemonized, isolated ansible playbook')
|
||||
parser.add_argument('--version', action='version', version=__version__ + '-isolated')
|
||||
parser.add_argument('command', choices=['start', 'stop', 'is-alive'])
|
||||
|
||||
@@ -6,6 +6,7 @@ import copy
|
||||
import json
|
||||
import re
|
||||
import six
|
||||
import urllib
|
||||
|
||||
from jinja2 import Environment, StrictUndefined
|
||||
from jinja2.exceptions import UndefinedError
|
||||
@@ -18,12 +19,12 @@ from django.db.models.signals import (
|
||||
)
|
||||
from django.db.models.signals import m2m_changed
|
||||
from django.db import models
|
||||
from django.db.models.fields.related import (
|
||||
add_lazy_relation,
|
||||
SingleRelatedObjectDescriptor,
|
||||
ReverseSingleRelatedObjectDescriptor,
|
||||
ManyRelatedObjectsDescriptor,
|
||||
ReverseManyRelatedObjectsDescriptor,
|
||||
from django.db.models.fields.related import add_lazy_relation
|
||||
from django.db.models.fields.related_descriptors import (
|
||||
ReverseOneToOneDescriptor,
|
||||
ForwardManyToOneDescriptor,
|
||||
ManyToManyDescriptor,
|
||||
ReverseManyToOneDescriptor,
|
||||
)
|
||||
from django.utils.encoding import smart_text
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
@@ -96,7 +97,7 @@ class JSONBField(upstream_JSONBField):
|
||||
# https://bitbucket.org/offline/django-annoying/src/a0de8b294db3/annoying/fields.py
|
||||
|
||||
|
||||
class AutoSingleRelatedObjectDescriptor(SingleRelatedObjectDescriptor):
|
||||
class AutoSingleRelatedObjectDescriptor(ReverseOneToOneDescriptor):
|
||||
"""Descriptor for access to the object from its related class."""
|
||||
|
||||
def __get__(self, instance, instance_type=None):
|
||||
@@ -139,7 +140,7 @@ def resolve_role_field(obj, field):
|
||||
raise Exception(smart_text('{} refers to a {}, not a Role'.format(field, type(obj))))
|
||||
ret.append(obj.id)
|
||||
else:
|
||||
if type(obj) is ManyRelatedObjectsDescriptor:
|
||||
if type(obj) is ManyToManyDescriptor:
|
||||
for o in obj.all():
|
||||
ret += resolve_role_field(o, field_components[1])
|
||||
else:
|
||||
@@ -179,7 +180,7 @@ def is_implicit_parent(parent_role, child_role):
|
||||
return False
|
||||
|
||||
|
||||
class ImplicitRoleDescriptor(ReverseSingleRelatedObjectDescriptor):
|
||||
class ImplicitRoleDescriptor(ForwardManyToOneDescriptor):
|
||||
pass
|
||||
|
||||
|
||||
@@ -230,18 +231,18 @@ class ImplicitRoleField(models.ForeignKey):
|
||||
field_name, sep, field_attr = field_name.partition('.')
|
||||
field = getattr(cls, field_name)
|
||||
|
||||
if type(field) is ReverseManyRelatedObjectsDescriptor or \
|
||||
type(field) is ManyRelatedObjectsDescriptor:
|
||||
if type(field) is ReverseManyToOneDescriptor or \
|
||||
type(field) is ManyToManyDescriptor:
|
||||
|
||||
if '.' in field_attr:
|
||||
raise Exception('Referencing deep roles through ManyToMany fields is unsupported.')
|
||||
|
||||
if type(field) is ReverseManyRelatedObjectsDescriptor:
|
||||
if type(field) is ReverseManyToOneDescriptor:
|
||||
sender = field.through
|
||||
else:
|
||||
sender = field.related.through
|
||||
|
||||
reverse = type(field) is ManyRelatedObjectsDescriptor
|
||||
reverse = type(field) is ManyToManyDescriptor
|
||||
m2m_changed.connect(self.m2m_update(field_attr, reverse), sender, weak=False)
|
||||
|
||||
def m2m_update(self, field_attr, _reverse):
|
||||
@@ -352,6 +353,7 @@ class SmartFilterField(models.TextField):
|
||||
# https://docs.python.org/2/library/stdtypes.html#truth-value-testing
|
||||
if not value:
|
||||
return None
|
||||
value = urllib.unquote(value)
|
||||
try:
|
||||
SmartFilter().query_from_string(value)
|
||||
except RuntimeError, e:
|
||||
@@ -415,6 +417,13 @@ class JSONSchemaField(JSONBField):
|
||||
return value
|
||||
|
||||
|
||||
@JSONSchemaField.format_checker.checks('vault_id')
|
||||
def format_vault_id(value):
|
||||
if '@' in value:
|
||||
raise jsonschema.exceptions.FormatError('@ is not an allowed character')
|
||||
return True
|
||||
|
||||
|
||||
@JSONSchemaField.format_checker.checks('ssh_private_key')
|
||||
def format_ssh_private_key(value):
|
||||
# Sanity check: GCE, in particular, provides JSON-encoded private
|
||||
@@ -754,3 +763,22 @@ class CredentialTypeInjectorField(JSONSchemaField):
|
||||
code='invalid',
|
||||
params={'value': value},
|
||||
)
|
||||
|
||||
|
||||
class AskForField(models.BooleanField):
|
||||
"""
|
||||
Denotes whether to prompt on launch for another field on the same template
|
||||
"""
|
||||
def __init__(self, allows_field=None, **kwargs):
|
||||
super(AskForField, self).__init__(**kwargs)
|
||||
self._allows_field = allows_field
|
||||
|
||||
@property
|
||||
def allows_field(self):
|
||||
if self._allows_field is None:
|
||||
try:
|
||||
return self.name[len('ask_'):-len('_on_launch')]
|
||||
except AttributeError:
|
||||
# self.name will be set by the model metaclass, not this field
|
||||
raise Exception('Corresponding allows_field cannot be accessed until model is initialized.')
|
||||
return self._allows_field
|
||||
|
||||
@@ -2,12 +2,12 @@
|
||||
# All Rights Reserved
|
||||
|
||||
from awx.main.utils import get_licenser
|
||||
from django.core.management.base import NoArgsCommand
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
|
||||
class Command(NoArgsCommand):
|
||||
class Command(BaseCommand):
|
||||
"""Returns license type, e.g., 'enterprise', 'open', 'none'"""
|
||||
|
||||
def handle(self, **options):
|
||||
def handle(self, *args, **options):
|
||||
super(Command, self).__init__()
|
||||
return get_licenser().validate().get('license_type', 'none')
|
||||
|
||||
@@ -4,29 +4,28 @@
|
||||
# Python
|
||||
import datetime
|
||||
import logging
|
||||
from optparse import make_option
|
||||
|
||||
# Django
|
||||
from django.core.management.base import NoArgsCommand
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.utils.timezone import now
|
||||
|
||||
# AWX
|
||||
from awx.main.models import ActivityStream
|
||||
|
||||
|
||||
class Command(NoArgsCommand):
|
||||
class Command(BaseCommand):
|
||||
'''
|
||||
Management command to purge old activity stream events.
|
||||
'''
|
||||
|
||||
help = 'Remove old activity stream events from the database'
|
||||
|
||||
option_list = NoArgsCommand.option_list + (
|
||||
make_option('--days', dest='days', type='int', default=90, metavar='N',
|
||||
help='Remove activity stream events more than N days old'),
|
||||
make_option('--dry-run', dest='dry_run', action='store_true',
|
||||
default=False, help='Dry run mode (show items that would '
|
||||
'be removed)'),)
|
||||
def add_arguments(self, parser):
|
||||
parser.add_argument('--days', dest='days', type=int, default=90, metavar='N',
|
||||
help='Remove activity stream events more than N days old')
|
||||
parser.add_argument('--dry-run', dest='dry_run', action='store_true',
|
||||
default=False, help='Dry run mode (show items that would '
|
||||
'be removed)')
|
||||
|
||||
def init_logging(self):
|
||||
log_levels = dict(enumerate([logging.ERROR, logging.INFO,
|
||||
@@ -61,7 +60,7 @@ class Command(NoArgsCommand):
|
||||
n_deleted_items += len(pks_to_delete)
|
||||
self.logger.log(99, "Removed %d items", n_deleted_items)
|
||||
|
||||
def handle_noargs(self, **options):
|
||||
def handle(self, *args, **options):
|
||||
self.verbosity = int(options.get('verbosity', 1))
|
||||
self.init_logging()
|
||||
self.days = int(options.get('days', 30))
|
||||
|
||||
@@ -4,7 +4,6 @@
|
||||
# Python
|
||||
import re
|
||||
from dateutil.relativedelta import relativedelta
|
||||
from optparse import make_option
|
||||
|
||||
# Django
|
||||
from django.core.management.base import BaseCommand, CommandError
|
||||
@@ -93,19 +92,20 @@ class CleanupFacts(object):
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = 'Cleanup facts. For each host older than the value specified, keep one fact scan for each time window (granularity).'
|
||||
option_list = BaseCommand.option_list + (
|
||||
make_option('--older_than',
|
||||
dest='older_than',
|
||||
default='30d',
|
||||
help='Specify the relative time to consider facts older than (w)eek (d)ay or (y)ear (i.e. 5d, 2w, 1y). Defaults to 30d.'),
|
||||
make_option('--granularity',
|
||||
dest='granularity',
|
||||
default='1w',
|
||||
help='Window duration to group same hosts by for deletion (w)eek (d)ay or (y)ear (i.e. 5d, 2w, 1y). Defaults to 1w.'),
|
||||
make_option('--module',
|
||||
dest='module',
|
||||
default=None,
|
||||
help='Limit cleanup to a particular module.'),)
|
||||
|
||||
def add_arguments(self, parser):
|
||||
parser.add_argument('--older_than',
|
||||
dest='older_than',
|
||||
default='30d',
|
||||
help='Specify the relative time to consider facts older than (w)eek (d)ay or (y)ear (i.e. 5d, 2w, 1y). Defaults to 30d.')
|
||||
parser.add_argument('--granularity',
|
||||
dest='granularity',
|
||||
default='1w',
|
||||
help='Window duration to group same hosts by for deletion (w)eek (d)ay or (y)ear (i.e. 5d, 2w, 1y). Defaults to 1w.')
|
||||
parser.add_argument('--module',
|
||||
dest='module',
|
||||
default=None,
|
||||
help='Limit cleanup to a particular module.')
|
||||
|
||||
def __init__(self):
|
||||
super(Command, self).__init__()
|
||||
|
||||
@@ -4,10 +4,9 @@
|
||||
# Python
|
||||
import datetime
|
||||
import logging
|
||||
from optparse import make_option
|
||||
|
||||
# Django
|
||||
from django.core.management.base import NoArgsCommand, CommandError
|
||||
from django.core.management.base import BaseCommand, CommandError
|
||||
from django.db import transaction
|
||||
from django.utils.timezone import now
|
||||
|
||||
@@ -25,41 +24,40 @@ from awx.main.signals import ( # noqa
|
||||
from django.db.models.signals import post_save, post_delete, m2m_changed # noqa
|
||||
|
||||
|
||||
class Command(NoArgsCommand):
|
||||
class Command(BaseCommand):
|
||||
'''
|
||||
Management command to cleanup old jobs and project updates.
|
||||
'''
|
||||
|
||||
help = 'Remove old jobs, project and inventory updates from the database.'
|
||||
|
||||
option_list = NoArgsCommand.option_list + (
|
||||
make_option('--days', dest='days', type='int', default=90, metavar='N',
|
||||
help='Remove jobs/updates executed more than N days ago. Defaults to 90.'),
|
||||
make_option('--dry-run', dest='dry_run', action='store_true',
|
||||
default=False, help='Dry run mode (show items that would '
|
||||
'be removed)'),
|
||||
make_option('--jobs', dest='only_jobs', action='store_true',
|
||||
default=False,
|
||||
help='Remove jobs'),
|
||||
make_option('--ad-hoc-commands', dest='only_ad_hoc_commands',
|
||||
action='store_true', default=False,
|
||||
help='Remove ad hoc commands'),
|
||||
make_option('--project-updates', dest='only_project_updates',
|
||||
action='store_true', default=False,
|
||||
help='Remove project updates'),
|
||||
make_option('--inventory-updates', dest='only_inventory_updates',
|
||||
action='store_true', default=False,
|
||||
help='Remove inventory updates'),
|
||||
make_option('--management-jobs', default=False,
|
||||
action='store_true', dest='only_management_jobs',
|
||||
help='Remove management jobs'),
|
||||
make_option('--notifications', dest='only_notifications',
|
||||
action='store_true', default=False,
|
||||
help='Remove notifications'),
|
||||
make_option('--workflow-jobs', default=False,
|
||||
action='store_true', dest='only_workflow_jobs',
|
||||
help='Remove workflow jobs')
|
||||
)
|
||||
def add_arguments(self, parser):
|
||||
parser.add_argument('--days', dest='days', type=int, default=90, metavar='N',
|
||||
help='Remove jobs/updates executed more than N days ago. Defaults to 90.')
|
||||
parser.add_argument('--dry-run', dest='dry_run', action='store_true',
|
||||
default=False, help='Dry run mode (show items that would '
|
||||
'be removed)')
|
||||
parser.add_argument('--jobs', dest='only_jobs', action='store_true',
|
||||
default=False,
|
||||
help='Remove jobs')
|
||||
parser.add_argument('--ad-hoc-commands', dest='only_ad_hoc_commands',
|
||||
action='store_true', default=False,
|
||||
help='Remove ad hoc commands')
|
||||
parser.add_argument('--project-updates', dest='only_project_updates',
|
||||
action='store_true', default=False,
|
||||
help='Remove project updates')
|
||||
parser.add_argument('--inventory-updates', dest='only_inventory_updates',
|
||||
action='store_true', default=False,
|
||||
help='Remove inventory updates')
|
||||
parser.add_argument('--management-jobs', default=False,
|
||||
action='store_true', dest='only_management_jobs',
|
||||
help='Remove management jobs')
|
||||
parser.add_argument('--notifications', dest='only_notifications',
|
||||
action='store_true', default=False,
|
||||
help='Remove notifications')
|
||||
parser.add_argument('--workflow-jobs', default=False,
|
||||
action='store_true', dest='only_workflow_jobs',
|
||||
help='Remove workflow jobs')
|
||||
|
||||
def cleanup_jobs(self):
|
||||
#jobs_qs = Job.objects.exclude(status__in=('pending', 'running'))
|
||||
@@ -223,7 +221,7 @@ class Command(NoArgsCommand):
|
||||
return skipped, deleted
|
||||
|
||||
@transaction.atomic
|
||||
def handle_noargs(self, **options):
|
||||
def handle(self, *args, **options):
|
||||
self.verbosity = int(options.get('verbosity', 1))
|
||||
self.init_logging()
|
||||
self.days = int(options.get('days', 90))
|
||||
|
||||
@@ -45,10 +45,10 @@ class Command(BaseCommand):
|
||||
inventory=i,
|
||||
variables="ansible_connection: local",
|
||||
created_by=superuser)
|
||||
JobTemplate.objects.create(name='Demo Job Template',
|
||||
playbook='hello_world.yml',
|
||||
project=p,
|
||||
inventory=i,
|
||||
credential=c)
|
||||
jt = JobTemplate.objects.create(name='Demo Job Template',
|
||||
playbook='hello_world.yml',
|
||||
project=p,
|
||||
inventory=i)
|
||||
jt.credentials.add(c)
|
||||
print('Default organization added.')
|
||||
print('Demo Credential, Inventory, and Job Template added.')
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
# Copyright (c) 2016 Ansible, Inc.
|
||||
# All Rights Reserved
|
||||
|
||||
from optparse import make_option
|
||||
import subprocess
|
||||
import warnings
|
||||
|
||||
@@ -22,12 +21,11 @@ class Command(BaseCommand):
|
||||
'Specify `--hostname` to use this command.'
|
||||
)
|
||||
|
||||
option_list = BaseCommand.option_list + (
|
||||
make_option('--hostname', dest='hostname', type='string',
|
||||
help='Hostname used during provisioning'),
|
||||
make_option('--name', dest='name', type='string',
|
||||
help='(PENDING DEPRECIATION) Hostname used during provisioning'),
|
||||
)
|
||||
def add_arguments(self, parser):
|
||||
parser.add_argument('--hostname', dest='hostname', type=str,
|
||||
help='Hostname used during provisioning')
|
||||
parser.add_argument('--name', dest='name', type=str,
|
||||
help='(PENDING DEPRECIATION) Hostname used during provisioning')
|
||||
|
||||
@transaction.atomic
|
||||
def handle(self, *args, **options):
|
||||
|
||||
@@ -4,7 +4,6 @@
|
||||
# Python
|
||||
import json
|
||||
import logging
|
||||
from optparse import make_option
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
@@ -15,7 +14,7 @@ import shutil
|
||||
|
||||
# Django
|
||||
from django.conf import settings
|
||||
from django.core.management.base import NoArgsCommand, CommandError
|
||||
from django.core.management.base import BaseCommand, CommandError
|
||||
from django.core.exceptions import ImproperlyConfigured
|
||||
from django.db import connection, transaction
|
||||
from django.utils.encoding import smart_text
|
||||
@@ -86,10 +85,8 @@ class AnsibleInventoryLoader(object):
|
||||
env['ANSIBLE_INVENTORY_UNPARSED_FAILED'] = '1'
|
||||
venv_libdir = os.path.join(settings.ANSIBLE_VENV_PATH, "lib")
|
||||
env.pop('PYTHONPATH', None) # default to none if no python_ver matches
|
||||
for python_ver in ["python2.7", "python2.6"]:
|
||||
if os.path.isdir(os.path.join(venv_libdir, python_ver)):
|
||||
env['PYTHONPATH'] = os.path.join(venv_libdir, python_ver, "site-packages") + ":"
|
||||
break
|
||||
if os.path.isdir(os.path.join(venv_libdir, "python2.7")):
|
||||
env['PYTHONPATH'] = os.path.join(venv_libdir, "python2.7", "site-packages") + ":"
|
||||
return env
|
||||
|
||||
def get_base_args(self):
|
||||
@@ -168,7 +165,7 @@ class AnsibleInventoryLoader(object):
|
||||
data = json.loads(stdout)
|
||||
if not isinstance(data, dict):
|
||||
raise TypeError('Returned JSON must be a dictionary, got %s instead' % str(type(data)))
|
||||
except:
|
||||
except Exception:
|
||||
logger.error('Failed to load JSON from: %s', stdout)
|
||||
raise
|
||||
return data
|
||||
@@ -176,6 +173,7 @@ class AnsibleInventoryLoader(object):
|
||||
def load(self):
|
||||
base_args = self.get_base_args()
|
||||
logger.info('Reading Ansible inventory source: %s', self.source)
|
||||
|
||||
data = self.command_to_json(base_args + ['--list'])
|
||||
|
||||
# TODO: remove after we run custom scripts through ansible-inventory
|
||||
@@ -228,6 +226,7 @@ def load_inventory_source(source, group_filter_re=None,
|
||||
'''
|
||||
# Sanity check: We sanitize these module names for our API but Ansible proper doesn't follow
|
||||
# good naming conventions
|
||||
source = source.replace('rhv.py', 'ovirt4.py')
|
||||
source = source.replace('satellite6.py', 'foreman.py')
|
||||
source = source.replace('vmware.py', 'vmware_inventory.py')
|
||||
if not os.path.exists(source):
|
||||
@@ -251,7 +250,7 @@ def load_inventory_source(source, group_filter_re=None,
|
||||
return inventory.all_group
|
||||
|
||||
|
||||
class Command(NoArgsCommand):
|
||||
class Command(BaseCommand):
|
||||
'''
|
||||
Management command to import inventory from a directory, ini file, or
|
||||
dynamic inventory script.
|
||||
@@ -259,50 +258,46 @@ class Command(NoArgsCommand):
|
||||
|
||||
help = 'Import or sync external inventory sources'
|
||||
|
||||
option_list = NoArgsCommand.option_list + (
|
||||
make_option('--inventory-name', dest='inventory_name', type='str',
|
||||
default=None, metavar='n',
|
||||
help='name of inventory to sync'),
|
||||
make_option('--inventory-id', dest='inventory_id', type='int',
|
||||
default=None, metavar='i', help='id of inventory to sync'),
|
||||
make_option('--overwrite', dest='overwrite', action='store_true',
|
||||
metavar="o", default=False,
|
||||
help='overwrite the destination hosts and groups'),
|
||||
make_option('--overwrite-vars', dest='overwrite_vars',
|
||||
action='store_true', metavar="V", default=False,
|
||||
help='overwrite (rather than merge) variables'),
|
||||
make_option('--keep-vars', dest='keep_vars', action='store_true',
|
||||
metavar="k", default=False,
|
||||
help='use database variables if set'),
|
||||
make_option('--custom', dest='custom', action='store_true',
|
||||
metavar="c", default=False,
|
||||
help='this is a custom inventory script'),
|
||||
make_option('--source', dest='source', type='str', default=None,
|
||||
metavar='s', help='inventory directory, file, or script '
|
||||
'to load'),
|
||||
make_option('--enabled-var', dest='enabled_var', type='str',
|
||||
default=None, metavar='v', help='host variable used to '
|
||||
'set/clear enabled flag when host is online/offline, may '
|
||||
'be specified as "foo.bar" to traverse nested dicts.'),
|
||||
make_option('--enabled-value', dest='enabled_value', type='str',
|
||||
default=None, metavar='v', help='value of host variable '
|
||||
'specified by --enabled-var that indicates host is '
|
||||
'enabled/online.'),
|
||||
make_option('--group-filter', dest='group_filter', type='str',
|
||||
default=None, metavar='regex', help='regular expression '
|
||||
'to filter group name(s); only matches are imported.'),
|
||||
make_option('--host-filter', dest='host_filter', type='str',
|
||||
default=None, metavar='regex', help='regular expression '
|
||||
'to filter host name(s); only matches are imported.'),
|
||||
make_option('--exclude-empty-groups', dest='exclude_empty_groups',
|
||||
action='store_true', default=False, help='when set, '
|
||||
'exclude all groups that have no child groups, hosts, or '
|
||||
'variables.'),
|
||||
make_option('--instance-id-var', dest='instance_id_var', type='str',
|
||||
default=None, metavar='v', help='host variable that '
|
||||
'specifies the unique, immutable instance ID, may be '
|
||||
'specified as "foo.bar" to traverse nested dicts.'),
|
||||
)
|
||||
def add_arguments(self, parser):
|
||||
parser.add_argument('--inventory-name', dest='inventory_name',
|
||||
type=str, default=None, metavar='n',
|
||||
help='name of inventory to sync')
|
||||
parser.add_argument('--inventory-id', dest='inventory_id', type=int,
|
||||
default=None, metavar='i',
|
||||
help='id of inventory to sync')
|
||||
parser.add_argument('--overwrite', dest='overwrite', action='store_true', default=False,
|
||||
help='overwrite the destination hosts and groups')
|
||||
parser.add_argument('--overwrite-vars', dest='overwrite_vars',
|
||||
action='store_true', default=False,
|
||||
help='overwrite (rather than merge) variables')
|
||||
parser.add_argument('--keep-vars', dest='keep_vars', action='store_true', default=False,
|
||||
help='use database variables if set')
|
||||
parser.add_argument('--custom', dest='custom', action='store_true', default=False,
|
||||
help='this is a custom inventory script')
|
||||
parser.add_argument('--source', dest='source', type=str, default=None,
|
||||
metavar='s', help='inventory directory, file, or script to load')
|
||||
parser.add_argument('--enabled-var', dest='enabled_var', type=str,
|
||||
default=None, metavar='v', help='host variable used to '
|
||||
'set/clear enabled flag when host is online/offline, may '
|
||||
'be specified as "foo.bar" to traverse nested dicts.')
|
||||
parser.add_argument('--enabled-value', dest='enabled_value', type=str,
|
||||
default=None, metavar='v', help='value of host variable '
|
||||
'specified by --enabled-var that indicates host is '
|
||||
'enabled/online.')
|
||||
parser.add_argument('--group-filter', dest='group_filter', type=str,
|
||||
default=None, metavar='regex', help='regular expression '
|
||||
'to filter group name(s); only matches are imported.')
|
||||
parser.add_argument('--host-filter', dest='host_filter', type=str,
|
||||
default=None, metavar='regex', help='regular expression '
|
||||
'to filter host name(s); only matches are imported.')
|
||||
parser.add_argument('--exclude-empty-groups', dest='exclude_empty_groups',
|
||||
action='store_true', default=False, help='when set, '
|
||||
'exclude all groups that have no child groups, hosts, or '
|
||||
'variables.')
|
||||
parser.add_argument('--instance-id-var', dest='instance_id_var', type=str,
|
||||
default=None, metavar='v', help='host variable that '
|
||||
'specifies the unique, immutable instance ID, may be '
|
||||
'specified as "foo.bar" to traverse nested dicts.')
|
||||
|
||||
def set_logging_level(self):
|
||||
log_levels = dict(enumerate([logging.WARNING, logging.INFO,
|
||||
@@ -352,7 +347,12 @@ class Command(NoArgsCommand):
|
||||
enabled = bool(unicode(enabled_value) == unicode(enabled))
|
||||
else:
|
||||
enabled = bool(enabled)
|
||||
return enabled
|
||||
if enabled is default:
|
||||
return None
|
||||
elif isinstance(enabled, bool):
|
||||
return enabled
|
||||
else:
|
||||
raise NotImplementedError('Value of enabled {} not understood.'.format(enabled))
|
||||
|
||||
def load_inventory_from_database(self):
|
||||
'''
|
||||
@@ -400,10 +400,10 @@ class Command(NoArgsCommand):
|
||||
overwrite_vars=self.overwrite_vars,
|
||||
)
|
||||
self.inventory_update = self.inventory_source.create_inventory_update(
|
||||
job_args=json.dumps(sys.argv),
|
||||
job_env=dict(os.environ.items()),
|
||||
job_cwd=os.getcwd(),
|
||||
_eager_fields=dict(
|
||||
job_args=json.dumps(sys.argv),
|
||||
job_env=dict(os.environ.items()),
|
||||
job_cwd=os.getcwd(),
|
||||
execution_node=settings.CLUSTER_HOST_ID,
|
||||
instance_group=InstanceGroup.objects.get(name='tower'))
|
||||
)
|
||||
@@ -602,27 +602,20 @@ class Command(NoArgsCommand):
|
||||
|
||||
def _update_inventory(self):
|
||||
'''
|
||||
Update/overwrite variables from "all" group. If importing from a
|
||||
cloud source attached to a specific group, variables will be set on
|
||||
the base group, otherwise they will be set on the whole inventory.
|
||||
Update inventory variables from "all" group.
|
||||
'''
|
||||
# FIXME: figure out how "all" variables are handled in the new inventory source system
|
||||
# TODO: We disable variable overwrite here in case user-defined inventory variables get
|
||||
# mangled. But we still need to figure out a better way of processing multiple inventory
|
||||
# update variables mixing with each other.
|
||||
all_obj = self.inventory
|
||||
all_name = 'inventory'
|
||||
db_variables = all_obj.variables_dict
|
||||
if self.overwrite_vars:
|
||||
db_variables = self.all_group.variables
|
||||
else:
|
||||
db_variables.update(self.all_group.variables)
|
||||
db_variables.update(self.all_group.variables)
|
||||
if db_variables != all_obj.variables_dict:
|
||||
all_obj.variables = json.dumps(db_variables)
|
||||
all_obj.save(update_fields=['variables'])
|
||||
if self.overwrite_vars:
|
||||
logger.info('%s variables replaced from "all" group', all_name.capitalize())
|
||||
else:
|
||||
logger.info('%s variables updated from "all" group', all_name.capitalize())
|
||||
logger.info('Inventory variables updated from "all" group')
|
||||
else:
|
||||
logger.info('%s variables unmodified', all_name.capitalize())
|
||||
logger.info('Inventory variables unmodified')
|
||||
|
||||
def _create_update_groups(self):
|
||||
'''
|
||||
@@ -927,7 +920,7 @@ class Command(NoArgsCommand):
|
||||
self.inventory_update.license_error = True
|
||||
self.inventory_update.save(update_fields=['license_error'])
|
||||
|
||||
def handle_noargs(self, **options):
|
||||
def handle(self, *args, **options):
|
||||
self.verbosity = int(options.get('verbosity', 1))
|
||||
self.set_logging_level()
|
||||
self.inventory_name = options.get('inventory_name', None)
|
||||
|
||||
@@ -2,14 +2,14 @@
|
||||
# All Rights Reserved
|
||||
|
||||
from awx.main.models import Instance, InstanceGroup
|
||||
from django.core.management.base import NoArgsCommand
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
|
||||
class Command(NoArgsCommand):
|
||||
class Command(BaseCommand):
|
||||
"""List instances from the Tower database
|
||||
"""
|
||||
|
||||
def handle(self, **options):
|
||||
def handle(self, *args, **options):
|
||||
super(Command, self).__init__()
|
||||
|
||||
for instance in Instance.objects.all():
|
||||
|
||||
@@ -5,7 +5,6 @@ from awx.main.models import Instance
|
||||
from awx.main.utils.pglock import advisory_lock
|
||||
from django.conf import settings
|
||||
|
||||
from optparse import make_option
|
||||
from django.db import transaction
|
||||
from django.core.management.base import BaseCommand, CommandError
|
||||
|
||||
@@ -21,10 +20,9 @@ class Command(BaseCommand):
|
||||
'Specify `--hostname` to use this command.'
|
||||
)
|
||||
|
||||
option_list = BaseCommand.option_list + (
|
||||
make_option('--hostname', dest='hostname', type='string',
|
||||
help='Hostname used during provisioning'),
|
||||
)
|
||||
def add_arguments(self, parser):
|
||||
parser.add_argument('--hostname', dest='hostname', type=str,
|
||||
help='Hostname used during provisioning')
|
||||
|
||||
def _register_hostname(self, hostname):
|
||||
if not hostname:
|
||||
|
||||
@@ -5,20 +5,18 @@ import sys
|
||||
from awx.main.utils.pglock import advisory_lock
|
||||
from awx.main.models import Instance, InstanceGroup
|
||||
|
||||
from optparse import make_option
|
||||
from django.core.management.base import BaseCommand, CommandError
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
|
||||
option_list = BaseCommand.option_list + (
|
||||
make_option('--queuename', dest='queuename', type='string',
|
||||
help='Queue to create/update'),
|
||||
make_option('--hostnames', dest='hostnames', type='string',
|
||||
help='Comma-Delimited Hosts to add to the Queue'),
|
||||
make_option('--controller', dest='controller', type='string', default='',
|
||||
help='The controlling group (makes this an isolated group)'),
|
||||
)
|
||||
def add_arguments(self, parser):
|
||||
parser.add_argument('--queuename', dest='queuename', type=str,
|
||||
help='Queue to create/update')
|
||||
parser.add_argument('--hostnames', dest='hostnames', type=str,
|
||||
help='Comma-Delimited Hosts to add to the Queue')
|
||||
parser.add_argument('--controller', dest='controller', type=str,
|
||||
default='', help='The controlling group (makes this an isolated group)')
|
||||
|
||||
def handle(self, **options):
|
||||
queuename = options.get('queuename')
|
||||
|
||||
@@ -3,8 +3,6 @@
|
||||
import sys
|
||||
|
||||
from awx.main.models import Instance, InstanceGroup
|
||||
|
||||
from optparse import make_option
|
||||
from django.core.management.base import BaseCommand, CommandError
|
||||
|
||||
|
||||
@@ -14,14 +12,13 @@ class Command(BaseCommand):
|
||||
"Remove an instance (specified by --hostname) from the specified queue (instance group).\n"
|
||||
"In order remove the queue, use the `unregister_queue` command.")
|
||||
|
||||
option_list = BaseCommand.option_list + (
|
||||
make_option('--queuename', dest='queuename', type='string',
|
||||
help='Queue to be removed from'),
|
||||
make_option('--hostname', dest='hostname', type='string',
|
||||
help='Host to remove from queue'),
|
||||
)
|
||||
def add_arguments(self, parser):
|
||||
parser.add_argument('--queuename', dest='queuename', type=str,
|
||||
help='Queue to be removed from')
|
||||
parser.add_argument('--hostname', dest='hostname', type=str,
|
||||
help='Host to remove from queue')
|
||||
|
||||
def handle(self, **options):
|
||||
def handle(self, *arg, **options):
|
||||
if not options.get('queuename'):
|
||||
raise CommandError('Must specify `--queuename` in order to use command.')
|
||||
ig = InstanceGroup.objects.filter(name=options.get('queuename'))
|
||||
@@ -36,4 +33,3 @@ class Command(BaseCommand):
|
||||
i = i.first()
|
||||
ig.instances.remove(i)
|
||||
print("Instance removed from instance group")
|
||||
|
||||
|
||||
201
awx/main/management/commands/replay_job_events.py
Normal file
201
awx/main/management/commands/replay_job_events.py
Normal file
@@ -0,0 +1,201 @@
|
||||
# Copyright (c) 2017 Ansible by Red Hat
|
||||
# All Rights Reserved.
|
||||
|
||||
import sys
|
||||
import time
|
||||
import json
|
||||
|
||||
from django.utils import timezone
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
from awx.main.models import (
|
||||
UnifiedJob,
|
||||
Job,
|
||||
AdHocCommand,
|
||||
ProjectUpdate,
|
||||
InventoryUpdate,
|
||||
SystemJob
|
||||
)
|
||||
from awx.main.consumers import emit_channel_notification
|
||||
from awx.api.serializers import (
|
||||
JobEventWebSocketSerializer,
|
||||
AdHocCommandEventWebSocketSerializer,
|
||||
ProjectUpdateEventWebSocketSerializer,
|
||||
InventoryUpdateEventWebSocketSerializer,
|
||||
SystemJobEventWebSocketSerializer
|
||||
)
|
||||
|
||||
|
||||
class ReplayJobEvents():
    """Replay the stored events of a finished job over websockets.

    Events are emitted in ``created`` order and paced so that the gaps
    between consecutive events approximate the gaps observed while the
    job originally ran, optionally scaled by a speedup factor.
    """

    # created timestamp of the first event in the recording
    recording_start = None
    # wall-clock timestamp at which the replay began
    replay_start = None

    def now(self):
        """Current wall-clock time; separate method so tests can stub it."""
        return timezone.now()

    def start(self, first_event_created):
        """Anchor both clocks: the recording clock at the first event's
        created time, the replay clock at the current wall-clock time."""
        self.recording_start = first_event_created
        self.replay_start = self.now()

    def lateness(self, now, created):
        """Return, in seconds, how far the replay lags the recording for
        an event created at ``created`` when the wall clock reads ``now``.

        Positive means the replay is behind schedule.
        """
        # Elapsed replay time minus elapsed recording time.
        # NOTE(review): the original subtracted recording_start from the
        # wall clock and replay_start from the event time, mixing the two
        # clocks; corrected here to compare like with like.
        time_passed = now - self.replay_start
        job_event_time = created - self.recording_start

        return (time_passed - job_event_time).total_seconds()

    def get_job(self, job_id):
        """Return the concrete (real-instance) job for ``job_id``.

        Exits the process with status 1 if no such unified job exists.
        """
        try:
            unified_job = UnifiedJob.objects.get(id=job_id)
        except UnifiedJob.DoesNotExist:
            print("UnifiedJob {} not found.".format(job_id))
            sys.exit(1)

        return unified_job.get_real_instance()

    def sleep(self, seconds):
        """Sleep wrapper; separate method so tests can stub it."""
        time.sleep(seconds)

    def replay_elapsed(self):
        """timedelta since the replay started."""
        return (self.now() - self.replay_start)

    def recording_elapsed(self, created):
        """timedelta from the first recorded event to ``created``."""
        return (created - self.recording_start)

    def replay_offset(self, created, speed):
        """Seconds the replay is ahead of (negative) or behind (positive)
        the speed-adjusted recording timeline at event ``created``."""
        return self.replay_elapsed().total_seconds() - (self.recording_elapsed(created).total_seconds() * (1.0 / speed))

    def get_job_events(self, job):
        """Return the job's event queryset ordered by ``created``.

        Raises RuntimeError for unsupported job types or when the job
        has no events at all.
        """
        if type(job) is Job:
            job_events = job.job_events.order_by('created')
        elif type(job) is AdHocCommand:
            job_events = job.ad_hoc_command_events.order_by('created')
        elif type(job) is ProjectUpdate:
            job_events = job.project_update_events.order_by('created')
        elif type(job) is InventoryUpdate:
            job_events = job.inventory_update_events.order_by('created')
        elif type(job) is SystemJob:
            job_events = job.system_job_events.order_by('created')
        else:
            # Previously an unsupported type fell through and raised a
            # confusing NameError on the undefined local below.
            raise RuntimeError("Job is of type {} and replay is not yet supported.".format(type(job)))
        if job_events.count() == 0:
            raise RuntimeError("No events for job id {}".format(job.id))
        return job_events

    def get_serializer(self, job):
        """Return the websocket serializer class matching the job type.

        Raises RuntimeError for unsupported job types.
        """
        if type(job) is Job:
            return JobEventWebSocketSerializer
        elif type(job) is AdHocCommand:
            return AdHocCommandEventWebSocketSerializer
        elif type(job) is ProjectUpdate:
            return ProjectUpdateEventWebSocketSerializer
        elif type(job) is InventoryUpdate:
            return InventoryUpdateEventWebSocketSerializer
        elif type(job) is SystemJob:
            return SystemJobEventWebSocketSerializer
        else:
            # (an unreachable `sys.exit(1)` after this raise was removed)
            raise RuntimeError("Job is of type {} and replay is not yet supported.".format(type(job)))

    def run(self, job_id, speed=1.0, verbosity=0):
        """Replay every event of job ``job_id`` over the websocket layer.

        ``speed`` > 1 compresses the timeline; ``verbosity`` >= 2 prints
        summary statistics, >= 3 prints per-event pacing details.
        """
        stats = {
            'events_ontime': {
                'total': 0,
                'percentage': 0,
            },
            'events_late': {
                'total': 0,
                'percentage': 0,
                'lateness_total': 0,
                'lateness_average': 0,
            },
            'events_total': 0,
            'events_distance_total': 0,
            'events_distance_average': 0,
            'recording_start': 0,
            'recording_end': 0,
            'recording_duration': 0,
            'replay_start': 0,
            'replay_end': 0,
            'replay_duration': 0,
        }
        try:
            job = self.get_job(job_id)
            job_events = self.get_job_events(job)
            serializer = self.get_serializer(job)
        except RuntimeError as e:
            # str(e) instead of e.message: the latter is deprecated and
            # removed in Python 3.
            print("{}".format(str(e)))
            sys.exit(1)

        je_previous = None
        for je_current in job_events:
            if not je_previous:
                stats['recording_start'] = je_current.created
                self.start(je_current.created)
                stats['replay_start'] = self.replay_start
                je_previous = je_current

            je_serialized = serializer(je_current).data
            emit_channel_notification('{}-{}'.format(je_serialized['group_name'], job.id), je_serialized)

            replay_offset = self.replay_offset(je_previous.created, speed)
            recording_diff = (je_current.created - je_previous.created).total_seconds() * (1.0 / speed)
            stats['events_distance_total'] += recording_diff
            if verbosity >= 3:
                print("recording: next job in {} seconds".format(recording_diff))
            if replay_offset >= 0:
                replay_diff = recording_diff - replay_offset

                if replay_diff > 0:
                    stats['events_ontime']['total'] += 1
                    if verbosity >= 3:
                        print("\treplay: sleep for {} seconds".format(replay_diff))
                    self.sleep(replay_diff)
                else:
                    stats['events_late']['total'] += 1
                    stats['events_late']['lateness_total'] += (replay_diff * -1)
                    if verbosity >= 3:
                        print("\treplay: too far behind to sleep {} seconds".format(replay_diff))
            else:
                replay_offset = self.replay_offset(je_current.created, speed)
                stats['events_late']['lateness_total'] += (replay_offset * -1)
                stats['events_late']['total'] += 1
                if verbosity >= 3:
                    print("\treplay: behind by {} seconds".format(replay_offset))

            stats['events_total'] += 1
            je_previous = je_current

        stats['replay_end'] = self.now()
        stats['replay_duration'] = (stats['replay_end'] - stats['replay_start']).total_seconds()
        stats['replay_start'] = stats['replay_start'].isoformat()
        stats['replay_end'] = stats['replay_end'].isoformat()

        stats['recording_end'] = je_current.created
        stats['recording_duration'] = (stats['recording_end'] - stats['recording_start']).total_seconds()
        stats['recording_start'] = stats['recording_start'].isoformat()
        stats['recording_end'] = stats['recording_end'].isoformat()

        # get_job_events() guarantees at least one event, so events_total > 0.
        stats['events_ontime']['percentage'] = (stats['events_ontime']['total'] / float(stats['events_total'])) * 100.00
        stats['events_late']['percentage'] = (stats['events_late']['total'] / float(stats['events_total'])) * 100.00
        stats['events_distance_average'] = stats['events_distance_total'] / stats['events_total']
        if stats['events_late']['total']:
            # Guard against ZeroDivisionError when every event was on time.
            stats['events_late']['lateness_average'] = stats['events_late']['lateness_total'] / stats['events_late']['total']
        if verbosity >= 2:
            print(json.dumps(stats, indent=4, sort_keys=True))
|
||||
|
||||
|
||||
class Command(BaseCommand):
    """Management command entry point for replaying a job's events."""

    help = 'Replay job events over websockets ordered by created on date.'

    def add_arguments(self, parser):
        # Both options are optional; handle() falls back to defaults.
        parser.add_argument('--job_id', dest='job_id', type=int, metavar='j',
                            help='Id of the job to replay (job or adhoc)')
        parser.add_argument('--speed', dest='speed', type=int, metavar='s',
                            help='Speedup factor.')

    def handle(self, *args, **options):
        # `or` fallbacks cover both missing and falsy option values.
        ReplayJobEvents().run(
            options.get('job_id'),
            options.get('speed') or 1,
            options.get('verbosity') or 0,
        )
|
||||
@@ -3,26 +3,29 @@
|
||||
|
||||
# Python
|
||||
import logging
|
||||
import os
|
||||
import signal
|
||||
import time
|
||||
from uuid import UUID
|
||||
from multiprocessing import Process
|
||||
from multiprocessing import Queue as MPQueue
|
||||
from Queue import Empty as QueueEmpty
|
||||
from Queue import Full as QueueFull
|
||||
import os
|
||||
|
||||
from kombu import Connection, Exchange, Queue
|
||||
from kombu.mixins import ConsumerMixin
|
||||
|
||||
# Django
|
||||
from django.conf import settings
|
||||
from django.core.management.base import NoArgsCommand
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.db import connection as django_connection
|
||||
from django.db import DatabaseError
|
||||
from django.db import DatabaseError, OperationalError
|
||||
from django.db.utils import InterfaceError, InternalError
|
||||
from django.core.cache import cache as django_cache
|
||||
|
||||
# AWX
|
||||
from awx.main.models import * # noqa
|
||||
from awx.main.consumers import emit_channel_notification
|
||||
|
||||
logger = logging.getLogger('awx.main.commands.run_callback_receiver')
|
||||
|
||||
@@ -39,6 +42,9 @@ class WorkerSignalHandler:
|
||||
|
||||
|
||||
class CallbackBrokerWorker(ConsumerMixin):
|
||||
|
||||
MAX_RETRIES = 2
|
||||
|
||||
def __init__(self, connection, use_workers=True):
|
||||
self.connection = connection
|
||||
self.worker_queues = []
|
||||
@@ -123,8 +129,17 @@ class CallbackBrokerWorker(ConsumerMixin):
|
||||
logger.error("Exception on worker thread, restarting: " + str(e))
|
||||
continue
|
||||
try:
|
||||
if 'job_id' not in body and 'ad_hoc_command_id' not in body:
|
||||
raise Exception('Payload does not have a job_id or ad_hoc_command_id')
|
||||
|
||||
event_map = {
|
||||
'job_id': JobEvent,
|
||||
'ad_hoc_command_id': AdHocCommandEvent,
|
||||
'project_update_id': ProjectUpdateEvent,
|
||||
'inventory_update_id': InventoryUpdateEvent,
|
||||
'system_job_id': SystemJobEvent,
|
||||
}
|
||||
|
||||
if not any([key in body for key in event_map]):
|
||||
raise Exception('Payload does not have a job identifier')
|
||||
if settings.DEBUG:
|
||||
from pygments import highlight
|
||||
from pygments.lexers import PythonLexer
|
||||
@@ -132,14 +147,51 @@ class CallbackBrokerWorker(ConsumerMixin):
|
||||
from pprint import pformat
|
||||
logger.info('Body: {}'.format(
|
||||
highlight(pformat(body, width=160), PythonLexer(), Terminal256Formatter(style='friendly'))
|
||||
))
|
||||
try:
|
||||
if 'job_id' in body:
|
||||
JobEvent.create_from_data(**body)
|
||||
elif 'ad_hoc_command_id' in body:
|
||||
AdHocCommandEvent.create_from_data(**body)
|
||||
except DatabaseError as e:
|
||||
logger.error('Database Error Saving Job Event: {}'.format(e))
|
||||
)[:1024 * 4])
|
||||
|
||||
def _save_event_data():
|
||||
for key, cls in event_map.items():
|
||||
if key in body:
|
||||
cls.create_from_data(**body)
|
||||
|
||||
job_identifier = 'unknown job'
|
||||
for key in event_map.keys():
|
||||
if key in body:
|
||||
job_identifier = body[key]
|
||||
break
|
||||
|
||||
if body.get('event') == 'EOF':
|
||||
# EOF events are sent when stdout for the running task is
|
||||
# closed. don't actually persist them to the database; we
|
||||
# just use them to report `summary` websocket events as an
|
||||
# approximation for when a job is "done"
|
||||
emit_channel_notification(
|
||||
'jobs-summary',
|
||||
dict(group_name='jobs', unified_job_id=job_identifier)
|
||||
)
|
||||
continue
|
||||
|
||||
retries = 0
|
||||
while retries <= self.MAX_RETRIES:
|
||||
try:
|
||||
_save_event_data()
|
||||
break
|
||||
except (OperationalError, InterfaceError, InternalError) as e:
|
||||
if retries >= self.MAX_RETRIES:
|
||||
logger.exception('Worker could not re-establish database connectivity, shutting down gracefully: Job {}'.format(job_identifier))
|
||||
os.kill(os.getppid(), signal.SIGINT)
|
||||
return
|
||||
delay = 60 * retries
|
||||
logger.exception('Database Error Saving Job Event, retry #{i} in {delay} seconds:'.format(
|
||||
i=retries + 1,
|
||||
delay=delay
|
||||
))
|
||||
django_connection.close()
|
||||
time.sleep(delay)
|
||||
retries += 1
|
||||
except DatabaseError as e:
|
||||
logger.exception('Database Error Saving Job Event for Job {}'.format(job_identifier))
|
||||
break
|
||||
except Exception as exc:
|
||||
import traceback
|
||||
tb = traceback.format_exc()
|
||||
@@ -147,7 +199,7 @@ class CallbackBrokerWorker(ConsumerMixin):
|
||||
logger.error('Detail: {}'.format(tb))
|
||||
|
||||
|
||||
class Command(NoArgsCommand):
|
||||
class Command(BaseCommand):
|
||||
'''
|
||||
Save Job Callback receiver (see awx.plugins.callbacks.job_event_callback)
|
||||
Runs as a management command and receives job save events. It then hands
|
||||
@@ -155,8 +207,8 @@ class Command(NoArgsCommand):
|
||||
'''
|
||||
help = 'Launch the job callback receiver'
|
||||
|
||||
def handle_noargs(self, **options):
|
||||
with Connection(settings.BROKER_URL) as conn:
|
||||
def handle(self, *arg, **options):
|
||||
with Connection(settings.CELERY_BROKER_URL) as conn:
|
||||
try:
|
||||
worker = CallbackBrokerWorker(conn)
|
||||
worker.run()
|
||||
|
||||
@@ -1,13 +1,11 @@
|
||||
# Copyright (c) 2015 Ansible, Inc.
|
||||
# All Rights Reserved
|
||||
|
||||
from optparse import make_option
|
||||
|
||||
# Django
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
# AWX
|
||||
from awx.main.models import * # noqa
|
||||
from awx.main.models import UnifiedJob
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
@@ -17,14 +15,13 @@ class Command(BaseCommand):
|
||||
|
||||
help = 'Display some simple statistics'
|
||||
|
||||
option_list = BaseCommand.option_list + (
|
||||
make_option('--stat',
|
||||
action='store',
|
||||
dest='stat',
|
||||
type="string",
|
||||
default="jobs_running",
|
||||
help='Select which stat to get information for'),
|
||||
)
|
||||
def add_arguments(self, parser):
|
||||
parser.add_argument('--stat',
|
||||
action='store',
|
||||
dest='stat',
|
||||
type=str,
|
||||
default="jobs_running",
|
||||
help='Select which stat to get information for')
|
||||
|
||||
def job_stats(self, state):
|
||||
return UnifiedJob.objects.filter(status=state).count()
|
||||
@@ -34,5 +31,3 @@ class Command(BaseCommand):
|
||||
self.stdout.write(str(self.job_stats(options['stat'][5:])))
|
||||
else:
|
||||
self.stdout.write("Supported stats: jobs_{state}")
|
||||
|
||||
|
||||
|
||||
50
awx/main/management/commands/test_isolated_connection.py
Normal file
50
awx/main/management/commands/test_isolated_connection.py
Normal file
@@ -0,0 +1,50 @@
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
import tempfile
|
||||
from optparse import make_option
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.management.base import BaseCommand, CommandError
|
||||
|
||||
from awx.main.expect import run
|
||||
|
||||
|
||||
class Command(BaseCommand):
    """Tests SSH connectivity between a controller and target isolated node"""
    help = 'Tests SSH connectivity between a controller and target isolated node'

    def add_arguments(self, parser):
        # add_arguments replaces the deprecated option_list/make_option
        # pattern, consistent with the other management commands.
        parser.add_argument('--hostname', dest='hostname', type=str,
                            help='Hostname of an isolated node')

    def handle(self, *args, **options):
        """Run an ad-hoc `ansible -m shell -a hostname` against the
        isolated node; exits with ansible's return code on failure.

        Raises CommandError when --hostname was not supplied.
        """
        hostname = options.get('hostname')
        if not hostname:
            raise CommandError("--hostname is a required argument")

        # Create the scratch dir before entering the try block so the
        # finally-clause cleanup can never reference an unbound `path`.
        path = tempfile.mkdtemp(prefix='awx_isolated_ssh', dir=settings.AWX_PROOT_BASE_PATH)
        try:
            # Named `cmd` so the handle() *args parameter is not shadowed.
            cmd = [
                'ansible', 'all', '-i', '{},'.format(hostname), '-u',
                settings.AWX_ISOLATED_USERNAME, '-T5', '-m', 'shell',
                '-a', 'hostname', '-vvv'
            ]
            if all([
                getattr(settings, 'AWX_ISOLATED_KEY_GENERATION', False) is True,
                getattr(settings, 'AWX_ISOLATED_PRIVATE_KEY', None)
            ]):
                # Hand the private key to an ssh-agent via a fifo so it is
                # never written to regular disk storage.
                ssh_key_path = os.path.join(path, '.isolated')
                ssh_auth_sock = os.path.join(path, 'ssh_auth.sock')
                run.open_fifo_write(ssh_key_path, settings.AWX_ISOLATED_PRIVATE_KEY)
                cmd = run.wrap_args_with_ssh_agent(cmd, ssh_key_path, ssh_auth_sock)
            try:
                # print() call form works on Python 2 and 3 alike
                # (original used the py2-only print statement).
                print(' '.join(cmd))
                subprocess.check_call(cmd)
            except subprocess.CalledProcessError as e:
                sys.exit(e.returncode)
        finally:
            shutil.rmtree(path)
|
||||
|
||||
@@ -5,7 +5,6 @@ import sys
|
||||
from awx.main.utils.pglock import advisory_lock
|
||||
from awx.main.models import InstanceGroup
|
||||
|
||||
from optparse import make_option
|
||||
from django.db import transaction
|
||||
from django.core.management.base import BaseCommand, CommandError
|
||||
|
||||
@@ -17,13 +16,12 @@ class Command(BaseCommand):
|
||||
"Instances inside of queue will continue to exist, \n"
|
||||
"but jobs will no longer be processed by queue.")
|
||||
|
||||
option_list = BaseCommand.option_list + (
|
||||
make_option('--queuename', dest='queuename', type='string',
|
||||
help='Queue to create/update'),
|
||||
)
|
||||
def add_arguments(self, parser):
|
||||
parser.add_argument('--queuename', dest='queuename', type=str,
|
||||
help='Queue to create/update')
|
||||
|
||||
@transaction.atomic
|
||||
def handle(self, **options):
|
||||
def handle(self, *args, **options):
|
||||
queuename = options.get('queuename')
|
||||
if not queuename:
|
||||
raise CommandError('Must specify `--queuename` in order to use command.')
|
||||
|
||||
@@ -1,9 +1,6 @@
|
||||
# Copyright (c) 2016 Ansible, Inc.
|
||||
# All Rights Reserved
|
||||
|
||||
# Python
|
||||
from optparse import make_option
|
||||
|
||||
# Django
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.core.management.base import CommandError
|
||||
@@ -25,12 +22,11 @@ class UpdatePassword(object):
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
option_list = BaseCommand.option_list + (
|
||||
make_option('--username', dest='username', action='store', type='string', default=None,
|
||||
help='username to change the password for'),
|
||||
make_option('--password', dest='password', action='store', type='string', default=None,
|
||||
help='new password for user'),
|
||||
)
|
||||
def add_arguments(self, parser):
|
||||
parser.add_argument('--username', dest='username', action='store', type=str, default=None,
|
||||
help='username to change the password for')
|
||||
parser.add_argument('--password', dest='password', action='store', type=str, default=None,
|
||||
help='new password for user')
|
||||
|
||||
def handle(self, *args, **options):
|
||||
if not options['username']:
|
||||
@@ -43,5 +39,3 @@ class Command(BaseCommand):
|
||||
if res:
|
||||
return "Password updated"
|
||||
return "Password not updated"
|
||||
|
||||
|
||||
|
||||
@@ -21,11 +21,15 @@ class HostManager(models.Manager):
|
||||
"""Custom manager class for Hosts model."""
|
||||
|
||||
def active_count(self):
|
||||
"""Return count of active, unique hosts for licensing."""
|
||||
try:
|
||||
return self.order_by('name').distinct('name').count()
|
||||
except NotImplementedError: # For unit tests only, SQLite doesn't support distinct('name')
|
||||
return len(set(self.values_list('name', flat=True)))
|
||||
"""Return count of active, unique hosts for licensing.
|
||||
Construction of query involves:
|
||||
- remove any ordering specified in model's Meta
|
||||
- Exclude hosts sourced from another Tower
|
||||
- Restrict the query to only return the name column
|
||||
- Only consider results that are unique
|
||||
- Return the count of this query
|
||||
"""
|
||||
return self.order_by().exclude(inventory_sources__source='tower').values('name').distinct().count()
|
||||
|
||||
def get_queryset(self):
|
||||
"""When the parent instance of the host query set has a `kind=smart` and a `host_filter`
|
||||
|
||||
@@ -5,6 +5,10 @@ import logging
|
||||
import threading
|
||||
import uuid
|
||||
import six
|
||||
import time
|
||||
import cProfile
|
||||
import pstats
|
||||
import os
|
||||
|
||||
from django.conf import settings
|
||||
from django.contrib.auth.models import User
|
||||
@@ -25,6 +29,40 @@ from awx.conf import fields, register
|
||||
|
||||
logger = logging.getLogger('awx.main.middleware')
|
||||
analytics_logger = logging.getLogger('awx.analytics.activity_stream')
|
||||
perf_logger = logging.getLogger('awx.analytics.performance')
|
||||
|
||||
|
||||
class TimingMiddleware(threading.local):
|
||||
|
||||
dest = '/var/lib/awx/profile'
|
||||
|
||||
def process_request(self, request):
|
||||
self.start_time = time.time()
|
||||
if settings.AWX_REQUEST_PROFILE:
|
||||
self.prof = cProfile.Profile()
|
||||
self.prof.enable()
|
||||
|
||||
def process_response(self, request, response):
|
||||
if not hasattr(self, 'start_time'): # some tools may not invoke process_request
|
||||
return response
|
||||
total_time = time.time() - self.start_time
|
||||
response['X-API-Total-Time'] = '%0.3fs' % total_time
|
||||
if settings.AWX_REQUEST_PROFILE:
|
||||
self.prof.disable()
|
||||
cprofile_file = self.save_profile_file(request)
|
||||
response['cprofile_file'] = cprofile_file
|
||||
perf_logger.info('api response times', extra=dict(python_objects=dict(request=request, response=response)))
|
||||
return response
|
||||
|
||||
def save_profile_file(self, request):
|
||||
if not os.path.isdir(self.dest):
|
||||
os.makedirs(self.dest)
|
||||
filename = '%.3fs-%s' % (pstats.Stats(self.prof).total_tt, uuid.uuid4())
|
||||
filepath = os.path.join(self.dest, filename)
|
||||
with open(filepath, 'w') as f:
|
||||
f.write('%s %s\n' % (request.method, request.get_full_path()))
|
||||
pstats.Stats(self.prof, stream=f).sort_stats('cumulative').print_stats()
|
||||
return filepath
|
||||
|
||||
|
||||
class ActivityStreamMiddleware(threading.local):
|
||||
|
||||
@@ -8,14 +8,9 @@ from __future__ import unicode_literals
|
||||
from django.db import migrations, models
|
||||
from django.conf import settings
|
||||
import awx.main.fields
|
||||
import jsonfield.fields
|
||||
|
||||
|
||||
def update_dashed_host_variables(apps, schema_editor):
|
||||
Host = apps.get_model('main', 'Host')
|
||||
for host in Host.objects.filter(variables='---'):
|
||||
host.variables = ''
|
||||
host.save()
|
||||
import _squashed
|
||||
from _squashed_30 import SQUASHED_30
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
@@ -27,13 +22,7 @@ class Migration(migrations.Migration):
|
||||
(b'main', '0025_v300_update_rbac_parents'),
|
||||
(b'main', '0026_v300_credential_unique'),
|
||||
(b'main', '0027_v300_team_migrations'),
|
||||
(b'main', '0028_v300_org_team_cascade'),
|
||||
(b'main', '0029_v302_add_ask_skip_tags'),
|
||||
(b'main', '0030_v302_job_survey_passwords'),
|
||||
(b'main', '0031_v302_migrate_survey_passwords'),
|
||||
(b'main', '0032_v302_credential_permissions_update'),
|
||||
(b'main', '0033_v303_v245_host_variable_fix'),]
|
||||
|
||||
(b'main', '0028_v300_org_team_cascade')] + _squashed.replaces(SQUASHED_30, applied=True)
|
||||
|
||||
dependencies = [
|
||||
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
||||
@@ -130,27 +119,4 @@ class Migration(migrations.Migration):
|
||||
field=models.ForeignKey(related_name='teams', to='main.Organization'),
|
||||
preserve_default=False,
|
||||
),
|
||||
# add ask skip tags
|
||||
migrations.AddField(
|
||||
model_name='jobtemplate',
|
||||
name='ask_skip_tags_on_launch',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
# job survery passwords
|
||||
migrations.AddField(
|
||||
model_name='job',
|
||||
name='survey_passwords',
|
||||
field=jsonfield.fields.JSONField(default={}, editable=False, blank=True),
|
||||
),
|
||||
# RBAC credential permission updates
|
||||
migrations.AlterField(
|
||||
model_name='credential',
|
||||
name='admin_role',
|
||||
field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'singleton:system_administrator', b'organization.admin_role'], to='main.Role', null=b'True'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='credential',
|
||||
name='use_role',
|
||||
field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'admin_role'], to='main.Role', null=b'True'),
|
||||
),
|
||||
]
|
||||
] + _squashed.operations(SQUASHED_30, applied=True)
|
||||
|
||||
@@ -8,6 +8,9 @@ import django.db.models.deletion
|
||||
import awx.main.models.workflow
|
||||
import awx.main.fields
|
||||
|
||||
import _squashed
|
||||
from _squashed_30 import SQUASHED_30
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
@@ -15,11 +18,11 @@ class Migration(migrations.Migration):
|
||||
('main', '0003_squashed_v300_v303_updates'),
|
||||
]
|
||||
|
||||
replaces = [
|
||||
replaces = _squashed.replaces(SQUASHED_30) + [
|
||||
(b'main', '0034_v310_release'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
operations = _squashed.operations(SQUASHED_30) + [
|
||||
# Create ChannelGroup table
|
||||
migrations.CreateModel(
|
||||
name='ChannelGroup',
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user