Mirror of https://github.com/ansible/awx.git (synced 2026-02-07 04:28:23 -03:30)

Compare commits: 1024 commits
.github/BOTMETA.yml (vendored, new file, +16)
@@ -0,0 +1,16 @@
+files:
+  awx/ui/:
+    labels: component:ui
+    maintainers: $team_ui
+  awx/api/:
+    labels: component:api
+    maintainers: $team_api
+  awx/main/:
+    labels: component:api
+    maintainers: $team_api
+  installer/:
+    labels: component:installer
+
+macros:
+  team_api: wwitzel3 matburt chrismeyersfsu cchurch AlanCoding ryanpetrello jangstur
+  team_ui: jlmitch5 jaredevantabor mabashian gconsidine marshmalien benthomasson
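
This BOTMETA file is what the triage bot reads to route incoming pull requests: a change under one of the `files:` paths picks up that entry's label, and the `$team_*` macros expand to the maintainers to notify. A minimal sketch of that path-to-label lookup, assuming this exact mapping (the `label_for_path` helper is hypothetical; the real bot parses the YAML itself):

```bash
#!/usr/bin/env bash
# Hypothetical helper mirroring the BOTMETA mapping above.
label_for_path() {
    case "$1" in
        awx/ui/*)             echo "component:ui" ;;
        awx/api/*|awx/main/*) echo "component:api" ;;
        installer/*)          echo "component:installer" ;;
        *)                    echo "unlabeled" ;;
    esac
}

label_for_path "awx/ui/client/src/app.js"   # -> component:ui
label_for_path "awx/main/models/jobs.py"    # -> component:api
```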
ISSUE_TEMPLATE.md → .github/ISSUE_TEMPLATE.md (vendored, 27 changes)
@@ -1,31 +1,40 @@
### Summary
##### ISSUE TYPE
<!--- Pick one below and delete the rest: -->
- Bug Report
- Feature Idea
- Documentation

##### COMPONENT NAME
<!-- Pick the area of AWX for this issue, you can have multiple, delete the rest: -->
- API
- UI
- Installer

##### SUMMARY
<!-- Briefly describe the problem. -->

### Environment

<!--
##### ENVIRONMENT
* AWX version: X.Y.Z
* AWX install method: openshift, minishift, docker on linux, docker for mac, boot2docker
* Ansible version: X.Y.Z
* Operating System:
* Web Browser:
-->

### Steps To Reproduce:
##### STEPS TO REPRODUCE

<!-- For bugs, please show exactly how to reproduce the problem. For new
features, show how the feature would be used. -->

### Expected Results:
##### EXPECTED RESULTS

<!-- For bug reports, what did you expect to happen when running the steps
above? -->

### Actual Results:
##### ACTUAL RESULTS

<!-- For bug reports, what actually happened? -->

### Additional Information:
##### ADDITIONAL INFORMATION

<!-- Include any links to sosreport, database dumps, screenshots or other
information. -->
.github/LABEL_MAP.md (vendored, new file, +9)
@@ -0,0 +1,9 @@
+Bug Report: type:bug
+Bugfix Pull Request: type:bug
+Feature Request: type:enhancement
+Feature Pull Request: type:enhancement
+UI: component:ui
+API: component:api
+Installer: component:installer
+Docs Pull Request: component:docs
+Documentation: component:docs
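
The label map pairs the human-readable choices offered by the issue and pull request templates with the repository's machine-readable labels. A rough sketch of how a triage script could consult it, assuming the simple `Selection: label` line format above (the `map_label` function is illustrative, not the bot's actual code):

```bash
#!/usr/bin/env bash
# Hypothetical lookup: print the label for a template selection.
map_label() {
    # Match the selection at the start of a line, then strip
    # everything up to and including the first ": " separator.
    grep -i "^$1:" .github/LABEL_MAP.md | sed 's/^[^:]*: *//'
}

map_label "Bug Report"           # -> type:bug
map_label "Feature Pull Request" # -> type:enhancement
```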
.github/PULL_REQUEST_TEMPLATE.md (vendored, new file, +39)
@@ -0,0 +1,39 @@
+##### SUMMARY
+<!--- Describe the change, including rationale and design decisions -->
+
+<!---
+If you are fixing an existing issue, please include "related #nnn" in your
+commit message and your description; but you should still explain what
+the change does.
+-->
+
+##### ISSUE TYPE
+<!--- Pick one below and delete the rest: -->
+- Feature Pull Request
+- Bugfix Pull Request
+- Docs Pull Request
+
+##### COMPONENT NAME
+<!--- Name of the module/plugin/module/task -->
+- API
+- UI
+- Installer
+
+##### AWX VERSION
+<!--- Paste verbatim output from `make VERSION` between quotes below -->
+```
+
+```
+
+
+##### ADDITIONAL INFORMATION
+<!---
+Include additional information to help people understand the change here.
+For bugs that don't have a linked bug report, a step-by-step reproduction
+of the problem is helpful.
+-->
+
+<!--- Paste verbatim command output below, e.g. before and after your change -->
+```
+
+```
.gitignore (vendored, 7 changes)
@@ -20,6 +20,8 @@ celerybeat-schedule
awx/ui/static
awx/ui/build_test
awx/ui/client/languages
awx/ui/templates/ui/index.html
awx/ui/templates/ui/installing.html

# Tower setup playbook testing
setup/test/roles/postgresql
@@ -47,7 +49,7 @@ __pycache__
/.istanbul.yml
**/node_modules/**
/tmp
npm-debug.log
**/npm-debug.log*

# UI build flag files
awx/ui/.deps_built
@@ -108,9 +110,12 @@ local/
requirements/vendor
.i18n_built
VERSION
.idea/*

# AWX python libs populated by requirements.txt
awx/lib/.deps_built
awx/lib/site-packages
venv/*
use_dev_supervisor.txt

.idea/*
CONTRIBUTING.md (421 changes)
@@ -1,233 +1,301 @@
AWX
===========
# AWX

Hi there! We're excited to have you as a contributor.

Have questions about this document or anything not covered here? Come chat with us on IRC (#ansible-awx on freenode) or the mailing list.
Have questions about this document or anything not covered here? Come chat with us at `#ansible-awx` on irc.freenode.net, or submit your question to the [mailing list](https://groups.google.com/forum/#!forum/awx-project).

Table of contents
-----------------
## Table of contents

* [Contributing Agreement](#dco)
* [Code of Conduct](#code-of-conduct)
* [Setting up the development environment](#setting-up-the-development-environment)
* [Things to know prior to submitting code](#things-to-know-before-contributing-code)
* [Setting up your development environment](#setting-up-your-development-environment)
* [Prerequisites](#prerequisites)
* [Local Settings](#local-settings)
* [Building the base image](#building-the-base-image)
* [Building the user interface](#building-the-user-interface)
* [Starting up the development environment](#starting-up-the-development-environment)
* [Starting the development environment at the container shell](#starting-the-container-environment-at-the-container-shell)
* [Using the development environment](#using-the-development-environment)
* [Docker](#docker)
* [Docker compose](#docker-compose)
* [Node and npm](#node-and-npm)
* [Building the environment](#building-the-environment)
* [Clone the AWX repo](#clone-the-awx-repo)
* [Create local settings](#create-local-settings)
* [Build the base image](#build-the-base-image)
* [Build the user interface](#build-the-user-interface)
* [Running the environment](#running-the-environment)
* [Start the containers](#start-the-containers)
* [Start from the container shell](#start-from-the-container-shell)
* [Post Build Steps](#post-build-steps)
* [Start a shell](#start-the-shell)
* [Create a superuser](#create-a-superuser)
* [Load the data](#load-the-data)
* [Accessing the AWX web interface](#accessing-the-awx-web-interface)
* [Purging containers and images](#purging-containers-and-images)
* [What should I work on?](#what-should-i-work-on)
* [Submitting Pull Requests](#submitting-pull-requests)
* [Reporting Issues](#reporting-issues)
* [How issues are resolved](#how-issues-are-resolved)
* [Ansible Issue Bot](#ansible-issue-bot)

DCO
===
## Things to know prior to submitting code

All contributors must use "git commit --signoff" for any
commit to be merged, and agree that usage of --signoff constitutes
agreement with the terms of DCO 1.1. Any contribution that does not
have such a signoff will not be merged.
- All code submissions are done through pull requests against the `devel` branch.
- You must use `git commit --signoff` for any commit to be merged, and agree that usage of --signoff constitutes agreement with the terms of [DCO 1.1](./DCO_1_1.md) (see the example below).
- Take care to make sure no merge commits are in the submission, and use `git rebase` vs `git merge` for this reason.
- If submitting a large code change, it's a good idea to join the `#ansible-awx` channel on irc.freenode.net, and talk about what you would like to do or add first. This not only helps everyone know what's going on, it also helps save time and effort, if the community decides some changes are needed.
- We ask all of our community members and contributors to adhere to the [Ansible code of conduct](http://docs.ansible.com/ansible/latest/community/code_of_conduct.html). If you have questions, or need assistance, please reach out to our community team at [codeofconduct@ansible.com](mailto:codeofconduct@ansible.com)
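
In practice the sign-off is a single flag on the commit; for example (the commit message here is hypothetical):

```bash
# --signoff appends a "Signed-off-by:" trailer built from your
# configured git user.name and user.email.
(host)$ git commit --signoff -m "Fix inventory sync timeout handling"

# The recorded message then ends with a line like:
#   Signed-off-by: Jane Doe <jane@example.com>
```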
```
Developer Certificate of Origin
Version 1.1
## Setting up your development environment

Copyright (C) 2004, 2006 The Linux Foundation and its contributors.
1 Letterman Drive
Suite D4700
San Francisco, CA, 94129
The AWX development environment workflow and toolchain is based on Docker, and the docker-compose tool, to provide dependencies, services, and databases necessary to run all of the components. It also binds the local source tree into the development container, making it possible to observe and test changes in real time.

Everyone is permitted to copy and distribute verbatim copies of this
license document, but changing it is not allowed.
### Prerequisites

Developer's Certificate of Origin 1.1
#### Docker

By making a contribution to this project, I certify that:
Prior to starting the development services, you'll need `docker` and `docker-compose`. On Linux, you can generally find these in your distro's packaging, but you may find that Docker themselves maintain a separate repo that tracks more closely to the latest releases.

(a) The contribution was created in whole or in part by me and I
have the right to submit it under the open source license
indicated in the file; or
For macOS and Windows, we recommend [Docker for Mac](https://www.docker.com/docker-mac) and [Docker for Windows](https://www.docker.com/docker-windows)
respectively.

(b) The contribution is based upon previous work that, to the best
of my knowledge, is covered under an appropriate open source
license and I have the right under that license to submit that
work with modifications, whether created in whole or in part
by me, under the same open source license (unless I am
permitted to submit under a different license), as indicated
in the file; or
For Linux platforms, refer to the following from Docker:

(c) The contribution was provided directly to me by some other
person who certified (a), (b) or (c) and I have not modified
it.
**Fedora**

(d) I understand and agree that this project and the contribution
are public and that a record of the contribution (including all
personal information I submit with it, including my sign-off) is
maintained indefinitely and may be redistributed consistent with
this project or the open source license(s) involved.
> https://docs.docker.com/engine/installation/linux/docker-ce/fedora/

**Centos**

> https://docs.docker.com/engine/installation/linux/docker-ce/centos/

**Ubuntu**

> https://docs.docker.com/engine/installation/linux/docker-ce/ubuntu/

**Debian**

> https://docs.docker.com/engine/installation/linux/docker-ce/debian/

**Arch**

> https://wiki.archlinux.org/index.php/Docker

#### Docker compose

If you're not using Docker for Mac, or Docker for Windows, you may need, or choose to, install the Docker compose Python module separately, in which case you'll need to run the following:

```bash
(host)$ pip install docker-compose
```

Code of Conduct
===============
#### Node and npm

All contributors are expected to adhere to the Ansible Community Code of Conduct: http://docs.ansible.com/ansible/latest/community/code_of_conduct.html
The AWX UI requires the following:

Setting up the development environment
======================================
- Node 6.x LTS version
- NPM 3.x LTS

The AWX development environment workflow and toolchain is based on Docker and the docker-compose tool to contain
the dependencies, services, and databases necessary to run everything. It will bind the local source tree into the container
making it possible to observe changes while developing.
### Build the environment

Prerequisites
-------------
`docker` and `docker-compose` are required for starting the development services, on Linux you can generally find these in your
distro's packaging, but you may find that Docker themselves maintain a separate repo that tracks more closely to the latest releases.
For macOS and Windows, we recommend Docker for Mac (https://www.docker.com/docker-mac) and Docker for Windows (https://www.docker.com/docker-windows)
respectively. Docker for Mac/Windows automatically comes with `docker-compose`.
#### Fork and clone the AWX repo

> Fedora
If you have not done so already, you'll need to fork the AWX repo on GitHub. For more on how to do this, see [Fork a Repo](https://help.github.com/articles/fork-a-repo/).

https://docs.docker.com/engine/installation/linux/docker-ce/fedora/
#### Create local settings

> Centos
AWX will import the file `awx/settings/local_settings.py` and combine it with defaults in `awx/settings/defaults.py`. This file is required for starting the development environment and startup will fail if it's not provided.

https://docs.docker.com/engine/installation/linux/docker-ce/centos/
An example is provided. Make a copy of it, and edit as needed (the defaults are usually fine):

> Ubuntu
```bash
(host)$ cp awx/settings/local_settings.py.docker_compose awx/settings/local_settings.py
```

https://docs.docker.com/engine/installation/linux/docker-ce/ubuntu/
#### Build the base image

> Debian
The AWX base container image (defined in `tools/docker-compose/Dockerfile`) contains basic OS dependencies and symbolic links into the development environment that make running the services easy.

https://docs.docker.com/engine/installation/linux/docker-ce/debian/
Run the following to build the image:

> Arch
```bash
(host)$ make docker-compose-build
```

https://wiki.archlinux.org/index.php/Docker
**NOTE**

For `docker-compose` you may need/choose to install it separately:
> The image will need to be rebuilt, if the Python requirements or OS dependencies change.

pip install docker-compose
Once the build completes, you will have an `ansible/awx_devel` image in your local image cache. Use the `docker images` command to view it, as follows:

```bash
(host)$ docker images

Local Settings
--------------
REPOSITORY          TAG      IMAGE ID       CREATED          SIZE
ansible/awx_devel   latest   ba9ec3e8df74   26 minutes ago   1.42GB
```

In development mode (i.e. when running from a source checkout), AWX
will import the file `awx/settings/local_settings.py` and combine it with defaults in `awx/settings/defaults.py`. This file
is required for starting the development environment and startup will fail if it's not provided
#### Build the user interface

An example file that works for the `docker-compose` tool is provided. Make a copy of it and edit as needed (the defaults are usually fine):
Run the following to build the AWX UI:

(host)$ cp awx/settings/local_settings.py.docker_compose awx/settings/local_settings.py
```bash
(host)$ make ui-devel
```
### Running the environment

Building the base image
-----------------------
#### Start the containers

The AWX base container image (found in `tools/docker-compose/Dockerfile`) contains basic OS dependencies and
symbolic links into the development environment that make running the services easy. You'll first need to build the image:
Start the development containers by running the following:

(host)$ make docker-compose-build

The image will only need to be rebuilt if the requirements or OS dependencies change. A core concept about this image is that it relies
on having your local development environment mapped in.

Building the user interface
---------------------------

> AWX requires the 6.x LTS version of Node and 3.x LTS NPM

In order for the AWX user interface to load from the development environment it must be built:

(host)$ make ui-devel
```bash
(host)$ make docker-compose
```

When developing features and fixes for the user interface you can find more detail here: [UI Developer README](awx/ui/README.md)
The above utilizes the image built in the previous step, and will automatically start all required services and dependent containers. Once the containers launch, your session will be attached to the *awx* container, and you'll be able to watch log messages and events in real time. You will see messages from Django, celery, and the front end build process.

Starting up the development environment
----------------------------------------------
If you start a second terminal session, you can take a look at the running containers using the `docker ps` command. For example:

There are several ways of starting the development environment depending on your desired workflow. The easiest and most common way is with:
```bash
# List running containers
(host)$ docker ps

(host)$ make docker-compose
$ docker ps
CONTAINER ID   IMAGE   COMMAND   CREATED   STATUS   PORTS   NAMES
aa4a75d6d77b   gcr.io/ansible-tower-engineering/awx_devel:devel   "/tini -- /bin/sh ..."   23 seconds ago   Up 15 seconds   0.0.0.0:5555->5555/tcp, 0.0.0.0:6899-6999->6899-6999/tcp, 0.0.0.0:8013->8013/tcp, 0.0.0.0:8043->8043/tcp, 22/tcp, 0.0.0.0:8080->8080/tcp   tools_awx_1
e4c0afeb548c   postgres:9.6   "docker-entrypoint..."   26 seconds ago   Up 23 seconds   5432/tcp   tools_postgres_1
0089699d5afd   tools_logstash   "/docker-entrypoin..."   26 seconds ago   Up 25 seconds   tools_logstash_1
4d4ff0ced266   memcached:alpine   "docker-entrypoint..."   26 seconds ago   Up 25 seconds   0.0.0.0:11211->11211/tcp   tools_memcached_1
92842acd64cd   rabbitmq:3-management   "docker-entrypoint..."   26 seconds ago   Up 24 seconds   4369/tcp, 5671-5672/tcp, 15671/tcp, 25672/tcp, 0.0.0.0:15672->15672/tcp   tools_rabbitmq_1
```
**NOTE**

> The Makefile assumes that the image you built is tagged with your current branch. This allows you to build images for different contexts or branches. When starting the containers, you can choose a specific branch by setting `COMPOSE_TAG=<branch name>` in your environment.

> For example, you might be working in a feature branch, but you want to run the containers using the `devel` image you built previously. To do that, start the containers using the following command: `$ COMPOSE_TAG=devel make docker-compose`

##### Wait for migrations to complete

The first time you start the environment, database migrations need to run in order to build the PostgreSQL database. It will take a few moments, but eventually you will see output in your terminal session that looks like the following:

```bash
awx_1 | Operations to perform:
awx_1 | Synchronize unmigrated apps: solo, api, staticfiles, debug_toolbar, messages, channels, django_extensions, ui, rest_framework, polymorphic
awx_1 | Apply all migrations: sso, taggit, sessions, djcelery, sites, kombu_transport_django, social_auth, contenttypes, auth, conf, main
awx_1 | Synchronizing apps without migrations:
awx_1 | Creating tables...
awx_1 | Running deferred SQL...
awx_1 | Installing custom SQL...
awx_1 | Running migrations:
awx_1 | Rendering model states... DONE
awx_1 | Applying contenttypes.0001_initial... OK
awx_1 | Applying contenttypes.0002_remove_content_type_name... OK
awx_1 | Applying auth.0001_initial... OK
awx_1 | Applying auth.0002_alter_permission_name_max_length... OK
awx_1 | Applying auth.0003_alter_user_email_max_length... OK
awx_1 | Applying auth.0004_alter_user_username_opts... OK
awx_1 | Applying auth.0005_alter_user_last_login_null... OK
awx_1 | Applying auth.0006_require_contenttypes_0002... OK
awx_1 | Applying taggit.0001_initial... OK
awx_1 | Applying taggit.0002_auto_20150616_2121... OK
awx_1 | Applying main.0001_initial... OK
awx_1 | Applying main.0002_squashed_v300_release... OK
awx_1 | Applying main.0003_squashed_v300_v303_updates... OK
awx_1 | Applying main.0004_squashed_v310_release... OK
awx_1 | Applying conf.0001_initial... OK
awx_1 | Applying conf.0002_v310_copy_tower_settings... OK
...
```

Once migrations are completed, you can begin using AWX.

#### Start from the container shell

Oftentimes you'll want to start the development environment without immediately starting all of the services in the *awx* container, and instead be taken directly to a shell. You can do this with the following:

```bash
(host)$ make docker-compose-test
```

Using `docker exec`, this will create a session in the running *awx* container, and place you at a command prompt, where you can run shell commands inside the container.

If you want to start and use the development environment, you'll first need to bootstrap it by running the following command:

```bash
(container)# /bootstrap_development.sh
```

The above will do all the setup tasks, including running database migrations, so it may take a couple of minutes.

This utilizes the image you built in the previous step and will automatically start all required services and dependent containers. You'll
be able to watch log messages and events as they come through.
Now you can start each service individually, or start all services in a pre-configured tmux session like so:

The Makefile assumes that the image you built is tagged with your current branch. This allows you to pre-build images for different contexts
but you may want to use a particular branch's image (for instance if you are developing a PR from a branch based on the integration branch):
```bash
(container)# cd /awx_devel
(container)# make server
```

(host)$ COMPOSE_TAG=devel make docker-compose
### Post Build Steps

Starting the development environment at the container shell
-----------------------------------------------------------
Before you can log in and use the system, you will need to create an admin user. Optionally, you may also want to load some demo data.

Often times you'll want to start the development environment without immediately starting all services and instead be taken directly to a shell:
##### Start a shell

(host)$ make docker-compose-test
To create the admin user, and load demo data, you first need to start a shell session on the *awx* container. In a new terminal session, use the `docker exec` command as follows to start the shell session:

From here you'll need to bootstrap the development environment before it will be usable for you. The `docker-compose` make target will
automatically do this:
```bash
(host)$ docker exec -it tools_awx_1 bash
```
This creates a session in the *awx* container, just as if you were using `ssh`, and allows you to execute commands within the running container.

(container)$ /bootstrap_development.sh
##### Create an admin user

Before you can log into AWX, you need to create an admin user. With this user you will be able to create more users, and begin configuring the server. From within the container shell, run the following command:

```bash
(container)# awx-manage createsuperuser
```
You will be prompted for a username, an email address, and a password, and you will be asked to confirm the password. The email address is not important, so just enter something that looks like an email address. Remember the username and password, as you will use them to log into the web interface for the first time.

From here you can start each service individually, or choose to start all services in a pre-configured tmux session:
##### Load demo data

You can optionally load some demo data. This will create a demo project, inventory, and job template. From within the container shell, run the following to load the data:

(container)# cd /awx_devel
(container)# make server
```bash
(container)# awx-manage create_preload_data
```

Using the development environment
---------------------------------
**NOTE**

With the development environment running there are a few optional steps to pre-populate the environment with data. If you are using the `docker-compose`
method above you'll first need a shell in the container:
> This information will persist in the database running in the `tools_postgres_1` container, until the container is removed. You may periodically need to recreate
this container, and thus the database, if the database schema changes in an upstream commit.

(host)$ docker exec -it tools_awx_1 bash
### Accessing the AWX web interface

Create a superuser account:
You can now log into the AWX web interface at [https://localhost:8043](https://localhost:8043), and access the API directly at [https://localhost:8043/api/](https://localhost:8043/api/).

(container)# awx-manage createsuperuser

Preload AWX with demo data:
To log in use the admin user and password you created above in [Create an admin user](#create-an-admin-user).
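
As a quick sanity check from the host, you can also hit the API root with `curl` (an illustrative spot check; `-k` is needed because the development environment serves a self-signed certificate):

```bash
# Fetch the API root; it lists the available API versions.
(host)$ curl -k https://localhost:8043/api/
```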
(container)# awx-manage create_preload_data

This information will persist in the database running in the `tools_postgres_1` container, until it is removed. You may periodically need to recreate
this container and database if the database schema changes in an upstream commit.
### Purging containers and images

You should now be able to visit and login to the AWX user interface at https://localhost:8043 or http://localhost:8013 if you have built the UI.
If not you can visit the API directly in your browser at: https://localhost:8043/api/ or http://localhost:8013/api/
When necessary, remove any AWX containers and images by running the following:

When working on the source code for AWX the code will auto-reload for you when changes are made, with the exception of any background tasks that run in
celery.
```bash
(host)$ make docker-clean
```

## What should I work on?

Occasionally it may be necessary to purge any containers and images that may have collected:
For feature work, take a look at the current [Enhancements](https://github.com/ansible/awx/issues?q=is%3Aissue+is%3Aopen+label%3Atype%3Aenhancement).

(host)$ make docker-clean

There are a host of other shortcuts, tools, and container configurations in the Makefile designed for various purposes. Feel free to explore.

What should I work on?
======================
If it has someone assigned to it then that person is the person responsible for working the enhancement. If you feel like you could contribute then reach out to that person.

We list our specs in `/docs`. `/docs/current` are things that we are actively working on. `/docs/future` are ideas for future work and the direction we
want that work to take. Fixing bugs, translations, and updates to documentation are also appreciated.
Fixing bugs, adding translations, and updating the documentation are always appreciated, so reviewing the backlog of issues is always a good place to start.

Be aware that if you are working in a part of the codebase that is going through active development your changes may be rejected or you may be asked to
rebase them. A good idea before starting work is to have a discussion with us on IRC or the mailing list.
**NOTE**

Submitting Pull Requests
========================
> If you work in a part of the codebase that is going through active development, your changes may be rejected, or you may be asked to `rebase`. A good idea before starting work is to have a discussion with us in the `#ansible-awx` channel on irc.freenode.net, or on the [mailing list](https://groups.google.com/forum/#!forum/awx-project).

Fixes and Features for AWX will go through the Github PR interface. There are a few things that can be done to help the visibility of your change
and increase the likelihood that it will be accepted
**NOTE**

> Add UI detail to these
> If you're planning to develop features or fixes for the UI, please review the [UI Developer doc](./awx/ui/README.md).

## Submitting Pull Requests

Fixes and Features for AWX will go through the Github pull request process. Submit your pull request (PR) against the `devel` branch.

Here are a few things you can do to help the visibility of your change, and increase the likelihood that it will be accepted:

* No issues when running linters/code checkers
  * Python: flake8: `(container)/awx_devel$ make flake8`
@@ -237,44 +305,17 @@ and increase the likelihood that it will be accepted
  * JavaScript: Jasmine: `(container)/awx_devel$ make ui-test-ci`
* Write tests for new functionality, update/add tests for bug fixes
* Make the smallest change possible
* Write good commit messages: https://chris.beams.io/posts/git-commit/
* Write good commit messages. See [How to write a Git commit message](https://chris.beams.io/posts/git-commit/).

It's generally a good idea to discuss features with us first by engaging us in IRC or on the mailing list, especially if you are unsure if it's a good
fit.
It's generally a good idea to discuss features with us first by engaging us in the `#ansible-awx` channel on irc.freenode.net, or on the [mailing list](https://groups.google.com/forum/#!forum/awx-project).

We like to keep our commit history clean and will require resubmission of pull requests that contain merge commits. Use `git pull --rebase` rather than
`git pull` and `git rebase` rather than `git merge`.
We like to keep our commit history clean, and will require resubmission of pull requests that contain merge commits. Use `git pull --rebase`, rather than
`git pull`, and `git rebase`, rather than `git merge`.
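
A sketch of that rebase-based flow for refreshing a PR branch, assuming your fork is `origin` and the upstream ansible/awx repo is configured as a remote named `upstream` (remote and branch names are illustrative):

```bash
(host)$ git fetch upstream                              # get the latest devel
(host)$ git rebase upstream/devel                       # replay your commits on top, with no merge commit
(host)$ git push --force-with-lease origin my-feature   # safely update the PR branch
```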
Sometimes it might take us a while to fully review your PR. We try to keep the `devel` branch in pretty good working order so we review requests carefully.
Please be patient.
Sometimes it might take us a while to fully review your PR. We try to keep the `devel` branch in good working order, and so we review requests carefully. Please be patient.

All submitted PRs will have the linter and unit tests run against them and the status reported in the PR.
All submitted PRs will have the linter and unit tests run against them, and the status reported in the PR.

Reporting Issues
================
## Reporting Issues

Use the Github issue tracker for filing bugs. In order to save time and help us respond to issues quickly, make sure to fill out as much of the issue template
as possible. Version information and an accurate reproducing scenario are critical to helping us identify the problem.

When reporting issues for the UI we also appreciate having screenshots and any error messages from the web browser's console. It's not unusual for browser extensions
and plugins to cause problems. Reporting those will also help speed up analyzing and resolving UI bugs.

For the API and backend services, please capture all of the logs that you can from the time the problem was occurring.

Don't use the issue tracker to get help on how to do something - please use the mailing list and IRC for that.

How issues are resolved
-----------------------

We triage our issues into high, medium, and low and will tag them with the relevant component (api, ui, installer, etc). We will typically focus on high priority
issues. There aren't hard and fast rules for determining the severity of an issue, but generally high priority issues have an increased likelihood of breaking
existing functionality and/or negatively impacting a large number of users.

If your issue isn't considered `high` priority then please be patient as it may take some time to get to your report.

Before opening a new issue, please use the issue search feature to see if it's already been reported. If you have any extra detail to provide then please comment.
Rather than posting a "me too" comment you might consider giving it a "thumbs up" on github.

Ansible Issue Bot
-----------------
> Fill in
We welcome your feedback, and encourage you to file an issue when you run into a problem. But before opening a new issue, we ask that you please view our [Issues guide](./ISSUES.md).
19
COPYING
19
COPYING
@@ -1,19 +0,0 @@
ANSIBLE TOWER BY RED HAT END USER LICENSE AGREEMENT

This end user license agreement (“EULA”) governs the use of the Ansible Tower software and any related updates, upgrades, versions, appearance, structure and organization (the “Ansible Tower Software”), regardless of the delivery mechanism.

1. License Grant. Subject to the terms of this EULA, Red Hat, Inc. and its affiliates (“Red Hat”) grant to you (“You”) a non-transferable, non-exclusive, worldwide, non-sublicensable, limited, revocable license to use the Ansible Tower Software for the term of the associated Red Hat Software Subscription(s) and in a quantity equal to the number of Red Hat Software Subscriptions purchased from Red Hat for the Ansible Tower Software (“License”), each as set forth on the applicable Red Hat ordering document. You acquire only the right to use the Ansible Tower Software and do not acquire any rights of ownership. Red Hat reserves all rights to the Ansible Tower Software not expressly granted to You. This License grant pertains solely to Your use of the Ansible Tower Software and is not intended to limit Your rights under, or grant You rights that supersede, the license terms of any software packages which may be made available with the Ansible Tower Software that are subject to an open source software license.

2. Intellectual Property Rights. Title to the Ansible Tower Software and each component, copy and modification, including all derivative works whether made by Red Hat, You or on Red Hat's behalf, including those made at Your suggestion and all associated intellectual property rights, are and shall remain the sole and exclusive property of Red Hat and/or its licensors. The License does not authorize You (nor may You allow any third party, specifically non-employees of Yours) to: (a) copy, distribute, reproduce, use or allow third party access to the Ansible Tower Software except as expressly authorized hereunder; (b) decompile, disassemble, reverse engineer, translate, modify, convert or apply any procedure or process to the Ansible Tower Software in order to ascertain, derive, and/or appropriate for any reason or purpose, including the Ansible Tower Software source code or source listings or any trade secret information or process contained in the Ansible Tower Software (except as permitted under applicable law); (c) execute or incorporate other software (except for approved software as appears in the Ansible Tower Software documentation or specifically approved by Red Hat in writing) into Ansible Tower Software, or create a derivative work of any part of the Ansible Tower Software; (d) remove any trademarks, trade names or titles, copyrights legends or any other proprietary marking on the Ansible Tower Software; (e) disclose the results of any benchmarking of the Ansible Tower Software (whether or not obtained with Red Hat’s assistance) to any third party; (f) attempt to circumvent any user limits or other license, timing or use restrictions that are built into, defined or agreed upon, regarding the Ansible Tower Software. You are hereby notified that the Ansible Tower Software may contain time-out devices, counter devices, and/or other devices intended to ensure the limits of the License will not be exceeded (“Limiting Devices”). If the Ansible Tower Software contains Limiting Devices, Red Hat will provide You materials necessary to use the Ansible Tower Software to the extent permitted. You may not tamper with or otherwise take any action to defeat or circumvent a Limiting Device or other control measure, including but not limited to, resetting the unit amount or using false host identification number for the purpose of extending any term of the License.

3. Evaluation Licenses. Unless You have purchased Ansible Tower Software Subscriptions from Red Hat or an authorized reseller under the terms of a commercial agreement with Red Hat, all use of the Ansible Tower Software shall be limited to testing purposes and not for production use (“Evaluation”). Unless otherwise agreed by Red Hat, Evaluation of the Ansible Tower Software shall be limited to an evaluation environment and the Ansible Tower Software shall not be used to manage any systems or virtual machines on networks being used in the operation of Your business or any other non-evaluation purpose. Unless otherwise agreed by Red Hat, You shall limit all Evaluation use to a single 30 day evaluation period and shall not download or otherwise obtain additional copies of the Ansible Tower Software or license keys for Evaluation.

4. Limited Warranty. Except as specifically stated in this Section 4, to the maximum extent permitted under applicable law, the Ansible Tower Software and the components are provided and licensed “as is” without warranty of any kind, expressed or implied, including the implied warranties of merchantability, non-infringement or fitness for a particular purpose. Red Hat warrants solely to You that the media on which the Ansible Tower Software may be furnished will be free from defects in materials and manufacture under normal use for a period of thirty (30) days from the date of delivery to You. Red Hat does not warrant that the functions contained in the Ansible Tower Software will meet Your requirements or that the operation of the Ansible Tower Software will be entirely error free, appear precisely as described in the accompanying documentation, or comply with regulatory requirements.

5. Limitation of Remedies and Liability. To the maximum extent permitted by applicable law, Your exclusive remedy under this EULA is to return any defective media within thirty (30) days of delivery along with a copy of Your payment receipt and Red Hat, at its option, will replace it or refund the money paid by You for the media. To the maximum extent permitted under applicable law, neither Red Hat nor any Red Hat authorized distributor will be liable to You for any incidental or consequential damages, including lost profits or lost savings arising out of the use or inability to use the Ansible Tower Software or any component, even if Red Hat or the authorized distributor has been advised of the possibility of such damages. In no event shall Red Hat's liability or an authorized distributor’s liability exceed the amount that You paid to Red Hat for the Ansible Tower Software during the twelve months preceding the first event giving rise to liability.

6. Export Control. In accordance with the laws of the United States and other countries, You represent and warrant that You: (a) understand that the Ansible Tower Software and its components may be subject to export controls under the U.S. Commerce Department’s Export Administration Regulations (“EAR”); (b) are not located in any country listed in Country Group E:1 in Supplement No. 1 to part 740 of the EAR; (c) will not export, re-export, or transfer the Ansible Tower Software to any prohibited destination or to any end user who has been prohibited from participating in US export transactions by any federal agency of the US government; (d) will not use or transfer the Ansible Tower Software for use in connection with the design, development or production of nuclear, chemical or biological weapons, or rocket systems, space launch vehicles, or sounding rockets or unmanned air vehicle systems; (e) understand and agree that if you are in the United States and you export or transfer the Ansible Tower Software to eligible end users, you will, to the extent required by EAR Section 740.17 obtain a license for such export or transfer and will submit semi-annual reports to the Commerce Department’s Bureau of Industry and Security, which include the name and address (including country) of each transferee; and (f) understand that countries including the United States may restrict the import, use, or export of encryption products (which may include the Ansible Tower Software) and agree that you shall be solely responsible for compliance with any such import, use, or export restrictions.

7. General. If any provision of this EULA is held to be unenforceable, that shall not affect the enforceability of the remaining provisions. This agreement shall be governed by the laws of the State of New York and of the United States, without regard to any conflict of laws provisions. The rights and obligations of the parties to this EULA shall not be governed by the United Nations Convention on the International Sale of Goods.

Copyright © 2015 Red Hat, Inc. All rights reserved. "Red Hat" and “Ansible Tower” are registered trademarks of Red Hat, Inc. All other trademarks are the property of their respective owners.
DCO_1_1.md (new file, 45 lines)
@@ -0,0 +1,45 @@
DCO
===

All contributors must use `git commit --signoff` for any
commit to be merged, and agree that usage of --signoff constitutes
agreement with the terms of DCO 1.1, which appears below:

```
Developer Certificate of Origin
Version 1.1

Copyright (C) 2004, 2006 The Linux Foundation and its contributors.
1 Letterman Drive
Suite D4700
San Francisco, CA, 94129

Everyone is permitted to copy and distribute verbatim copies of this
license document, but changing it is not allowed.

Developer's Certificate of Origin 1.1

By making a contribution to this project, I certify that:

(a) The contribution was created in whole or in part by me and I
    have the right to submit it under the open source license
    indicated in the file; or

(b) The contribution is based upon previous work that, to the best
    of my knowledge, is covered under an appropriate open source
    license and I have the right under that license to submit that
    work with modifications, whether created in whole or in part
    by me, under the same open source license (unless I am
    permitted to submit under a different license), as indicated
    in the file; or

(c) The contribution was provided directly to me by some other
    person who certified (a), (b) or (c) and I have not modified
    it.

(d) I understand and agree that this project and the contribution
    are public and that a record of the contribution (including all
    personal information I submit with it, including my sign-off) is
    maintained indefinitely and may be redistributed consistent with
    this project or the open source license(s) involved.
```
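
As a quick editorial illustration of the workflow this file requires (a sketch, not part of the diff itself):

```bash
# Sign off a commit: git appends a "Signed-off-by: Your Name <you@example.com>"
# trailer using the name and email from your git config.
git commit --signoff -m "awx: fix a typo in the install guide"

# Confirm the trailer is present on the most recent commit
git log -1 --format=%B
```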
INSTALL.md (470 lines changed)
@@ -1,2 +1,468 @@
Installing AWX
==============
# Installing AWX

This document provides a guide for installing AWX.

## Table of contents

- [Getting started](#getting-started)
  - [Clone the repo](#clone-the-repo)
  - [AWX branding](#awx-branding)
  - [Prerequisites](#prerequisites)
  - [System Requirements](#system-requirements)
  - [AWX Tunables](#awx-tunables)
  - [Choose a deployment platform](#choose-a-deployment-platform)
  - [Official vs Building Images](#official-vs-building-images)
- [OpenShift](#openshift)
  - [Prerequisites](#prerequisites-1)
    - [Deploying to Minishift](#deploying-to-minishift)
  - [Pre-build steps](#pre-build-steps)
    - [PostgreSQL](#postgresql)
  - [Start the build](#start-the-build)
  - [Post build](#post-build)
  - [Accessing AWX](#accessing-awx)
- [Docker](#docker)
  - [Prerequisites](#prerequisites-2)
  - [Pre-build steps](#pre-build-steps-1)
    - [Deploying to a remote host](#deploying-to-a-remote-host)
    - [Inventory variables](#inventory-variables)
    - [Docker registry](#docker-registry)
    - [PostgreSQL](#postgresql-1)
    - [Proxy settings](#proxy-settings)
  - [Start the build](#start-the-build-1)
  - [Post build](#post-build-1)
  - [Accessing AWX](#accessing-awx-1)

## Getting started

### Clone the repo

If you have not already done so, you will need to clone, or create a local copy of, the [AWX repo](https://github.com/ansible/awx). For more on how to clone the repo, view [git clone help](https://git-scm.com/docs/git-clone).

Once you have a local copy, run commands within the root of the project tree.

### AWX branding

You can optionally install the AWX branding assets from the [awx-logos repo](https://github.com/ansible/awx-logos). Prior to installing, please review and agree to the [trademark guidelines](https://github.com/ansible/awx-logos/blob/master/TRADEMARKS.md).

To install the assets, clone the `awx-logos` repo so that it is next to your `awx` clone, as shown in the sketch below. As you progress through the installation steps, you'll be setting variables in the [inventory](./installer/inventory) file. To include the assets in the build, set `awx_official=true`.

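For instance, the side-by-side layout described above might be set up like this (a sketch; the directory names are just the repo defaults):

```bash
# Clone awx and awx-logos as sibling directories
git clone https://github.com/ansible/awx.git
git clone https://github.com/ansible/awx-logos.git

# Then, in awx/installer/inventory, uncomment or add:
#   awx_official=true
```
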
### Prerequisites

Before you can run a deployment, you'll need the following installed in your local environment:

- [Ansible](http://docs.ansible.com/ansible/latest/intro_installation.html) version 2.4+
- [Docker](https://docs.docker.com/engine/installation/)
- [docker-py](https://github.com/docker/docker-py) Python module
- [GNU Make](https://www.gnu.org/software/make/)
- [Git](https://git-scm.com/)

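As a minimal sketch, the Python-side prerequisites can usually be satisfied with `pip` (assuming Docker, GNU Make, and Git come from your OS package manager):

```bash
# Install Ansible 2.4+ and the docker-py module into the current environment
pip install 'ansible>=2.4' docker-py
```
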
### System Requirements

The system that runs the AWX service will need to satisfy the following requirements:

- At least 4GB of memory
- At least 2 CPU cores
- At least 20GB of disk space
- Running Docker or OpenShift

### AWX Tunables

**TODO** add tunable bits

### Choose a deployment platform

We currently support running AWX as a containerized application using Docker images deployed to either an OpenShift cluster, or a standalone Docker daemon. The remainder of this document will walk you through the process of building the images, and deploying them to either platform.

The [installer](./installer) directory contains an [inventory](./installer/inventory) file, and a playbook, [install.yml](./installer/install.yml). You'll begin by setting variables in the inventory file according to the platform you wish to use, and then you'll start the image build and deployment process by running the playbook.

In the sections below, you'll find deployment details and instructions for each platform. To deploy to Docker, view the [Docker section](#docker), and for OpenShift, view the [OpenShift section](#openshift).

### Official vs Building Images

When installing AWX you have the option of building your own images, or using the images provided on DockerHub (see [awx_web](https://hub.docker.com/r/ansible/awx_web/) and [awx_task](https://hub.docker.com/r/ansible/awx_task/)).

This is controlled by the following variables in the `inventory` file:

```
dockerhub_base=ansible
dockerhub_version=latest
```

If these variables are present, then all deployments will use these hosted images. If the variables are not present, then the images will be built during the install.

*dockerhub_base*

> The base location on DockerHub where the images are hosted (by default this pulls container images named `ansible/awx_web:tag` and `ansible/awx_task:tag`)

*dockerhub_version*

> Multiple versions are provided. `latest` always pulls the most recent. You may also select version numbers at different granularities: 1, 1.0, 1.0.1, 1.0.0.123

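For example, to pin a deployment to a specific published image version rather than tracking `latest`, the relevant inventory lines would look like the following sketch (the version number here is illustrative):

```
dockerhub_base=ansible
dockerhub_version=1.0.1
```
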
## OpenShift

### Prerequisites

To complete a deployment to OpenShift, you will need access to an OpenShift cluster. For demo and testing purposes, you can use [Minishift](https://github.com/minishift/minishift) to create a single node cluster running inside a virtual machine.

You will also need to have the `oc` command in your PATH. The `install.yml` playbook will call out to `oc` when logging into, and creating objects on, the cluster.

#### Deploying to Minishift

Install Minishift by following the [installation guide](https://docs.openshift.org/latest/minishift/getting-started/installing.html).

The Minishift VM contains a Docker daemon, which you can use to build the AWX images. This is generally the approach you should take, and we recommend doing so. To use this instance, run the following command to set up your environment:

```bash
# Set DOCKER environment variables to point to the Minishift VM
$ eval $(minishift docker-env)
```

**Note**

> If you choose not to use the Docker instance running inside the VM, and build the images externally, you will have to enable the OpenShift cluster to access the images. This involves pushing the images to an external Docker registry, and granting the cluster access to it, or exposing the internal registry, and pushing the images into it.

### Pre-build steps

Before starting the build process, review the [inventory](./installer/inventory) file, and uncomment and provide values for the following variables found in the `[all:vars]` section:

*openshift_host*

> IP address or hostname of the OpenShift cluster. If you're using Minishift, this will be the value returned by `minishift ip`.

*awx_openshift_project*

> Name of the OpenShift project that will be created, and used as the namespace for the AWX app. Defaults to *awx*.

*awx_node_port*

> The web server port running inside the AWX pod. Defaults to *30083*.

*openshift_user*

> Username of the OpenShift user that will create the project, and deploy the application. Defaults to *developer*.

*docker_registry*

> IP address and port, or URL, for accessing a registry that the OpenShift cluster can access. Defaults to *172.30.1.1:5000*, the internal registry delivered with Minishift. This is not needed if you are using official hosted images.

*docker_registry_repository*

> Namespace to use when pushing and pulling images to and from the registry. Generally this will match the project name. It defaults to *awx*. This is not needed if you are using official hosted images.

*docker_registry_username*

> Username of the user that will push images to the registry. Will generally match the *openshift_user* value. Defaults to *developer*. This is not needed if you are using official hosted images.

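Putting those together, a Minishift-oriented `[all:vars]` excerpt might look like the following sketch; every value is an example (the host is whatever `minishift ip` returns in your environment):

```
openshift_host=192.168.64.2
awx_openshift_project=awx
awx_node_port=30083
openshift_user=developer
docker_registry=172.30.1.1:5000
docker_registry_repository=awx
docker_registry_username=developer
```
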
#### PostgreSQL

AWX requires access to a PostgreSQL database, and by default, one will be created and deployed in a pod. The database is configured for persistence, and will create a persistent volume claim named `postgresql`. By default, it will claim 5GB from the available persistent volume pool. This can be tuned by setting a variable in the inventory file, or on the command line during the `ansible-playbook` run:

    ansible-playbook ... -e pg_volume_capacity=n

If you wish to use an external database, in the inventory file, set the value of `pg_hostname`, and update `pg_username`, `pg_password`, `pg_database`, and `pg_port` with the connection information, as in the sketch below. When setting `pg_hostname`, the installer will assume you have configured the database in that location, and will not launch the postgresql pod.

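For example, an external-database configuration in the inventory might look like this sketch (the hostname and credentials are placeholders):

```
pg_hostname=postgres.example.com
pg_username=awx
pg_password=awxpass
pg_database=awx
pg_port=5432
```
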
### Start the build

To start the build, you will pass two *extra* variables on the command line. The first is *openshift_password*, which is the password for the *openshift_user*, and the second is *docker_registry_password*, which is the password associated with *docker_registry_username*.

If you're using the OpenShift internal registry, then you'll pass an access token for the *docker_registry_password* value, rather than a password. The `oc whoami -t` command will generate the required token, as long as you're logged into the cluster via `oc login`.

To start the build and deployment, run the following (*docker_registry_password* is optional if using official images):

```bash
# Start the build and deployment
$ ansible-playbook -i inventory install.yml -e openshift_password=developer -e docker_registry_password=$(oc whoami -t)
```

### Post build

After the playbook run completes, check the status of the deployment by running `oc get pods`:

```bash
# View the running pods
$ oc get pods

NAME                   READY     STATUS    RESTARTS   AGE
awx-3886581826-5mv0l   4/4       Running   0          8s
postgresql-1-l85fh     1/1       Running   0          20m
```

In the above example, the name of the AWX pod is `awx-3886581826-5mv0l`. Before accessing the AWX web interface, setup tasks and database migrations need to complete. These tasks are running in the `awx_task` container inside the AWX pod. To monitor their status, tail the container's STDOUT by running the following command, replacing the AWX pod name with the pod name from your environment:

```bash
# Follow the awx_task log output
$ oc logs -f awx-3886581826-5mv0l -c awx-celery
```

You will see the following, indicating that database migrations are running:

```bash
Using /etc/ansible/ansible.cfg as config file
127.0.0.1 | SUCCESS => {
    "changed": false,
    "db": "awx"
}
Operations to perform:
  Synchronize unmigrated apps: solo, api, staticfiles, messages, channels, django_extensions, ui, rest_framework, polymorphic
  Apply all migrations: sso, taggit, sessions, djcelery, sites, kombu_transport_django, social_auth, contenttypes, auth, conf, main
Synchronizing apps without migrations:
  Creating tables...
    Running deferred SQL...
    Installing custom SQL...
Running migrations:
  Rendering model states... DONE
  Applying contenttypes.0001_initial... OK
  Applying contenttypes.0002_remove_content_type_name... OK
  Applying auth.0001_initial... OK
  Applying auth.0002_alter_permission_name_max_length... OK
  Applying auth.0003_alter_user_email_max_length... OK
  Applying auth.0004_alter_user_username_opts... OK
  Applying auth.0005_alter_user_last_login_null... OK
  Applying auth.0006_require_contenttypes_0002... OK
  Applying taggit.0001_initial... OK
  Applying taggit.0002_auto_20150616_2121... OK
...
```

When you see output similar to the following, you'll know that database migrations have completed, and you can access the web interface:

```bash
Python 2.7.5 (default, Nov  6 2016, 00:28:07)
[GCC 4.8.5 20150623 (Red Hat 4.8.5-11)] on linux2
Type "help", "copyright", "credits" or "license" for more information.
(InteractiveConsole)

>>> <User: admin>
>>> Default organization added.
Demo Credential, Inventory, and Job Template added.
Successfully registered instance awx-3886581826-5mv0l
(changed: True)
Creating instance group tower
Added instance awx-3886581826-5mv0l to tower
```

Once database migrations complete, the web interface will be accessible.

### Accessing AWX

The AWX web interface is running in the AWX pod, behind the `awx-web-svc` service. To view the service, and its port value, run the following command:

```bash
# View available services
$ oc get services

NAME          CLUSTER-IP      EXTERNAL-IP   PORT(S)          AGE
awx-web-svc   172.30.111.74   <nodes>       8052:30083/TCP   37m
postgresql    172.30.102.9    <none>        5432/TCP         38m
```

The deployment process creates a route, `awx-web-svc`, to expose the service. How the ingress is actually created will vary depending on your environment, and how the cluster is configured. You can view the route, and the external IP address and hostname assigned to it, by running the following command:

```bash
# View available routes
$ oc get routes

NAME          HOST/PORT                             PATH      SERVICES      PORT      TERMINATION   WILDCARD
awx-web-svc   awx-web-svc-awx.192.168.64.2.nip.io             awx-web-svc   http      edge/Allow    None
```

The above example is taken from a Minishift instance. From a web browser, use `https` to access the `HOST/PORT` value from your environment. Using the above example, the URL to access the server would be [https://awx-web-svc-awx.192.168.64.2.nip.io](https://awx-web-svc-awx.192.168.64.2.nip.io).

Once you access the AWX server, you will be prompted with a login dialog. The default administrator username is `admin`, and the password is `password`.

## Docker

### Prerequisites

You will need the following installed on the host where AWX will be deployed:

- [Docker](https://docs.docker.com/engine/installation/)
- [docker-py](https://github.com/docker/docker-py) Python module

Note: After installing Docker, the Docker service must be started.

### Pre-build steps

#### Deploying to a remote host

By default, the delivered [installer/inventory](./installer/inventory) file will deploy AWX to the local host. It is possible, however, to deploy to a remote host. The [installer/install.yml](./installer/install.yml) playbook can be used to build images on the local host, and ship the built images to, and run deployment tasks on, a remote host. To do this, modify the [installer/inventory](./installer/inventory) file, by commenting out `localhost`, and adding the remote host.

For example, suppose you wish to build images locally on your CI/CD host, and deploy them to a remote host named *awx-server*. To do this, add *awx-server* to the [installer/inventory](./installer/inventory) file, and comment out or remove `localhost`, as demonstrated by the following:

```yaml
# localhost ansible_connection=local
awx-server

[all:vars]
...
```

In the above example, image build tasks will be delegated to `localhost`, which is typically where the clone of the AWX project exists. Built images will be archived, copied to the remote host, and imported into the remote Docker image cache. Tasks to start the AWX containers will then execute on the remote host.

If you choose to use the official images, then the remote host will be the one to pull those images.

**Note**

> You may also want to set additional variables to control how Ansible connects to the host; see the sketch after these notes. For more information about this, view [Behavioral Inventory Parameters](http://docs.ansible.com/ansible/latest/intro_inventory.html#id12).

> As mentioned above, in [Prerequisites](#prerequisites-2), the prerequisites are required on the remote host.

> When deploying to a remote host, the playbook does not execute tasks with the `become` option. For this reason, make sure the user that connects to the remote host has privileges to run the `docker` command. This typically means that non-privileged users need to be part of the `docker` group.

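For instance, a remote host entry with a couple of behavioral connection parameters might look like this sketch (the address and username are placeholders):

```
# Connect to awx-server at a specific address, as a specific SSH user
awx-server ansible_host=203.0.113.10 ansible_user=deploy
```
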
#### Inventory variables

Before starting the build process, review the [inventory](./installer/inventory) file, and uncomment and provide values for the following variables found in the `[all:vars]` section:

*postgres_data_dir*

> If you're using the default PostgreSQL container (see [PostgreSQL](#postgresql-1) below), provide a path that can be mounted to the container, and where the database can be persisted.

*host_port*

> Provide a port number that can be mapped from the Docker daemon host to the web server running inside the AWX container. Defaults to *80*.

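As an example, the two variables above might be set like this (the data directory path is a placeholder; pick any path the Docker daemon can mount):

```
postgres_data_dir=/var/lib/awx/pgdocker
host_port=80
```
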
#### Docker registry

If you wish to tag and push built images to a Docker registry, set the following variables in the inventory file:

*docker_registry*

> IP address and port, or URL, for accessing a registry.

*docker_registry_repository*

> Namespace to use when pushing and pulling images to and from the registry. Defaults to *awx*.

*docker_registry_username*

> Username of the user that will push images to the registry. Defaults to *developer*.

*docker_remove_local_images*

> Due to the way that the docker_image module behaves, images will not be pushed to a remote repository if they are present locally. Set this to delete local versions of the images that will be pushed to the remote. This will fail if containers are currently running from those images.

**Note**

> These settings are ignored if using official images.

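A registry-enabled inventory excerpt could then look like the following sketch (the registry address and username are placeholders):

```
docker_registry=registry.example.com:5000
docker_registry_repository=awx
docker_registry_username=developer
docker_remove_local_images=True
```
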
#### Proxy settings

*http_proxy*

> IP address and port, or URL, for using an http_proxy.

*https_proxy*

> IP address and port, or URL, for using an https_proxy.

*no_proxy*

> Exclude IP address or URL from the proxy.

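For example (the proxy addresses are placeholders):

```
http_proxy=http://proxy.example.com:3128
https_proxy=http://proxy.example.com:3128
no_proxy=127.0.0.1,localhost
```
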
#### PostgreSQL

AWX requires access to a PostgreSQL database, and by default, one will be created and deployed in a container, and data will be persisted to a host volume. In this scenario, you must set the value of `postgres_data_dir` to a path that can be mounted to the container. When the container is stopped, the database files will still exist in the specified path.

If you wish to use an external database, in the inventory file, set the value of `pg_hostname`, and update `pg_username`, `pg_password`, `pg_database`, and `pg_port` with the connection information.

### Start the build

If you are not pushing images to a Docker registry, start the build by running the following:

```bash
# Set the working directory to installer
$ cd installer

# Run the Ansible playbook
$ ansible-playbook -i inventory install.yml
```

If you're pushing built images to a repository, then use the `-e` option to pass the registry password as follows, replacing *password* with the password of the username assigned to `docker_registry_username` (note that you will also need to remove `dockerhub_base` and `dockerhub_version` from the inventory file):

```bash
# Set the working directory to installer
$ cd installer

# Run the Ansible playbook
$ ansible-playbook -i inventory -e docker_registry_password=password install.yml
```

### Post build

After the playbook run completes, Docker will report up to 5 running containers. If you chose to use an existing PostgreSQL database, then it will report 4. You can view the running containers using the `docker ps` command, as follows:

```bash
CONTAINER ID        IMAGE               COMMAND                  CREATED             STATUS              PORTS                                NAMES
e240ed8209cd        awx_task:1.0.0.8    "/tini -- /bin/sh ..."   2 minutes ago       Up About a minute   8052/tcp                             awx_task
1cfd02601690        awx_web:1.0.0.8     "/tini -- /bin/sh ..."   2 minutes ago       Up About a minute   0.0.0.0:80->8052/tcp                 awx_web
55a552142bcd        memcached:alpine    "docker-entrypoint..."   2 minutes ago       Up 2 minutes        11211/tcp                            memcached
84011c072aad        rabbitmq:3          "docker-entrypoint..."   2 minutes ago       Up 2 minutes        4369/tcp, 5671-5672/tcp, 25672/tcp   rabbitmq
97e196120ab3        postgres:9.6        "docker-entrypoint..."   2 minutes ago       Up 2 minutes        5432/tcp                             postgres
```

Immediately after the containers start, the *awx_task* container will perform required setup tasks, including database migrations. These tasks need to complete before the web interface can be accessed. To monitor the progress, you can follow the container's STDOUT by running the following:

```bash
# Tail the awx_task log
$ docker logs -f awx_task
```

You will see output similar to the following:

```bash
Using /etc/ansible/ansible.cfg as config file
127.0.0.1 | SUCCESS => {
    "changed": false,
    "db": "awx"
}
Operations to perform:
  Synchronize unmigrated apps: solo, api, staticfiles, messages, channels, django_extensions, ui, rest_framework, polymorphic
  Apply all migrations: sso, taggit, sessions, djcelery, sites, kombu_transport_django, social_auth, contenttypes, auth, conf, main
Synchronizing apps without migrations:
  Creating tables...
    Running deferred SQL...
    Installing custom SQL...
Running migrations:
  Rendering model states... DONE
  Applying contenttypes.0001_initial... OK
  Applying contenttypes.0002_remove_content_type_name... OK
  Applying auth.0001_initial... OK
  Applying auth.0002_alter_permission_name_max_length... OK
  Applying auth.0003_alter_user_email_max_length... OK
  Applying auth.0004_alter_user_username_opts... OK
  Applying auth.0005_alter_user_last_login_null... OK
  Applying auth.0006_require_contenttypes_0002... OK
  Applying taggit.0001_initial... OK
  Applying taggit.0002_auto_20150616_2121... OK
  Applying main.0001_initial... OK
...
```

Once migrations complete, you will see output similar to the following:

```bash
Python 2.7.5 (default, Nov  6 2016, 00:28:07)
[GCC 4.8.5 20150623 (Red Hat 4.8.5-11)] on linux2
Type "help", "copyright", "credits" or "license" for more information.
(InteractiveConsole)

>>> <User: admin>
>>> Default organization added.
Demo Credential, Inventory, and Job Template added.
Successfully registered instance awx
(changed: True)
Creating instance group tower
Added instance awx to tower
(changed: True)
...
```

### Accessing AWX

The AWX web server is accessible on the deployment host, using the *host_port* value set in the *inventory* file. The default URL is [http://localhost](http://localhost).

You will be prompted with a login dialog. The default administrator username is `admin`, and the password is `password`.

ISSUES.md (new file, 87 lines)
@@ -0,0 +1,87 @@
# Issues

## Reporting

Use the GitHub [issue tracker](https://github.com/ansible/awx/issues) for filing bugs. In order to save time, and help us respond to issues quickly, make sure to fill out as much of the issue template
as possible. Version information, and an accurate reproducing scenario are critical to helping us identify the problem.

Please don't use the issue tracker as a way to ask how to do something. Instead, use the [mailing list](https://groups.google.com/forum/#!forum/awx-project), and the `#ansible-awx` channel on irc.freenode.net to get help.

Before opening a new issue, please use the issue search feature to see if what you're experiencing has already been reported. If you have any extra detail to provide, please comment. Otherwise, rather than posting a "me too" comment, please consider giving it a ["thumbs up"](https://github.com/blog/2119-add-reactions-to-pull-requests-issues-and-comment) to give us an indication of the severity of the problem.

### UI Issues

When reporting issues for the UI, we also appreciate having screen shots and any error messages from the web browser's console. It's not unusual for browser extensions
and plugins to cause problems. Reporting those will also help speed up analyzing and resolving UI bugs.

### API and backend issues

For the API and backend services, please capture all of the logs that you can from the time the problem occurred.

## How issues are resolved

We triage our issues into high, medium, and low, and tag them with the relevant component (e.g. api, ui, installer, etc.). We typically focus on higher priority issues first. There aren't hard and fast rules for determining the severity of an issue, but generally high priority issues have an increased likelihood of breaking existing functionality, and negatively impacting a large number of users.

If your issue isn't considered high priority, then please be patient, as it may take some time to get to it.

### Issue states

`state:needs_triage` The issue has not been looked at by a person yet, and still needs to be triaged. This is the initial state for all new issues/pull requests.

`state:needs_info` The issue needs more information. This could be more debug output, or more specifics about the system, such as version information; any detail that is currently preventing the issue from moving forward. This should be considered a blocked state.

`state:needs_review` The issue/pull request needs to be reviewed by other maintainers and contributors. This is usually used when there is a question out to another maintainer, or when a person is less familiar with the area of the code base the issue is for.

`state:needs_revision` More commonly used on pull requests, this state indicates that there are changes being waited on.

`state:in_progress` The issue is actively being worked on, and you should be in contact with whoever is assigned if you are also working on, or plan to work on, a similar issue.

`state:in_testing` The issue or pull request is currently being tested.

### AWX Issue Bot (awxbot)

We use an issue bot to help us label and organize incoming issues. This bot, awxbot, is a version of [ansible/ansibullbot](https://github.com/ansible/ansibullbot).

#### Overview

AWXbot performs many functions:

* Respond quickly to issues and pull requests.
* Identify the maintainers responsible for reviewing pull requests.
* Identify issue and pull request types and components (e.g. type:bug, component:api).

#### For issue submitters

The bot requires a minimal subset of information from the issue template:

* issue type
* component
* summary

If any of those items are missing, your issue will still get the `state:needs_triage` label, but may end up being responded to more slowly than issues that have the complete set of information.
So please use the template whenever possible.

Currently you can expect the bot to add common labels such as `state:needs_triage`, `type:bug`, `type:enhancement`, `component:ui`, etc.
These labels are determined by the template data. Please use the template, and fill it out as accurately as possible.

The `state:needs_triage` label will remain on your issue until a person has looked at it.

#### For pull request submitters

The bot requires a minimal subset of information from the pull request template:

* issue type
* component
* summary

If any of those items are missing, your pull request will still get the `state:needs_triage` label, but may end up being responded to more slowly than other pull requests that have a complete set of information.

Currently you can expect awxbot to add common labels such as `state:needs_triage`, `type:bug`, `component:docs`, etc.
These labels are determined by the template data. Please use the template, and fill it out as accurately as possible.

The `state:needs_triage` label will remain on your pull request until a person has looked at it.

You can also expect the bot to CC maintainers of specific areas of the code; this will notify them that there is a pull request by placing a comment on the pull request.
The comment will look something like `CC @matburt @wwitzel3 ...`.

LICENSE.md (new file, 168 lines)
@@ -0,0 +1,168 @@
Apache License
==============

_Version 2.0, January 2004_
_<<http://www.apache.org/licenses/>>_

### Terms and Conditions for use, reproduction, and distribution

#### 1. Definitions

“License” shall mean the terms and conditions for use, reproduction, and
distribution as defined by Sections 1 through 9 of this document.

“Licensor” shall mean the copyright owner or entity authorized by the copyright
owner that is granting the License.

“Legal Entity” shall mean the union of the acting entity and all other entities
that control, are controlled by, or are under common control with that entity.
For the purposes of this definition, “control” means **(i)** the power, direct or
indirect, to cause the direction or management of such entity, whether by
contract or otherwise, or **(ii)** ownership of fifty percent (50%) or more of the
outstanding shares, or **(iii)** beneficial ownership of such entity.

“You” (or “Your”) shall mean an individual or Legal Entity exercising
permissions granted by this License.

“Source” form shall mean the preferred form for making modifications, including
but not limited to software source code, documentation source, and configuration
files.

“Object” form shall mean any form resulting from mechanical transformation or
translation of a Source form, including but not limited to compiled object code,
generated documentation, and conversions to other media types.

“Work” shall mean the work of authorship, whether in Source or Object form, made
available under the License, as indicated by a copyright notice that is included
in or attached to the work (an example is provided in the Appendix below).

“Derivative Works” shall mean any work, whether in Source or Object form, that
is based on (or derived from) the Work and for which the editorial revisions,
annotations, elaborations, or other modifications represent, as a whole, an
original work of authorship. For the purposes of this License, Derivative Works
shall not include works that remain separable from, or merely link (or bind by
name) to the interfaces of, the Work and Derivative Works thereof.

“Contribution” shall mean any work of authorship, including the original version
of the Work and any modifications or additions to that Work or Derivative Works
thereof, that is intentionally submitted to Licensor for inclusion in the Work
by the copyright owner or by an individual or Legal Entity authorized to submit
on behalf of the copyright owner. For the purposes of this definition,
“submitted” means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems, and
issue tracking systems that are managed by, or on behalf of, the Licensor for
the purpose of discussing and improving the Work, but excluding communication
that is conspicuously marked or otherwise designated in writing by the copyright
owner as “Not a Contribution.”

“Contributor” shall mean Licensor and any individual or Legal Entity on behalf
of whom a Contribution has been received by Licensor and subsequently
incorporated within the Work.

#### 2. Grant of Copyright License

Subject to the terms and conditions of this License, each Contributor hereby
grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free,
irrevocable copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the Work and such
Derivative Works in Source or Object form.

#### 3. Grant of Patent License

Subject to the terms and conditions of this License, each Contributor hereby
grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free,
irrevocable (except as stated in this section) patent license to make, have
made, use, offer to sell, sell, import, and otherwise transfer the Work, where
such license applies only to those patent claims licensable by such Contributor
that are necessarily infringed by their Contribution(s) alone or by combination
of their Contribution(s) with the Work to which such Contribution(s) was
submitted. If You institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work or a
Contribution incorporated within the Work constitutes direct or contributory
patent infringement, then any patent licenses granted to You under this License
for that Work shall terminate as of the date such litigation is filed.

#### 4. Redistribution

You may reproduce and distribute copies of the Work or Derivative Works thereof
in any medium, with or without modifications, and in Source or Object form,
provided that You meet the following conditions:

* **(a)** You must give any other recipients of the Work or Derivative Works a copy of
this License; and
* **(b)** You must cause any modified files to carry prominent notices stating that You
changed the files; and
* **(c)** You must retain, in the Source form of any Derivative Works that You distribute,
all copyright, patent, trademark, and attribution notices from the Source form
of the Work, excluding those notices that do not pertain to any part of the
Derivative Works; and
* **(d)** If the Work includes a “NOTICE” text file as part of its distribution, then any
Derivative Works that You distribute must include a readable copy of the
attribution notices contained within such NOTICE file, excluding those notices
that do not pertain to any part of the Derivative Works, in at least one of the
following places: within a NOTICE text file distributed as part of the
Derivative Works; within the Source form or documentation, if provided along
with the Derivative Works; or, within a display generated by the Derivative
Works, if and wherever such third-party notices normally appear. The contents of
the NOTICE file are for informational purposes only and do not modify the
License. You may add Your own attribution notices within Derivative Works that
You distribute, alongside or as an addendum to the NOTICE text from the Work,
provided that such additional attribution notices cannot be construed as
modifying the License.

You may add Your own copyright statement to Your modifications and may provide
additional or different license terms and conditions for use, reproduction, or
distribution of Your modifications, or for any such Derivative Works as a whole,
provided Your use, reproduction, and distribution of the Work otherwise complies
with the conditions stated in this License.

#### 5. Submission of Contributions

Unless You explicitly state otherwise, any Contribution intentionally submitted
for inclusion in the Work by You to the Licensor shall be under the terms and
conditions of this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify the terms of
any separate license agreement you may have executed with Licensor regarding
such Contributions.

#### 6. Trademarks

This License does not grant permission to use the trade names, trademarks,
service marks, or product names of the Licensor, except as required for
reasonable and customary use in describing the origin of the Work and
reproducing the content of the NOTICE file.

#### 7. Disclaimer of Warranty

Unless required by applicable law or agreed to in writing, Licensor provides the
Work (and each Contributor provides its Contributions) on an “AS IS” BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied,
including, without limitation, any warranties or conditions of TITLE,
NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are
solely responsible for determining the appropriateness of using or
redistributing the Work and assume any risks associated with Your exercise of
permissions under this License.

#### 8. Limitation of Liability

In no event and under no legal theory, whether in tort (including negligence),
contract, or otherwise, unless required by applicable law (such as deliberate
and grossly negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special, incidental,
or consequential damages of any character arising as a result of this License or
out of the use or inability to use the Work (including but not limited to
damages for loss of goodwill, work stoppage, computer failure or malfunction, or
any and all other commercial damages or losses), even if such Contributor has
been advised of the possibility of such damages.

#### 9. Accepting Warranty or Additional Liability

While redistributing the Work or Derivative Works thereof, You may choose to
offer, and charge a fee for, acceptance of support, warranty, indemnity, or
other liability obligations and/or rights consistent with this License. However,
in accepting such obligations, You may act only on Your own behalf and on Your
sole responsibility, not on behalf of any other Contributor, and only if You
agree to indemnify, defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason of your
accepting any such warranty or additional liability.
MANIFEST.in
@@ -1,4 +1,6 @@
recursive-include awx *.py
recursive-include awx *.po
recursive-include awx *.mo
recursive-include awx/static *
recursive-include awx/templates *.html
recursive-include awx/api/templates *.md *.html
Makefile (48 lines changed)
@@ -77,13 +77,15 @@ I18N_FLAG_FILE = .i18n_built
    receiver test test_unit test_ansible test_coverage coverage_html \
    dev_build release_build release_clean sdist \
    ui-docker-machine ui-docker ui-release ui-devel \
    ui-test ui-deps ui-test-ci ui-test-saucelabs VERSION
    ui-test ui-deps ui-test-ci VERSION

# remove ui build artifacts
clean-ui:
    rm -rf awx/ui/static/
    rm -rf awx/ui/node_modules/
    rm -rf awx/ui/coverage/
    rm -rf awx/ui/test/unit/reports/
    rm -rf awx/ui/test/spec/reports/
    rm -rf awx/ui/test/e2e/reports/
    rm -rf awx/ui/client/languages/
    rm -f $(UI_DEPS_FLAG_FILE)
    rm -f $(UI_RELEASE_FLAG_FILE)
@@ -201,8 +203,11 @@ develop:
    fi

version_file:
    mkdir -p /var/lib/awx/
    python -c "import awx as awx; print awx.__version__" > /var/lib/awx/.awx_version
    mkdir -p /var/lib/awx/; \
    if [ "$(VENV_BASE)" ]; then \
        . $(VENV_BASE)/awx/bin/activate; \
    fi; \
    python -c "import awx as awx; print awx.__version__" > /var/lib/awx/.awx_version; \

# Do any one-time init tasks.
comma := ,
@@ -282,7 +287,7 @@ flower:
    @if [ "$(VENV_BASE)" ]; then \
        . $(VENV_BASE)/awx/bin/activate; \
    fi; \
    $(PYTHON) manage.py celery flower --address=0.0.0.0 --port=5555 --broker=amqp://guest:guest@$(RABBITMQ_HOST):5672//
    celery flower --address=0.0.0.0 --port=5555 --broker=amqp://guest:guest@$(RABBITMQ_HOST):5672//

collectstatic:
    @if [ "$(VENV_BASE)" ]; then \
@@ -320,8 +325,7 @@ celeryd:
    @if [ "$(VENV_BASE)" ]; then \
        . $(VENV_BASE)/awx/bin/activate; \
    fi; \
    $(PYTHON) manage.py celeryd -l DEBUG -B -Ofair --autoreload --autoscale=100,4 --schedule=$(CELERY_SCHEDULE_FILE) -Q tower_scheduler,tower_broadcast_all,$(COMPOSE_HOST),$(AWX_GROUP_QUEUES) -n celery@$(COMPOSE_HOST)
    #$(PYTHON) manage.py celery multi show projects jobs default -l DEBUG -Q:projects projects -Q:jobs jobs -Q:default default -c:projects 1 -c:jobs 3 -c:default 3 -Ofair -B --schedule=$(CELERY_SCHEDULE_FILE)
    celery worker -A awx -l DEBUG -B -Ofair --autoscale=100,4 --schedule=$(CELERY_SCHEDULE_FILE) -Q tower_scheduler,tower_broadcast_all,$(COMPOSE_HOST),$(AWX_GROUP_QUEUES) -n celery@$(COMPOSE_HOST)

# Run to start the zeromq callback receiver
receiver:
@@ -330,18 +334,18 @@ receiver:
    fi; \
    $(PYTHON) manage.py run_callback_receiver

socketservice:
    @if [ "$(VENV_BASE)" ]; then \
        . $(VENV_BASE)/awx/bin/activate; \
    fi; \
    $(PYTHON) manage.py run_socketio_service

nginx:
    nginx -g "daemon off;"

rdb:
    $(PYTHON) tools/rdb.py

jupyter:
    @if [ "$(VENV_BASE)" ]; then \
        . $(VENV_BASE)/awx/bin/activate; \
    fi; \
    $(MANAGEMENT_COMMAND) shell_plus --notebook

reports:
    mkdir -p $@

@@ -362,7 +366,7 @@ pylint: reports

check: flake8 pep8 # pyflakes pylint

TEST_DIRS ?= awx/main/tests awx/conf/tests awx/sso/tests
TEST_DIRS ?= awx/main/tests/unit awx/main/tests/functional awx/conf/tests awx/sso/tests
# Run all API unit tests.
test: test_ansible
    @if [ "$(VENV_BASE)" ]; then \
@@ -495,15 +499,14 @@ ui: clean-ui ui-devel

ui-test-ci: $(UI_DEPS_FLAG_FILE)
    $(NPM_BIN) --prefix awx/ui run test:ci
    $(NPM_BIN) --prefix awx/ui run unit

testjs_ci:
    echo "Update UI unittests later" #ui-test-ci

jshint: $(UI_DEPS_FLAG_FILE)
    $(NPM_BIN) run --prefix awx/ui jshint

ui-test-saucelabs: $(UI_DEPS_FLAG_FILE)
    $(NPM_BIN) --prefix awx/ui run test:saucelabs
    $(NPM_BIN) run --prefix awx/ui lint

# END UI TASKS
# --------------------------------------
@@ -548,6 +551,7 @@ docker-isolated:
    TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose -f tools/docker-compose.yml -f tools/docker-isolated-override.yml create
    docker start tools_awx_1
    docker start tools_isolated_1
    echo "__version__ = '`python setup.py --version`'" | docker exec -i tools_isolated_1 /bin/bash -c "cat > /venv/awx/lib/python2.7/site-packages/awx.py"
    if [ "`docker exec -i -t tools_isolated_1 cat /root/.ssh/authorized_keys`" == "`docker exec -t tools_awx_1 cat /root/.ssh/id_rsa.pub`" ]; then \
        echo "SSH keys already copied to isolated instance"; \
    else \
@@ -606,11 +610,5 @@ psql-container:
    docker run -it --net tools_default --rm postgres:9.4.1 sh -c 'exec psql -h "postgres" -p "5432" -U postgres'

VERSION:
    echo $(VERSION_TARGET) > $@

production-openshift-image: sdist
    cat installer/openshift/Dockerfile | sed "s/{{ version }}/$(VERSION_TARGET)/g" | sed "s/{{ tar }}/$(SDIST_TAR_FILE)/g" > ./Dockerfile.production
    cp installer/openshift/Dockerfile.celery ./Dockerfile.celery.production
    docker build -t awx_web -f ./Dockerfile.production .
    docker build -t awx_task -f ./Dockerfile.celery.production .

    @echo $(VERSION_TARGET) > $@
    @echo "awx: $(VERSION_TARGET)"

README.md (48 lines changed)
@@ -1,16 +1,46 @@
[](https://requires.io/github/ansible/awx/requirements/?branch=devel)
[](https://app.shippable.com/github/ansible/awx)

AWX
=============
===

AWX provides a web-based user interface, REST API and task engine built on top of
Ansible.
AWX provides a web-based user interface, REST API, and task engine built on top of [Ansible](https://github.com/ansible/ansible). It is the upstream project for [Tower](https://www.ansible.com/tower), a commercial derivative of AWX.

Resources
---------
To install AWX, please view the [Install guide](./INSTALL.md).

Refer to `CONTRIBUTING.md` to get started developing, testing and building AWX.
To learn more about using AWX, and Tower, view the [Tower docs site](http://docs.ansible.com/ansible-tower/index.html).

Refer to `INSTALL.md` to get started deploying AWX.
The AWX Project Frequently Asked Questions can be found [here](https://www.ansible.com/awx-project-faq).

Contributing
------------

- Refer to the [Contributing guide](./CONTRIBUTING.md) to get started developing, testing, and building AWX.
- All code submissions are done through pull requests against the `devel` branch.
- All contributors must use `git commit --signoff` for any commit to be merged, and agree that usage of --signoff constitutes agreement with the terms of [DCO 1.1](./DCO_1_1.md).
- Take care to make sure no merge commits are in the submission, and use `git rebase` vs `git merge` for this reason.
- If submitting a large code change, it's a good idea to join the `#ansible-awx` channel on irc.freenode.net, and talk about what you would like to do or add first. This not only helps everyone know what's going on, it also helps save time and effort, if the community decides some changes are needed.

Reporting Issues
----------------

If you're experiencing a problem, we encourage you to open an issue, and share your feedback. But before opening a new issue, we ask that you please take a look at our [Issues guide](./ISSUES.md).

Code of Conduct
---------------

We ask all of our community members and contributors to adhere to the [Ansible code of conduct](http://docs.ansible.com/ansible/latest/community/code_of_conduct.html). If you have questions, or need assistance, please reach out to our community team at [codeofconduct@ansible.com](mailto:codeofconduct@ansible.com).

Get Involved
------------

We welcome your feedback and ideas. Here's how to reach us:

- Join the `#ansible-awx` channel on irc.freenode.net
- Join the [mailing list](https://groups.google.com/forum/#!forum/awx-project)
- [Open an Issue](https://github.com/ansible/awx/issues)

License
-------

[Apache v2](./LICENSE.md)

Refer to `LOCALIZATION.md` for translation and localization help.

awx/__init__.py

@@ -1,15 +1,17 @@
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved.
from __future__ import absolute_import, unicode_literals

import os
import sys
import warnings

from pkg_resources import get_distribution
from .celery import app as celery_app

__version__ = get_distribution('awx').version

__all__ = ['__version__']
__all__ = ['__version__', 'celery_app']

# Check for the presence/absence of "devonly" module to determine if running
# from a source code checkout or release package.
awx/api/authentication.py

@@ -17,7 +17,7 @@ from rest_framework import exceptions
from rest_framework import HTTP_HEADER_ENCODING

# AWX
from awx.main.models import UnifiedJob, AuthToken
from awx.main.models import AuthToken

logger = logging.getLogger('awx.api.authentication')

@@ -137,29 +137,3 @@ class LoggedBasicAuthentication(authentication.BasicAuthentication):
        if not settings.AUTH_BASIC_ENABLED:
            return
        return super(LoggedBasicAuthentication, self).authenticate_header(request)


class TaskAuthentication(authentication.BaseAuthentication):
    '''
    Custom authentication used for views accessed by the inventory and callback
    scripts when running a task.
    '''

    model = None

    def authenticate(self, request):
        auth = authentication.get_authorization_header(request).split()
        if len(auth) != 2 or auth[0].lower() != 'token' or '-' not in auth[1]:
            return None
        pk, key = auth[1].split('-', 1)
        try:
            unified_job = UnifiedJob.objects.get(pk=pk, status='running')
        except UnifiedJob.DoesNotExist:
            return None
        token = unified_job.task_auth_token
        if auth[1] != token:
            raise exceptions.AuthenticationFailed(_('Invalid task token'))
        return (None, token)

    def authenticate_header(self, request):
        return 'Token'
awx/api/filters.py

@@ -22,6 +22,7 @@ from rest_framework.filters import BaseFilterBackend

# AWX
from awx.main.utils import get_type_for_model, to_python_boolean
from awx.main.utils.db import get_all_field_names
from awx.main.models.credential import CredentialType
from awx.main.models.rbac import RoleAncestorEntry

@@ -70,7 +71,7 @@ class TypeFilterBackend(BaseFilterBackend):
                types_map[ct_type] = ct.pk
            model = queryset.model
            model_type = get_type_for_model(model)
            if 'polymorphic_ctype' in model._meta.get_all_field_names():
            if 'polymorphic_ctype' in get_all_field_names(model):
                types_pks = set([v for k, v in types_map.items() if k in types])
                queryset = queryset.filter(polymorphic_ctype_id__in=types_pks)
            elif model_type in types:

@@ -119,7 +120,7 @@ class FieldLookupBackend(BaseFilterBackend):
                'last_updated': 'last_job_run',
            }.get(name, name)

            if name == 'type' and 'polymorphic_ctype' in model._meta.get_all_field_names():
            if name == 'type' and 'polymorphic_ctype' in get_all_field_names(model):
                name = 'polymorphic_ctype'
                new_parts.append('polymorphic_ctype__model')
            else:

@@ -136,7 +137,7 @@ class FieldLookupBackend(BaseFilterBackend):
                    new_parts.pop()
                    new_parts.append(name_alt)
                else:
                    field = model._meta.get_field_by_name(name)[0]
                    field = model._meta.get_field(name)
                if isinstance(field, ForeignObjectRel) and getattr(field.field, '__prevent_search__', False):
                    raise PermissionDenied(_('Filtering on %s is not allowed.' % name))
                elif getattr(field, '__prevent_search__', False):

@@ -268,8 +269,10 @@ class FieldLookupBackend(BaseFilterBackend):

        # Make legacy v1 Job/Template fields work for backwards compatibility
        # TODO: remove after API v1 deprecation period
        if queryset.model._meta.object_name in ('JobTemplate', 'Job') and key in ('cloud_credential', 'network_credential'):
            key = 'extra_credentials'
        if queryset.model._meta.object_name in ('JobTemplate', 'Job') and key in (
            'credential', 'vault_credential', 'cloud_credential', 'network_credential'
        ):
            key = 'credentials'

        # Make legacy v1 Credential fields work for backwards compatibility
        # TODO: remove after API v1 deprecation period

@@ -375,7 +378,7 @@ class OrderByBackend(BaseFilterBackend):
        # given the limited number of views with multiple types,
        # sorting on polymorphic_ctype.model is effectively the same.
        new_order_by = []
        if 'polymorphic_ctype' in queryset.model._meta.get_all_field_names():
        if 'polymorphic_ctype' in get_all_field_names(queryset.model):
            for field in order_by:
                if field == 'type':
                    new_order_by.append('polymorphic_ctype__model')
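
These hunks swap Django's removed `Model._meta.get_all_field_names()` for a helper in `awx.main.utils.db`. The helper's body is not shown in this diff; a minimal sketch of what it plausibly looks like, following Django's own documented migration recipe for the removal of that API in Django 1.10:

from itertools import chain

def get_all_field_names(model):
    # Documented Django stand-in for the removed
    # Model._meta.get_all_field_names(), built on _meta.get_fields().
    return list(set(chain.from_iterable(
        (field.name, field.attname) if hasattr(field, 'attname') else (field.name,)
        for field in model._meta.get_fields()
        # Skip reverse relations with no concrete model, per the recipe.
        if not (field.many_to_one and field.related_model is None)
    )))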

awx/api/generics.py

@@ -31,6 +31,7 @@ from rest_framework import views
from awx.api.filters import FieldLookupBackend
from awx.main.models import *  # noqa
from awx.main.utils import *  # noqa
from awx.main.utils.db import get_all_field_names
from awx.api.serializers import ResourceAccessListElementSerializer
from awx.api.versioning import URLPathVersioning, get_request_version
from awx.api.metadata import SublistAttachDetatchMetadata

@@ -188,6 +189,7 @@ class APIView(views.APIView):
            'new_in_300': getattr(self, 'new_in_300', False),
            'new_in_310': getattr(self, 'new_in_310', False),
            'new_in_320': getattr(self, 'new_in_320', False),
            'new_in_330': getattr(self, 'new_in_330', False),
            'new_in_api_v2': getattr(self, 'new_in_api_v2', False),
            'deprecated': getattr(self, 'deprecated', False),
        }

@@ -321,8 +323,7 @@ class ListAPIView(generics.ListAPIView, GenericAPIView):
        return page

    def get_description_context(self):
        opts = self.model._meta
        if 'username' in opts.get_all_field_names():
        if 'username' in get_all_field_names(self.model):
            order_field = 'username'
        else:
            order_field = 'name'
awx/api/metadata.py

@@ -89,7 +89,7 @@ class Metadata(metadata.SimpleMetadata):
        # Special handling of inventory source_region choices that vary based on
        # selected inventory source.
        if field.field_name == 'source_regions':
            for cp in ('azure', 'ec2', 'gce'):
            for cp in ('azure_rm', 'ec2', 'gce'):
                get_regions = getattr(InventorySource, 'get_%s_region_choices' % cp)
                field_info['%s_region_choices' % cp] = get_regions()
awx/api/permissions.py

@@ -34,7 +34,7 @@ class ModelAccessPermission(permissions.BasePermission):

    def check_get_permissions(self, request, view, obj=None):
        if hasattr(view, 'parent_model'):
            parent_obj = get_object_or_400(view.parent_model, pk=view.kwargs['pk'])
            parent_obj = view.get_parent_object()
            if not check_user_access(request.user, view.parent_model, 'read',
                                     parent_obj):
                return False

@@ -44,22 +44,26 @@ class ModelAccessPermission(permissions.BasePermission):

    def check_post_permissions(self, request, view, obj=None):
        if hasattr(view, 'parent_model'):
            parent_obj = get_object_or_400(view.parent_model, pk=view.kwargs['pk'])
            parent_obj = view.get_parent_object()
            if not check_user_access(request.user, view.parent_model, 'read',
                                     parent_obj):
                return False
            if hasattr(view, 'parent_key'):
                if not check_user_access(request.user, view.model, 'add', {view.parent_key: parent_obj.pk}):
                if not check_user_access(request.user, view.model, 'add', {view.parent_key: parent_obj}):
                    return False
            return True
        elif getattr(view, 'is_job_start', False):
        elif hasattr(view, 'obj_permission_type'):
            # Generic object-centric view; no permission check is needed
            # until an object is available.
            if not obj:
                return True
            return check_user_access(request.user, view.model, 'start', obj)
        elif getattr(view, 'is_job_cancel', False):
            if not obj:
                return True
            return check_user_access(request.user, view.model, 'cancel', obj)
            # Permission check that happens when get_object() is called
            extra_kwargs = {}
            if view.obj_permission_type == 'admin':
                extra_kwargs['data'] = {}
            return check_user_access(
                request.user, view.model, view.obj_permission_type, obj,
                **extra_kwargs
            )
        else:
            if obj:
                return True
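
For orientation, and not part of this diff: a view opts into the new generic branch by declaring `obj_permission_type`. Only the attribute name comes from the hunk above; the view class below is an illustrative sketch.

from awx.api.generics import GenericAPIView
from awx.main.models import Job

class JobStart(GenericAPIView):
    # Illustrative example class; 'start' mirrors the old is_job_start flag.
    model = Job
    obj_permission_type = 'start'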

awx/api/renderers.py

@@ -48,7 +48,8 @@ class BrowsableAPIRenderer(renderers.BrowsableAPIRenderer):
        obj = getattr(view, 'object', None)
        if obj is None and hasattr(view, 'get_object') and hasattr(view, 'retrieve'):
            try:
                obj = view.get_object()
                view.object = view.get_object()
                obj = view.object
            except Exception:
                obj = None
        with override_method(view, request, method) as request:
File diff suppressed because it is too large.
@@ -10,4 +10,5 @@
{% if new_in_300 %}> _Added in Ansible Tower 3.0.0_{% endif %}
{% if new_in_310 %}> _New in Ansible Tower 3.1.0_{% endif %}
{% if new_in_320 %}> _New in Ansible Tower 3.2.0_{% endif %}
{% if new_in_330 %}> _New in Ansible Tower 3.3.0_{% endif %}
{% endif %}
12  awx/api/templates/api/job_create_schedule.md (new file)

@@ -0,0 +1,12 @@
Create a schedule based on a job:

Make a POST request to this endpoint to create a schedule that launches
the job template that launched this job, and uses the same
parameters that the job was launched with. These parameters include all
"prompted" resources such as `extra_vars`, `inventory`, `limit`, etc.

Jobs that were launched with user-provided passwords cannot have a schedule
created from them.

Make a GET request for information about what those prompts are and
whether or not a schedule can be created.
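
A quick usage sketch against this endpoint (not part of the diff): the URL path is inferred from the template's file name, and the host, job id, credentials, schedule name, and `rrule` value are all placeholders; take the actual response fields from the GET output rather than from this sketch.

import requests

base = 'https://awx.example.com/api/v2'   # placeholder host
auth = ('admin', 'password')              # placeholder credentials

# Ask first which prompts apply and whether a schedule can be created.
info = requests.get(base + '/jobs/42/create_schedule/', auth=auth)
print(info.json())

# Then POST to create the schedule; name and rrule are illustrative.
resp = requests.post(
    base + '/jobs/42/create_schedule/',
    json={'name': 'Nightly run',
          'rrule': 'DTSTART:20180101T000000Z RRULE:FREQ=DAILY;INTERVAL=1'},
    auth=auth,
)
print(resp.status_code)  # 201 on success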

@@ -26,9 +26,6 @@ The response will include the following fields:
  job_template (array, read-only)
* `survey_enabled`: Flag indicating whether the job_template has an enabled
  survey (boolean, read-only)
* `credential_needed_to_start`: Flag indicating the presence of a credential
  associated with the job template. If not, one should be supplied when
  launching the job (boolean, read-only)
* `inventory_needed_to_start`: Flag indicating the presence of an inventory
  associated with the job template. If not, one should be supplied when
  launching the job (boolean, read-only)

@@ -36,9 +33,8 @@ The response will include the following fields:
Make a POST request to this resource to launch the job_template. If any
passwords, inventory, or extra variables (extra_vars) are required, they must
be passed via POST data, with extra_vars given as a YAML or JSON string and
escaped parentheses. If `credential_needed_to_start` is `True` then the
`credential` field is required, and if `inventory_needed_to_start` is
`True` then the `inventory` is required as well.
escaped parentheses. If `inventory_needed_to_start` is `True` then the
`inventory` is required.

If successful, the response status code will be 201. If any required passwords
are not provided, a 400 status code will be returned. If the job cannot be
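
A corresponding usage sketch for the launch endpoint described above (again not from the diff; the host, job template id, and values are placeholders):

import requests

base = 'https://awx.example.com/api/v2'   # placeholder host
auth = ('admin', 'password')              # placeholder credentials

resp = requests.post(
    base + '/job_templates/7/launch/',
    json={
        'inventory': 3,                         # required when inventory_needed_to_start is true
        'extra_vars': '{"greeting": "hello"}',  # YAML or JSON string, as described above
    },
    auth=auth,
)
print(resp.status_code)  # expect 201 on success, 400 for missing passwords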

@@ -1,19 +0,0 @@
ANSIBLE TOWER BY RED HAT END USER LICENSE AGREEMENT

This end user license agreement (“EULA”) governs the use of the Ansible Tower software and any related updates, upgrades, versions, appearance, structure and organization (the “Ansible Tower Software”), regardless of the delivery mechanism.

1. License Grant. Subject to the terms of this EULA, Red Hat, Inc. and its affiliates (“Red Hat”) grant to you (“You”) a non-transferable, non-exclusive, worldwide, non-sublicensable, limited, revocable license to use the Ansible Tower Software for the term of the associated Red Hat Software Subscription(s) and in a quantity equal to the number of Red Hat Software Subscriptions purchased from Red Hat for the Ansible Tower Software (“License”), each as set forth on the applicable Red Hat ordering document. You acquire only the right to use the Ansible Tower Software and do not acquire any rights of ownership. Red Hat reserves all rights to the Ansible Tower Software not expressly granted to You. This License grant pertains solely to Your use of the Ansible Tower Software and is not intended to limit Your rights under, or grant You rights that supersede, the license terms of any software packages which may be made available with the Ansible Tower Software that are subject to an open source software license.

2. Intellectual Property Rights. Title to the Ansible Tower Software and each component, copy and modification, including all derivative works whether made by Red Hat, You or on Red Hat's behalf, including those made at Your suggestion and all associated intellectual property rights, are and shall remain the sole and exclusive property of Red Hat and/or its licensors. The License does not authorize You (nor may You allow any third party, specifically non-employees of Yours) to: (a) copy, distribute, reproduce, use or allow third party access to the Ansible Tower Software except as expressly authorized hereunder; (b) decompile, disassemble, reverse engineer, translate, modify, convert or apply any procedure or process to the Ansible Tower Software in order to ascertain, derive, and/or appropriate for any reason or purpose, including the Ansible Tower Software source code or source listings or any trade secret information or process contained in the Ansible Tower Software (except as permitted under applicable law); (c) execute or incorporate other software (except for approved software as appears in the Ansible Tower Software documentation or specifically approved by Red Hat in writing) into Ansible Tower Software, or create a derivative work of any part of the Ansible Tower Software; (d) remove any trademarks, trade names or titles, copyrights legends or any other proprietary marking on the Ansible Tower Software; (e) disclose the results of any benchmarking of the Ansible Tower Software (whether or not obtained with Red Hat’s assistance) to any third party; (f) attempt to circumvent any user limits or other license, timing or use restrictions that are built into, defined or agreed upon, regarding the Ansible Tower Software. You are hereby notified that the Ansible Tower Software may contain time-out devices, counter devices, and/or other devices intended to ensure the limits of the License will not be exceeded (“Limiting Devices”). If the Ansible Tower Software contains Limiting Devices, Red Hat will provide You materials necessary to use the Ansible Tower Software to the extent permitted. You may not tamper with or otherwise take any action to defeat or circumvent a Limiting Device or other control measure, including but not limited to, resetting the unit amount or using false host identification number for the purpose of extending any term of the License.

3. Evaluation Licenses. Unless You have purchased Ansible Tower Software Subscriptions from Red Hat or an authorized reseller under the terms of a commercial agreement with Red Hat, all use of the Ansible Tower Software shall be limited to testing purposes and not for production use (“Evaluation”). Unless otherwise agreed by Red Hat, Evaluation of the Ansible Tower Software shall be limited to an evaluation environment and the Ansible Tower Software shall not be used to manage any systems or virtual machines on networks being used in the operation of Your business or any other non-evaluation purpose. Unless otherwise agreed by Red Hat, You shall limit all Evaluation use to a single 30 day evaluation period and shall not download or otherwise obtain additional copies of the Ansible Tower Software or license keys for Evaluation.

4. Limited Warranty. Except as specifically stated in this Section 4, to the maximum extent permitted under applicable law, the Ansible Tower Software and the components are provided and licensed “as is” without warranty of any kind, expressed or implied, including the implied warranties of merchantability, non-infringement or fitness for a particular purpose. Red Hat warrants solely to You that the media on which the Ansible Tower Software may be furnished will be free from defects in materials and manufacture under normal use for a period of thirty (30) days from the date of delivery to You. Red Hat does not warrant that the functions contained in the Ansible Tower Software will meet Your requirements or that the operation of the Ansible Tower Software will be entirely error free, appear precisely as described in the accompanying documentation, or comply with regulatory requirements.

5. Limitation of Remedies and Liability. To the maximum extent permitted by applicable law, Your exclusive remedy under this EULA is to return any defective media within thirty (30) days of delivery along with a copy of Your payment receipt and Red Hat, at its option, will replace it or refund the money paid by You for the media. To the maximum extent permitted under applicable law, neither Red Hat nor any Red Hat authorized distributor will be liable to You for any incidental or consequential damages, including lost profits or lost savings arising out of the use or inability to use the Ansible Tower Software or any component, even if Red Hat or the authorized distributor has been advised of the possibility of such damages. In no event shall Red Hat's liability or an authorized distributor’s liability exceed the amount that You paid to Red Hat for the Ansible Tower Software during the twelve months preceding the first event giving rise to liability.

6. Export Control. In accordance with the laws of the United States and other countries, You represent and warrant that You: (a) understand that the Ansible Tower Software and its components may be subject to export controls under the U.S. Commerce Department’s Export Administration Regulations (“EAR”); (b) are not located in any country listed in Country Group E:1 in Supplement No. 1 to part 740 of the EAR; (c) will not export, re-export, or transfer the Ansible Tower Software to any prohibited destination or to any end user who has been prohibited from participating in US export transactions by any federal agency of the US government; (d) will not use or transfer the Ansible Tower Software for use in connection with the design, development or production of nuclear, chemical or biological weapons, or rocket systems, space launch vehicles, or sounding rockets or unmanned air vehicle systems; (e) understand and agree that if you are in the United States and you export or transfer the Ansible Tower Software to eligible end users, you will, to the extent required by EAR Section 740.17 obtain a license for such export or transfer and will submit semi-annual reports to the Commerce Department’s Bureau of Industry and Security, which include the name and address (including country) of each transferee; and (f) understand that countries including the United States may restrict the import, use, or export of encryption products (which may include the Ansible Tower Software) and agree that you shall be solely responsible for compliance with any such import, use, or export restrictions.

7. General. If any provision of this EULA is held to be unenforceable, that shall not affect the enforceability of the remaining provisions. This agreement shall be governed by the laws of the State of New York and of the United States, without regard to any conflict of laws provisions. The rights and obligations of the parties to this EULA shall not be governed by the United Nations Convention on the International Sale of Goods.

Copyright © 2015 Red Hat, Inc. All rights reserved. "Red Hat" and “Ansible Tower” are registered trademarks of Red Hat, Inc. All other trademarks are the property of their respective owners.
420  awx/api/urls.py (deleted)

@@ -1,420 +0,0 @@
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved.

# noqa

from django.conf.urls import include, patterns, url as original_url


def url(regex, view, kwargs=None, name=None, prefix=''):
    # Set default name from view name (if a string).
    if isinstance(view, basestring) and name is None:
        name = view
    return original_url(regex, view, kwargs, name, prefix)


organization_urls = patterns('awx.api.views',
    url(r'^$', 'organization_list'),
    url(r'^(?P<pk>[0-9]+)/$', 'organization_detail'),
    url(r'^(?P<pk>[0-9]+)/users/$', 'organization_users_list'),
    url(r'^(?P<pk>[0-9]+)/admins/$', 'organization_admins_list'),
    url(r'^(?P<pk>[0-9]+)/inventories/$', 'organization_inventories_list'),
    url(r'^(?P<pk>[0-9]+)/projects/$', 'organization_projects_list'),
    url(r'^(?P<pk>[0-9]+)/workflow_job_templates/$', 'organization_workflow_job_templates_list'),
    url(r'^(?P<pk>[0-9]+)/teams/$', 'organization_teams_list'),
    url(r'^(?P<pk>[0-9]+)/credentials/$', 'organization_credential_list'),
    url(r'^(?P<pk>[0-9]+)/activity_stream/$', 'organization_activity_stream_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates/$', 'organization_notification_templates_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_any/$', 'organization_notification_templates_any_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', 'organization_notification_templates_error_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', 'organization_notification_templates_success_list'),
    url(r'^(?P<pk>[0-9]+)/instance_groups/$', 'organization_instance_groups_list'),
    url(r'^(?P<pk>[0-9]+)/object_roles/$', 'organization_object_roles_list'),
    url(r'^(?P<pk>[0-9]+)/access_list/$', 'organization_access_list'),
)

user_urls = patterns('awx.api.views',
    url(r'^$', 'user_list'),
    url(r'^(?P<pk>[0-9]+)/$', 'user_detail'),
    url(r'^(?P<pk>[0-9]+)/teams/$', 'user_teams_list'),
    url(r'^(?P<pk>[0-9]+)/organizations/$', 'user_organizations_list'),
    url(r'^(?P<pk>[0-9]+)/admin_of_organizations/$', 'user_admin_of_organizations_list'),
    url(r'^(?P<pk>[0-9]+)/projects/$', 'user_projects_list'),
    url(r'^(?P<pk>[0-9]+)/credentials/$', 'user_credentials_list'),
    url(r'^(?P<pk>[0-9]+)/roles/$', 'user_roles_list'),
    url(r'^(?P<pk>[0-9]+)/activity_stream/$', 'user_activity_stream_list'),
    url(r'^(?P<pk>[0-9]+)/access_list/$', 'user_access_list'),
)

project_urls = patterns('awx.api.views',
    url(r'^$', 'project_list'),
    url(r'^(?P<pk>[0-9]+)/$', 'project_detail'),
    url(r'^(?P<pk>[0-9]+)/playbooks/$', 'project_playbooks'),
    url(r'^(?P<pk>[0-9]+)/inventories/$', 'project_inventories'),
    url(r'^(?P<pk>[0-9]+)/scm_inventory_sources/$', 'project_scm_inventory_sources'),
    url(r'^(?P<pk>[0-9]+)/teams/$', 'project_teams_list'),
    url(r'^(?P<pk>[0-9]+)/update/$', 'project_update_view'),
    url(r'^(?P<pk>[0-9]+)/project_updates/$', 'project_updates_list'),
    url(r'^(?P<pk>[0-9]+)/activity_stream/$', 'project_activity_stream_list'),
    url(r'^(?P<pk>[0-9]+)/schedules/$', 'project_schedules_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_any/$', 'project_notification_templates_any_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', 'project_notification_templates_error_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', 'project_notification_templates_success_list'),
    url(r'^(?P<pk>[0-9]+)/object_roles/$', 'project_object_roles_list'),
    url(r'^(?P<pk>[0-9]+)/access_list/$', 'project_access_list'),
)

project_update_urls = patterns('awx.api.views',
    url(r'^$', 'project_update_list'),
    url(r'^(?P<pk>[0-9]+)/$', 'project_update_detail'),
    url(r'^(?P<pk>[0-9]+)/cancel/$', 'project_update_cancel'),
    url(r'^(?P<pk>[0-9]+)/stdout/$', 'project_update_stdout'),
    url(r'^(?P<pk>[0-9]+)/scm_inventory_updates/$', 'project_update_scm_inventory_updates'),
    url(r'^(?P<pk>[0-9]+)/notifications/$', 'project_update_notifications_list'),
)

team_urls = patterns('awx.api.views',
    url(r'^$', 'team_list'),
    url(r'^(?P<pk>[0-9]+)/$', 'team_detail'),
    url(r'^(?P<pk>[0-9]+)/projects/$', 'team_projects_list'),
    url(r'^(?P<pk>[0-9]+)/users/$', 'team_users_list'),
    url(r'^(?P<pk>[0-9]+)/credentials/$', 'team_credentials_list'),
    url(r'^(?P<pk>[0-9]+)/roles/$', 'team_roles_list'),
    url(r'^(?P<pk>[0-9]+)/object_roles/$', 'team_object_roles_list'),
    url(r'^(?P<pk>[0-9]+)/activity_stream/$', 'team_activity_stream_list'),
    url(r'^(?P<pk>[0-9]+)/access_list/$', 'team_access_list'),
)

inventory_urls = patterns('awx.api.views',
    url(r'^$', 'inventory_list'),
    url(r'^(?P<pk>[0-9]+)/$', 'inventory_detail'),
    url(r'^(?P<pk>[0-9]+)/hosts/$', 'inventory_hosts_list'),
    url(r'^(?P<pk>[0-9]+)/groups/$', 'inventory_groups_list'),
    url(r'^(?P<pk>[0-9]+)/root_groups/$', 'inventory_root_groups_list'),
    url(r'^(?P<pk>[0-9]+)/variable_data/$', 'inventory_variable_data'),
    url(r'^(?P<pk>[0-9]+)/script/$', 'inventory_script_view'),
    url(r'^(?P<pk>[0-9]+)/tree/$', 'inventory_tree_view'),
    url(r'^(?P<pk>[0-9]+)/inventory_sources/$', 'inventory_inventory_sources_list'),
    url(r'^(?P<pk>[0-9]+)/update_inventory_sources/$', 'inventory_inventory_sources_update'),
    url(r'^(?P<pk>[0-9]+)/activity_stream/$', 'inventory_activity_stream_list'),
    url(r'^(?P<pk>[0-9]+)/job_templates/$', 'inventory_job_template_list'),
    url(r'^(?P<pk>[0-9]+)/ad_hoc_commands/$', 'inventory_ad_hoc_commands_list'),
    url(r'^(?P<pk>[0-9]+)/access_list/$', 'inventory_access_list'),
    url(r'^(?P<pk>[0-9]+)/object_roles/$', 'inventory_object_roles_list'),
    url(r'^(?P<pk>[0-9]+)/instance_groups/$', 'inventory_instance_groups_list'),
    #url(r'^(?P<pk>[0-9]+)/single_fact/$', 'inventory_single_fact_view'),
)

host_urls = patterns('awx.api.views',
    url(r'^$', 'host_list'),
    url(r'^(?P<pk>[0-9]+)/$', 'host_detail'),
    url(r'^(?P<pk>[0-9]+)/variable_data/$', 'host_variable_data'),
    url(r'^(?P<pk>[0-9]+)/groups/$', 'host_groups_list'),
    url(r'^(?P<pk>[0-9]+)/all_groups/$', 'host_all_groups_list'),
    url(r'^(?P<pk>[0-9]+)/job_events/', 'host_job_events_list'),
    url(r'^(?P<pk>[0-9]+)/job_host_summaries/$', 'host_job_host_summaries_list'),
    url(r'^(?P<pk>[0-9]+)/activity_stream/$', 'host_activity_stream_list'),
    url(r'^(?P<pk>[0-9]+)/inventory_sources/$', 'host_inventory_sources_list'),
    url(r'^(?P<pk>[0-9]+)/smart_inventories/$', 'host_smart_inventories_list'),
    url(r'^(?P<pk>[0-9]+)/ad_hoc_commands/$', 'host_ad_hoc_commands_list'),
    url(r'^(?P<pk>[0-9]+)/ad_hoc_command_events/$', 'host_ad_hoc_command_events_list'),
    #url(r'^(?P<pk>[0-9]+)/single_fact/$', 'host_single_fact_view'),
    url(r'^(?P<pk>[0-9]+)/fact_versions/$', 'host_fact_versions_list'),
    url(r'^(?P<pk>[0-9]+)/fact_view/$', 'host_fact_compare_view'),
    url(r'^(?P<pk>[0-9]+)/insights/$', 'host_insights'),
)

group_urls = patterns('awx.api.views',
    url(r'^$', 'group_list'),
    url(r'^(?P<pk>[0-9]+)/$', 'group_detail'),
    url(r'^(?P<pk>[0-9]+)/children/$', 'group_children_list'),
    url(r'^(?P<pk>[0-9]+)/hosts/$', 'group_hosts_list'),
    url(r'^(?P<pk>[0-9]+)/all_hosts/$', 'group_all_hosts_list'),
    url(r'^(?P<pk>[0-9]+)/variable_data/$', 'group_variable_data'),
    url(r'^(?P<pk>[0-9]+)/job_events/$', 'group_job_events_list'),
    url(r'^(?P<pk>[0-9]+)/job_host_summaries/$', 'group_job_host_summaries_list'),
    url(r'^(?P<pk>[0-9]+)/potential_children/$', 'group_potential_children_list'),
    url(r'^(?P<pk>[0-9]+)/activity_stream/$', 'group_activity_stream_list'),
    url(r'^(?P<pk>[0-9]+)/inventory_sources/$', 'group_inventory_sources_list'),
    url(r'^(?P<pk>[0-9]+)/ad_hoc_commands/$', 'group_ad_hoc_commands_list'),
    #url(r'^(?P<pk>[0-9]+)/single_fact/$', 'group_single_fact_view'),
)

inventory_source_urls = patterns('awx.api.views',
    url(r'^$', 'inventory_source_list'),
    url(r'^(?P<pk>[0-9]+)/$', 'inventory_source_detail'),
    url(r'^(?P<pk>[0-9]+)/update/$', 'inventory_source_update_view'),
    url(r'^(?P<pk>[0-9]+)/inventory_updates/$', 'inventory_source_updates_list'),
    url(r'^(?P<pk>[0-9]+)/activity_stream/$', 'inventory_source_activity_stream_list'),
    url(r'^(?P<pk>[0-9]+)/schedules/$', 'inventory_source_schedules_list'),
    url(r'^(?P<pk>[0-9]+)/groups/$', 'inventory_source_groups_list'),
    url(r'^(?P<pk>[0-9]+)/hosts/$', 'inventory_source_hosts_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_any/$', 'inventory_source_notification_templates_any_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', 'inventory_source_notification_templates_error_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', 'inventory_source_notification_templates_success_list'),
)

inventory_update_urls = patterns('awx.api.views',
    url(r'^$', 'inventory_update_list'),
    url(r'^(?P<pk>[0-9]+)/$', 'inventory_update_detail'),
    url(r'^(?P<pk>[0-9]+)/cancel/$', 'inventory_update_cancel'),
    url(r'^(?P<pk>[0-9]+)/stdout/$', 'inventory_update_stdout'),
    url(r'^(?P<pk>[0-9]+)/notifications/$', 'inventory_update_notifications_list'),
)

inventory_script_urls = patterns('awx.api.views',
    url(r'^$', 'inventory_script_list'),
    url(r'^(?P<pk>[0-9]+)/$', 'inventory_script_detail'),
    url(r'^(?P<pk>[0-9]+)/object_roles/$', 'inventory_script_object_roles_list'),
)

credential_type_urls = patterns('awx.api.views',
    url(r'^$', 'credential_type_list'),
    url(r'^(?P<pk>[0-9]+)/$', 'credential_type_detail'),
    url(r'^(?P<pk>[0-9]+)/credentials/$', 'credential_type_credential_list'),
    url(r'^(?P<pk>[0-9]+)/activity_stream/$', 'credential_type_activity_stream_list'),
)

credential_urls = patterns('awx.api.views',
    url(r'^$', 'credential_list'),
    url(r'^(?P<pk>[0-9]+)/activity_stream/$', 'credential_activity_stream_list'),
    url(r'^(?P<pk>[0-9]+)/$', 'credential_detail'),
    url(r'^(?P<pk>[0-9]+)/access_list/$', 'credential_access_list'),
    url(r'^(?P<pk>[0-9]+)/object_roles/$', 'credential_object_roles_list'),
    url(r'^(?P<pk>[0-9]+)/owner_users/$', 'credential_owner_users_list'),
    url(r'^(?P<pk>[0-9]+)/owner_teams/$', 'credential_owner_teams_list'),
    # See also credentials resources on users/teams.
)

role_urls = patterns('awx.api.views',
    url(r'^$', 'role_list'),
    url(r'^(?P<pk>[0-9]+)/$', 'role_detail'),
    url(r'^(?P<pk>[0-9]+)/users/$', 'role_users_list'),
    url(r'^(?P<pk>[0-9]+)/teams/$', 'role_teams_list'),
    url(r'^(?P<pk>[0-9]+)/parents/$', 'role_parents_list'),
    url(r'^(?P<pk>[0-9]+)/children/$', 'role_children_list'),
)

job_template_urls = patterns('awx.api.views',
    url(r'^$', 'job_template_list'),
    url(r'^(?P<pk>[0-9]+)/$', 'job_template_detail'),
    url(r'^(?P<pk>[0-9]+)/launch/$', 'job_template_launch'),
    url(r'^(?P<pk>[0-9]+)/jobs/$', 'job_template_jobs_list'),
    url(r'^(?P<pk>[0-9]+)/callback/$', 'job_template_callback'),
    url(r'^(?P<pk>[0-9]+)/schedules/$', 'job_template_schedules_list'),
    url(r'^(?P<pk>[0-9]+)/survey_spec/$', 'job_template_survey_spec'),
    url(r'^(?P<pk>[0-9]+)/activity_stream/$', 'job_template_activity_stream_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_any/$', 'job_template_notification_templates_any_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', 'job_template_notification_templates_error_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', 'job_template_notification_templates_success_list'),
    url(r'^(?P<pk>[0-9]+)/instance_groups/$', 'job_template_instance_groups_list'),
    url(r'^(?P<pk>[0-9]+)/access_list/$', 'job_template_access_list'),
    url(r'^(?P<pk>[0-9]+)/object_roles/$', 'job_template_object_roles_list'),
    url(r'^(?P<pk>[0-9]+)/labels/$', 'job_template_label_list'),
)

job_urls = patterns('awx.api.views',
    url(r'^$', 'job_list'),
    url(r'^(?P<pk>[0-9]+)/$', 'job_detail'),
    url(r'^(?P<pk>[0-9]+)/start/$', 'job_start'),
    url(r'^(?P<pk>[0-9]+)/cancel/$', 'job_cancel'),
    url(r'^(?P<pk>[0-9]+)/relaunch/$', 'job_relaunch'),
    url(r'^(?P<pk>[0-9]+)/job_host_summaries/$', 'job_job_host_summaries_list'),
    url(r'^(?P<pk>[0-9]+)/job_events/$', 'job_job_events_list'),
    url(r'^(?P<pk>[0-9]+)/activity_stream/$', 'job_activity_stream_list'),
    url(r'^(?P<pk>[0-9]+)/stdout/$', 'job_stdout'),
    url(r'^(?P<pk>[0-9]+)/notifications/$', 'job_notifications_list'),
    url(r'^(?P<pk>[0-9]+)/labels/$', 'job_label_list'),
)

job_host_summary_urls = patterns('awx.api.views',
    url(r'^(?P<pk>[0-9]+)/$', 'job_host_summary_detail'),
)

job_event_urls = patterns('awx.api.views',
    url(r'^$', 'job_event_list'),
    url(r'^(?P<pk>[0-9]+)/$', 'job_event_detail'),
    url(r'^(?P<pk>[0-9]+)/children/$', 'job_event_children_list'),
    url(r'^(?P<pk>[0-9]+)/hosts/$', 'job_event_hosts_list'),
)

ad_hoc_command_urls = patterns('awx.api.views',
    url(r'^$', 'ad_hoc_command_list'),
    url(r'^(?P<pk>[0-9]+)/$', 'ad_hoc_command_detail'),
    url(r'^(?P<pk>[0-9]+)/cancel/$', 'ad_hoc_command_cancel'),
    url(r'^(?P<pk>[0-9]+)/relaunch/$', 'ad_hoc_command_relaunch'),
    url(r'^(?P<pk>[0-9]+)/events/$', 'ad_hoc_command_ad_hoc_command_events_list'),
    url(r'^(?P<pk>[0-9]+)/activity_stream/$', 'ad_hoc_command_activity_stream_list'),
    url(r'^(?P<pk>[0-9]+)/notifications/$', 'ad_hoc_command_notifications_list'),
    url(r'^(?P<pk>[0-9]+)/stdout/$', 'ad_hoc_command_stdout'),
)

ad_hoc_command_event_urls = patterns('awx.api.views',
    url(r'^$', 'ad_hoc_command_event_list'),
    url(r'^(?P<pk>[0-9]+)/$', 'ad_hoc_command_event_detail'),
)

system_job_template_urls = patterns('awx.api.views',
    url(r'^$', 'system_job_template_list'),
    url(r'^(?P<pk>[0-9]+)/$', 'system_job_template_detail'),
    url(r'^(?P<pk>[0-9]+)/launch/$', 'system_job_template_launch'),
    url(r'^(?P<pk>[0-9]+)/jobs/$', 'system_job_template_jobs_list'),
    url(r'^(?P<pk>[0-9]+)/schedules/$', 'system_job_template_schedules_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_any/$', 'system_job_template_notification_templates_any_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', 'system_job_template_notification_templates_error_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', 'system_job_template_notification_templates_success_list'),
)

system_job_urls = patterns('awx.api.views',
    url(r'^$', 'system_job_list'),
    url(r'^(?P<pk>[0-9]+)/$', 'system_job_detail'),
    url(r'^(?P<pk>[0-9]+)/cancel/$', 'system_job_cancel'),
    url(r'^(?P<pk>[0-9]+)/notifications/$', 'system_job_notifications_list'),
)

workflow_job_template_urls = patterns('awx.api.views',
    url(r'^$', 'workflow_job_template_list'),
    url(r'^(?P<pk>[0-9]+)/$', 'workflow_job_template_detail'),
    url(r'^(?P<pk>[0-9]+)/workflow_jobs/$', 'workflow_job_template_jobs_list'),
    url(r'^(?P<pk>[0-9]+)/launch/$', 'workflow_job_template_launch'),
    url(r'^(?P<pk>[0-9]+)/copy/$', 'workflow_job_template_copy'),
    url(r'^(?P<pk>[0-9]+)/schedules/$', 'workflow_job_template_schedules_list'),
    url(r'^(?P<pk>[0-9]+)/survey_spec/$', 'workflow_job_template_survey_spec'),
    url(r'^(?P<pk>[0-9]+)/workflow_nodes/$', 'workflow_job_template_workflow_nodes_list'),
    url(r'^(?P<pk>[0-9]+)/activity_stream/$', 'workflow_job_template_activity_stream_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_any/$', 'workflow_job_template_notification_templates_any_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', 'workflow_job_template_notification_templates_error_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', 'workflow_job_template_notification_templates_success_list'),
    url(r'^(?P<pk>[0-9]+)/access_list/$', 'workflow_job_template_access_list'),
    url(r'^(?P<pk>[0-9]+)/object_roles/$', 'workflow_job_template_object_roles_list'),
    url(r'^(?P<pk>[0-9]+)/labels/$', 'workflow_job_template_label_list'),
)

workflow_job_urls = patterns('awx.api.views',
    url(r'^$', 'workflow_job_list'),
    url(r'^(?P<pk>[0-9]+)/$', 'workflow_job_detail'),
    url(r'^(?P<pk>[0-9]+)/workflow_nodes/$', 'workflow_job_workflow_nodes_list'),
    url(r'^(?P<pk>[0-9]+)/labels/$', 'workflow_job_label_list'),
    url(r'^(?P<pk>[0-9]+)/cancel/$', 'workflow_job_cancel'),
    url(r'^(?P<pk>[0-9]+)/relaunch/$', 'workflow_job_relaunch'),
    url(r'^(?P<pk>[0-9]+)/notifications/$', 'workflow_job_notifications_list'),
    url(r'^(?P<pk>[0-9]+)/activity_stream/$', 'workflow_job_activity_stream_list'),
)

notification_template_urls = patterns('awx.api.views',
    url(r'^$', 'notification_template_list'),
    url(r'^(?P<pk>[0-9]+)/$', 'notification_template_detail'),
    url(r'^(?P<pk>[0-9]+)/test/$', 'notification_template_test'),
    url(r'^(?P<pk>[0-9]+)/notifications/$', 'notification_template_notification_list'),
)

notification_urls = patterns('awx.api.views',
    url(r'^$', 'notification_list'),
    url(r'^(?P<pk>[0-9]+)/$', 'notification_detail'),
)

label_urls = patterns('awx.api.views',
    url(r'^$', 'label_list'),
    url(r'^(?P<pk>[0-9]+)/$', 'label_detail'),
)

workflow_job_template_node_urls = patterns('awx.api.views',
    url(r'^$', 'workflow_job_template_node_list'),
    url(r'^(?P<pk>[0-9]+)/$', 'workflow_job_template_node_detail'),
    url(r'^(?P<pk>[0-9]+)/success_nodes/$', 'workflow_job_template_node_success_nodes_list'),
    url(r'^(?P<pk>[0-9]+)/failure_nodes/$', 'workflow_job_template_node_failure_nodes_list'),
    url(r'^(?P<pk>[0-9]+)/always_nodes/$', 'workflow_job_template_node_always_nodes_list'),
)

workflow_job_node_urls = patterns('awx.api.views',
    url(r'^$', 'workflow_job_node_list'),
    url(r'^(?P<pk>[0-9]+)/$', 'workflow_job_node_detail'),
    url(r'^(?P<pk>[0-9]+)/success_nodes/$', 'workflow_job_node_success_nodes_list'),
    url(r'^(?P<pk>[0-9]+)/failure_nodes/$', 'workflow_job_node_failure_nodes_list'),
    url(r'^(?P<pk>[0-9]+)/always_nodes/$', 'workflow_job_node_always_nodes_list'),
)

schedule_urls = patterns('awx.api.views',
    url(r'^$', 'schedule_list'),
    url(r'^(?P<pk>[0-9]+)/$', 'schedule_detail'),
    url(r'^(?P<pk>[0-9]+)/jobs/$', 'schedule_unified_jobs_list'),
)

activity_stream_urls = patterns('awx.api.views',
    url(r'^$', 'activity_stream_list'),
    url(r'^(?P<pk>[0-9]+)/$', 'activity_stream_detail'),
)

instance_urls = patterns('awx.api.views',
    url(r'^$', 'instance_list'),
    url(r'^(?P<pk>[0-9]+)/$', 'instance_detail'),
    url(r'^(?P<pk>[0-9]+)/jobs/$', 'instance_unified_jobs_list'),
    url(r'^(?P<pk>[0-9]+)/instance_groups/$', 'instance_instance_groups_list'),
)

instance_group_urls = patterns('awx.api.views',
    url(r'^$', 'instance_group_list'),
    url(r'^(?P<pk>[0-9]+)/$', 'instance_group_detail'),
    url(r'^(?P<pk>[0-9]+)/jobs/$', 'instance_group_unified_jobs_list'),
    url(r'^(?P<pk>[0-9]+)/instances/$', 'instance_group_instance_list'),
)

v1_urls = patterns('awx.api.views',
    url(r'^$', 'api_v1_root_view'),
    url(r'^ping/$', 'api_v1_ping_view'),
    url(r'^config/$', 'api_v1_config_view'),
    url(r'^auth/$', 'auth_view'),
    url(r'^authtoken/$', 'auth_token_view'),
    url(r'^me/$', 'user_me_list'),
    url(r'^dashboard/$', 'dashboard_view'),
    url(r'^dashboard/graphs/jobs/$', 'dashboard_jobs_graph_view'),
    url(r'^settings/', include('awx.conf.urls')),
    url(r'^instances/', include(instance_urls)),
    url(r'^instance_groups/', include(instance_group_urls)),
    url(r'^schedules/', include(schedule_urls)),
    url(r'^organizations/', include(organization_urls)),
    url(r'^users/', include(user_urls)),
    url(r'^projects/', include(project_urls)),
    url(r'^project_updates/', include(project_update_urls)),
    url(r'^teams/', include(team_urls)),
    url(r'^inventories/', include(inventory_urls)),
    url(r'^hosts/', include(host_urls)),
    url(r'^groups/', include(group_urls)),
    url(r'^inventory_sources/', include(inventory_source_urls)),
    url(r'^inventory_updates/', include(inventory_update_urls)),
    url(r'^inventory_scripts/', include(inventory_script_urls)),
    url(r'^credentials/', include(credential_urls)),
    url(r'^roles/', include(role_urls)),
    url(r'^job_templates/', include(job_template_urls)),
    url(r'^jobs/', include(job_urls)),
    url(r'^job_host_summaries/', include(job_host_summary_urls)),
    url(r'^job_events/', include(job_event_urls)),
    url(r'^ad_hoc_commands/', include(ad_hoc_command_urls)),
    url(r'^ad_hoc_command_events/', include(ad_hoc_command_event_urls)),
    url(r'^system_job_templates/', include(system_job_template_urls)),
    url(r'^system_jobs/', include(system_job_urls)),
    url(r'^notification_templates/', include(notification_template_urls)),
    url(r'^notifications/', include(notification_urls)),
    url(r'^workflow_job_templates/', include(workflow_job_template_urls)),
    url(r'^workflow_jobs/', include(workflow_job_urls)),
    url(r'^labels/', include(label_urls)),
    url(r'^workflow_job_template_nodes/', include(workflow_job_template_node_urls)),
    url(r'^workflow_job_nodes/', include(workflow_job_node_urls)),
    url(r'^unified_job_templates/$', 'unified_job_template_list'),
    url(r'^unified_jobs/$', 'unified_job_list'),
    url(r'^activity_stream/', include(activity_stream_urls)),
)

v2_urls = patterns('awx.api.views',
    url(r'^$', 'api_v2_root_view'),
    url(r'^credential_types/', include(credential_type_urls)),
    url(r'^hosts/(?P<pk>[0-9]+)/ansible_facts/$', 'host_ansible_facts_detail'),
    url(r'^jobs/(?P<pk>[0-9]+)/extra_credentials/$', 'job_extra_credentials_list'),
    url(r'^job_templates/(?P<pk>[0-9]+)/extra_credentials/$', 'job_template_extra_credentials_list'),
)

urlpatterns = patterns('awx.api.views',
    url(r'^$', 'api_root_view'),
    url(r'^(?P<version>(v2))/', include(v2_urls)),
    url(r'^(?P<version>(v1|v2))/', include(v1_urls))
)
7  awx/api/urls/__init__.py (new file)

@@ -0,0 +1,7 @@
# Copyright (c) 2017 Ansible, Inc.
# All Rights Reserved.

from __future__ import absolute_import, unicode_literals
from .urls import urlpatterns

__all__ = ['urlpatterns']
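
The `awx/api/urls/urls.py` module imported above is not included in this excerpt. As a rough sketch of the refactor's shape only (the module names follow the files added below; the actual composition in AWX may differ), the per-resource modules are typically stitched back together like this:

from django.conf.urls import include, url

from .activity_stream import urls as activity_stream_urls
from .ad_hoc_command import urls as ad_hoc_command_urls

# One include per resource module; only two are shown here.
v2_urls = [
    url(r'^activity_stream/', include(activity_stream_urls)),
    url(r'^ad_hoc_commands/', include(ad_hoc_command_urls)),
]

urlpatterns = [
    url(r'^(?P<version>(v1|v2))/', include(v2_urls)),
]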
|
||||
17
awx/api/urls/activity_stream.py
Normal file
17
awx/api/urls/activity_stream.py
Normal file
@@ -0,0 +1,17 @@
|
||||
# Copyright (c) 2017 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
from django.conf.urls import url
|
||||
|
||||
from awx.api.views import (
|
||||
ActivityStreamList,
|
||||
ActivityStreamDetail,
|
||||
)
|
||||
|
||||
|
||||
urls = [
|
||||
url(r'^$', ActivityStreamList.as_view(), name='activity_stream_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', ActivityStreamDetail.as_view(), name='activity_stream_detail'),
|
||||
]
|
||||
|
||||
__all__ = ['urls']
|
||||
29
awx/api/urls/ad_hoc_command.py
Normal file
29
awx/api/urls/ad_hoc_command.py
Normal file
@@ -0,0 +1,29 @@
|
||||
# Copyright (c) 2017 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
from django.conf.urls import url
|
||||
|
||||
from awx.api.views import (
|
||||
AdHocCommandList,
|
||||
AdHocCommandDetail,
|
||||
AdHocCommandCancel,
|
||||
AdHocCommandRelaunch,
|
||||
AdHocCommandAdHocCommandEventsList,
|
||||
AdHocCommandActivityStreamList,
|
||||
AdHocCommandNotificationsList,
|
||||
AdHocCommandStdout,
|
||||
)
|
||||
|
||||
|
||||
urls = [
|
||||
url(r'^$', AdHocCommandList.as_view(), name='ad_hoc_command_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', AdHocCommandDetail.as_view(), name='ad_hoc_command_detail'),
|
||||
url(r'^(?P<pk>[0-9]+)/cancel/$', AdHocCommandCancel.as_view(), name='ad_hoc_command_cancel'),
|
||||
url(r'^(?P<pk>[0-9]+)/relaunch/$', AdHocCommandRelaunch.as_view(), name='ad_hoc_command_relaunch'),
|
||||
url(r'^(?P<pk>[0-9]+)/events/$', AdHocCommandAdHocCommandEventsList.as_view(), name='ad_hoc_command_ad_hoc_command_events_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/activity_stream/$', AdHocCommandActivityStreamList.as_view(), name='ad_hoc_command_activity_stream_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/notifications/$', AdHocCommandNotificationsList.as_view(), name='ad_hoc_command_notifications_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/stdout/$', AdHocCommandStdout.as_view(), name='ad_hoc_command_stdout'),
|
||||
]
|
||||
|
||||
__all__ = ['urls']
|
||||
17
awx/api/urls/ad_hoc_command_event.py
Normal file
17
awx/api/urls/ad_hoc_command_event.py
Normal file
@@ -0,0 +1,17 @@
|
||||
# Copyright (c) 2017 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
from django.conf.urls import url
|
||||
|
||||
from awx.api.views import (
|
||||
AdHocCommandEventList,
|
||||
AdHocCommandEventDetail,
|
||||
)
|
||||
|
||||
|
||||
urls = [
|
||||
url(r'^$', AdHocCommandEventList.as_view(), name='ad_hoc_command_event_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', AdHocCommandEventDetail.as_view(), name='ad_hoc_command_event_detail'),
|
||||
]
|
||||
|
||||
__all__ = ['urls']
|
||||
27
awx/api/urls/credential.py
Normal file
27
awx/api/urls/credential.py
Normal file
@@ -0,0 +1,27 @@
|
||||
# Copyright (c) 2017 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
from django.conf.urls import url
|
||||
|
||||
from awx.api.views import (
|
||||
CredentialList,
|
||||
CredentialActivityStreamList,
|
||||
CredentialDetail,
|
||||
CredentialAccessList,
|
||||
CredentialObjectRolesList,
|
||||
CredentialOwnerUsersList,
|
||||
CredentialOwnerTeamsList,
|
||||
)
|
||||
|
||||
|
||||
urls = [
|
||||
url(r'^$', CredentialList.as_view(), name='credential_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/activity_stream/$', CredentialActivityStreamList.as_view(), name='credential_activity_stream_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', CredentialDetail.as_view(), name='credential_detail'),
|
||||
url(r'^(?P<pk>[0-9]+)/access_list/$', CredentialAccessList.as_view(), name='credential_access_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/object_roles/$', CredentialObjectRolesList.as_view(), name='credential_object_roles_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/owner_users/$', CredentialOwnerUsersList.as_view(), name='credential_owner_users_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/owner_teams/$', CredentialOwnerTeamsList.as_view(), name='credential_owner_teams_list'),
|
||||
]
|
||||
|
||||
__all__ = ['urls']
|
||||
21
awx/api/urls/credential_type.py
Normal file
21
awx/api/urls/credential_type.py
Normal file
@@ -0,0 +1,21 @@
|
||||
# Copyright (c) 2017 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
from django.conf.urls import url
|
||||
|
||||
from awx.api.views import (
|
||||
CredentialTypeList,
|
||||
CredentialTypeDetail,
|
||||
CredentialTypeCredentialList,
|
||||
CredentialTypeActivityStreamList,
|
||||
)
|
||||
|
||||
|
||||
urls = [
|
||||
url(r'^$', CredentialTypeList.as_view(), name='credential_type_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', CredentialTypeDetail.as_view(), name='credential_type_detail'),
|
||||
url(r'^(?P<pk>[0-9]+)/credentials/$', CredentialTypeCredentialList.as_view(), name='credential_type_credential_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/activity_stream/$', CredentialTypeActivityStreamList.as_view(), name='credential_type_activity_stream_list'),
|
||||
]
|
||||
|
||||
__all__ = ['urls']
|
||||
37
awx/api/urls/group.py
Normal file
37
awx/api/urls/group.py
Normal file
@@ -0,0 +1,37 @@
|
||||
# Copyright (c) 2017 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
from django.conf.urls import url
|
||||
|
||||
from awx.api.views import (
|
||||
GroupList,
|
||||
GroupDetail,
|
||||
GroupChildrenList,
|
||||
GroupHostsList,
|
||||
GroupAllHostsList,
|
||||
GroupVariableData,
|
||||
GroupJobEventsList,
|
||||
GroupJobHostSummariesList,
|
||||
GroupPotentialChildrenList,
|
||||
GroupActivityStreamList,
|
||||
GroupInventorySourcesList,
|
||||
GroupAdHocCommandsList,
|
||||
)
|
||||
|
||||
|
||||
urls = [
|
||||
url(r'^$', GroupList.as_view(), name='group_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', GroupDetail.as_view(), name='group_detail'),
|
||||
url(r'^(?P<pk>[0-9]+)/children/$', GroupChildrenList.as_view(), name='group_children_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/hosts/$', GroupHostsList.as_view(), name='group_hosts_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/all_hosts/$', GroupAllHostsList.as_view(), name='group_all_hosts_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/variable_data/$', GroupVariableData.as_view(), name='group_variable_data'),
|
||||
url(r'^(?P<pk>[0-9]+)/job_events/$', GroupJobEventsList.as_view(), name='group_job_events_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/job_host_summaries/$', GroupJobHostSummariesList.as_view(), name='group_job_host_summaries_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/potential_children/$', GroupPotentialChildrenList.as_view(), name='group_potential_children_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/activity_stream/$', GroupActivityStreamList.as_view(), name='group_activity_stream_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/inventory_sources/$', GroupInventorySourcesList.as_view(), name='group_inventory_sources_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/ad_hoc_commands/$', GroupAdHocCommandsList.as_view(), name='group_ad_hoc_commands_list'),
|
||||
]
|
||||
|
||||
__all__ = ['urls']
|
||||
43
awx/api/urls/host.py
Normal file
43
awx/api/urls/host.py
Normal file
@@ -0,0 +1,43 @@
|
||||
# Copyright (c) 2017 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
from django.conf.urls import url
|
||||
|
||||
from awx.api.views import (
|
||||
HostList,
|
||||
HostDetail,
|
||||
HostVariableData,
|
||||
HostGroupsList,
|
||||
HostAllGroupsList,
|
||||
HostJobEventsList,
|
||||
HostJobHostSummariesList,
|
||||
HostActivityStreamList,
|
||||
HostInventorySourcesList,
|
||||
HostSmartInventoriesList,
|
||||
HostAdHocCommandsList,
|
||||
HostAdHocCommandEventsList,
|
||||
HostFactVersionsList,
|
||||
HostFactCompareView,
|
||||
HostInsights,
|
||||
)
|
||||
|
||||
|
||||
urls = [
|
||||
url(r'^$', HostList.as_view(), name='host_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', HostDetail.as_view(), name='host_detail'),
|
||||
url(r'^(?P<pk>[0-9]+)/variable_data/$', HostVariableData.as_view(), name='host_variable_data'),
|
||||
url(r'^(?P<pk>[0-9]+)/groups/$', HostGroupsList.as_view(), name='host_groups_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/all_groups/$', HostAllGroupsList.as_view(), name='host_all_groups_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/job_events/', HostJobEventsList.as_view(), name='host_job_events_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/job_host_summaries/$', HostJobHostSummariesList.as_view(), name='host_job_host_summaries_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/activity_stream/$', HostActivityStreamList.as_view(), name='host_activity_stream_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/inventory_sources/$', HostInventorySourcesList.as_view(), name='host_inventory_sources_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/smart_inventories/$', HostSmartInventoriesList.as_view(), name='host_smart_inventories_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/ad_hoc_commands/$', HostAdHocCommandsList.as_view(), name='host_ad_hoc_commands_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/ad_hoc_command_events/$', HostAdHocCommandEventsList.as_view(), name='host_ad_hoc_command_events_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/fact_versions/$', HostFactVersionsList.as_view(), name='host_fact_versions_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/fact_view/$', HostFactCompareView.as_view(), name='host_fact_compare_view'),
|
||||
url(r'^(?P<pk>[0-9]+)/insights/$', HostInsights.as_view(), name='host_insights'),
|
||||
]
|
||||
|
||||
__all__ = ['urls']
|
||||
22
awx/api/urls/instance.py
Normal file
22
awx/api/urls/instance.py
Normal file
@@ -0,0 +1,22 @@
|
||||
# Copyright (c) 2017 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
from django.conf.urls import url
|
||||
|
||||
from awx.api.views import (
|
||||
InstanceList,
|
||||
InstanceDetail,
|
||||
InstanceUnifiedJobsList,
|
||||
InstanceInstanceGroupsList,
|
||||
)
|
||||
|
||||
|
||||
urls = [
|
||||
url(r'^$', InstanceList.as_view(), name='instance_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', InstanceDetail.as_view(), name='instance_detail'),
|
||||
url(r'^(?P<pk>[0-9]+)/jobs/$', InstanceUnifiedJobsList.as_view(), name='instance_unified_jobs_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/instance_groups/$', InstanceInstanceGroupsList.as_view(),
|
||||
name='instance_instance_groups_list'),
|
||||
]
|
||||
|
||||
__all__ = ['urls']
|
||||
21  awx/api/urls/instance_group.py  Normal file
@@ -0,0 +1,21 @@
# Copyright (c) 2017 Ansible, Inc.
# All Rights Reserved.

from django.conf.urls import url

from awx.api.views import (
    InstanceGroupList,
    InstanceGroupDetail,
    InstanceGroupUnifiedJobsList,
    InstanceGroupInstanceList,
)


urls = [
    url(r'^$', InstanceGroupList.as_view(), name='instance_group_list'),
    url(r'^(?P<pk>[0-9]+)/$', InstanceGroupDetail.as_view(), name='instance_group_detail'),
    url(r'^(?P<pk>[0-9]+)/jobs/$', InstanceGroupUnifiedJobsList.as_view(), name='instance_group_unified_jobs_list'),
    url(r'^(?P<pk>[0-9]+)/instances/$', InstanceGroupInstanceList.as_view(), name='instance_group_instance_list'),
]

__all__ = ['urls']
45  awx/api/urls/inventory.py  Normal file
@@ -0,0 +1,45 @@
# Copyright (c) 2017 Ansible, Inc.
# All Rights Reserved.

from django.conf.urls import url

from awx.api.views import (
    InventoryList,
    InventoryDetail,
    InventoryHostsList,
    InventoryGroupsList,
    InventoryRootGroupsList,
    InventoryVariableData,
    InventoryScriptView,
    InventoryTreeView,
    InventoryInventorySourcesList,
    InventoryInventorySourcesUpdate,
    InventoryActivityStreamList,
    InventoryJobTemplateList,
    InventoryAdHocCommandsList,
    InventoryAccessList,
    InventoryObjectRolesList,
    InventoryInstanceGroupsList,
)


urls = [
    url(r'^$', InventoryList.as_view(), name='inventory_list'),
    url(r'^(?P<pk>[0-9]+)/$', InventoryDetail.as_view(), name='inventory_detail'),
    url(r'^(?P<pk>[0-9]+)/hosts/$', InventoryHostsList.as_view(), name='inventory_hosts_list'),
    url(r'^(?P<pk>[0-9]+)/groups/$', InventoryGroupsList.as_view(), name='inventory_groups_list'),
    url(r'^(?P<pk>[0-9]+)/root_groups/$', InventoryRootGroupsList.as_view(), name='inventory_root_groups_list'),
    url(r'^(?P<pk>[0-9]+)/variable_data/$', InventoryVariableData.as_view(), name='inventory_variable_data'),
    url(r'^(?P<pk>[0-9]+)/script/$', InventoryScriptView.as_view(), name='inventory_script_view'),
    url(r'^(?P<pk>[0-9]+)/tree/$', InventoryTreeView.as_view(), name='inventory_tree_view'),
    url(r'^(?P<pk>[0-9]+)/inventory_sources/$', InventoryInventorySourcesList.as_view(), name='inventory_inventory_sources_list'),
    url(r'^(?P<pk>[0-9]+)/update_inventory_sources/$', InventoryInventorySourcesUpdate.as_view(), name='inventory_inventory_sources_update'),
    url(r'^(?P<pk>[0-9]+)/activity_stream/$', InventoryActivityStreamList.as_view(), name='inventory_activity_stream_list'),
    url(r'^(?P<pk>[0-9]+)/job_templates/$', InventoryJobTemplateList.as_view(), name='inventory_job_template_list'),
    url(r'^(?P<pk>[0-9]+)/ad_hoc_commands/$', InventoryAdHocCommandsList.as_view(), name='inventory_ad_hoc_commands_list'),
    url(r'^(?P<pk>[0-9]+)/access_list/$', InventoryAccessList.as_view(), name='inventory_access_list'),
    url(r'^(?P<pk>[0-9]+)/object_roles/$', InventoryObjectRolesList.as_view(), name='inventory_object_roles_list'),
    url(r'^(?P<pk>[0-9]+)/instance_groups/$', InventoryInstanceGroupsList.as_view(), name='inventory_instance_groups_list'),
]

__all__ = ['urls']
19  awx/api/urls/inventory_script.py  Normal file
@@ -0,0 +1,19 @@
# Copyright (c) 2017 Ansible, Inc.
# All Rights Reserved.

from django.conf.urls import url

from awx.api.views import (
    InventoryScriptList,
    InventoryScriptDetail,
    InventoryScriptObjectRolesList,
)


urls = [
    url(r'^$', InventoryScriptList.as_view(), name='inventory_script_list'),
    url(r'^(?P<pk>[0-9]+)/$', InventoryScriptDetail.as_view(), name='inventory_script_detail'),
    url(r'^(?P<pk>[0-9]+)/object_roles/$', InventoryScriptObjectRolesList.as_view(), name='inventory_script_object_roles_list'),
]

__all__ = ['urls']
38  awx/api/urls/inventory_source.py  Normal file
@@ -0,0 +1,38 @@
# Copyright (c) 2017 Ansible, Inc.
# All Rights Reserved.

from django.conf.urls import url

from awx.api.views import (
    InventorySourceList,
    InventorySourceDetail,
    InventorySourceUpdateView,
    InventorySourceUpdatesList,
    InventorySourceActivityStreamList,
    InventorySourceSchedulesList,
    InventorySourceGroupsList,
    InventorySourceHostsList,
    InventorySourceNotificationTemplatesAnyList,
    InventorySourceNotificationTemplatesErrorList,
    InventorySourceNotificationTemplatesSuccessList,
)


urls = [
    url(r'^$', InventorySourceList.as_view(), name='inventory_source_list'),
    url(r'^(?P<pk>[0-9]+)/$', InventorySourceDetail.as_view(), name='inventory_source_detail'),
    url(r'^(?P<pk>[0-9]+)/update/$', InventorySourceUpdateView.as_view(), name='inventory_source_update_view'),
    url(r'^(?P<pk>[0-9]+)/inventory_updates/$', InventorySourceUpdatesList.as_view(), name='inventory_source_updates_list'),
    url(r'^(?P<pk>[0-9]+)/activity_stream/$', InventorySourceActivityStreamList.as_view(), name='inventory_source_activity_stream_list'),
    url(r'^(?P<pk>[0-9]+)/schedules/$', InventorySourceSchedulesList.as_view(), name='inventory_source_schedules_list'),
    url(r'^(?P<pk>[0-9]+)/groups/$', InventorySourceGroupsList.as_view(), name='inventory_source_groups_list'),
    url(r'^(?P<pk>[0-9]+)/hosts/$', InventorySourceHostsList.as_view(), name='inventory_source_hosts_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_any/$', InventorySourceNotificationTemplatesAnyList.as_view(),
        name='inventory_source_notification_templates_any_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', InventorySourceNotificationTemplatesErrorList.as_view(),
        name='inventory_source_notification_templates_error_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', InventorySourceNotificationTemplatesSuccessList.as_view(),
        name='inventory_source_notification_templates_success_list'),
]

__all__ = ['urls']
23  awx/api/urls/inventory_update.py  Normal file
@@ -0,0 +1,23 @@
# Copyright (c) 2017 Ansible, Inc.
# All Rights Reserved.

from django.conf.urls import url

from awx.api.views import (
    InventoryUpdateList,
    InventoryUpdateDetail,
    InventoryUpdateCancel,
    InventoryUpdateStdout,
    InventoryUpdateNotificationsList,
)


urls = [
    url(r'^$', InventoryUpdateList.as_view(), name='inventory_update_list'),
    url(r'^(?P<pk>[0-9]+)/$', InventoryUpdateDetail.as_view(), name='inventory_update_detail'),
    url(r'^(?P<pk>[0-9]+)/cancel/$', InventoryUpdateCancel.as_view(), name='inventory_update_cancel'),
    url(r'^(?P<pk>[0-9]+)/stdout/$', InventoryUpdateStdout.as_view(), name='inventory_update_stdout'),
    url(r'^(?P<pk>[0-9]+)/notifications/$', InventoryUpdateNotificationsList.as_view(), name='inventory_update_notifications_list'),
]

__all__ = ['urls']
37  awx/api/urls/job.py  Normal file
@@ -0,0 +1,37 @@
# Copyright (c) 2017 Ansible, Inc.
# All Rights Reserved.

from django.conf.urls import url

from awx.api.views import (
    JobList,
    JobDetail,
    JobStart,
    JobCancel,
    JobRelaunch,
    JobCreateSchedule,
    JobJobHostSummariesList,
    JobJobEventsList,
    JobActivityStreamList,
    JobStdout,
    JobNotificationsList,
    JobLabelList,
)


urls = [
    url(r'^$', JobList.as_view(), name='job_list'),
    url(r'^(?P<pk>[0-9]+)/$', JobDetail.as_view(), name='job_detail'),
    url(r'^(?P<pk>[0-9]+)/start/$', JobStart.as_view(), name='job_start'),  # Todo: Remove In 3.3
    url(r'^(?P<pk>[0-9]+)/cancel/$', JobCancel.as_view(), name='job_cancel'),
    url(r'^(?P<pk>[0-9]+)/relaunch/$', JobRelaunch.as_view(), name='job_relaunch'),
    url(r'^(?P<pk>[0-9]+)/create_schedule/$', JobCreateSchedule.as_view(), name='job_create_schedule'),
    url(r'^(?P<pk>[0-9]+)/job_host_summaries/$', JobJobHostSummariesList.as_view(), name='job_job_host_summaries_list'),
    url(r'^(?P<pk>[0-9]+)/job_events/$', JobJobEventsList.as_view(), name='job_job_events_list'),
    url(r'^(?P<pk>[0-9]+)/activity_stream/$', JobActivityStreamList.as_view(), name='job_activity_stream_list'),
    url(r'^(?P<pk>[0-9]+)/stdout/$', JobStdout.as_view(), name='job_stdout'),
    url(r'^(?P<pk>[0-9]+)/notifications/$', JobNotificationsList.as_view(), name='job_notifications_list'),
    url(r'^(?P<pk>[0-9]+)/labels/$', JobLabelList.as_view(), name='job_label_list'),
]

__all__ = ['urls']
21  awx/api/urls/job_event.py  Normal file
@@ -0,0 +1,21 @@
# Copyright (c) 2017 Ansible, Inc.
# All Rights Reserved.

from django.conf.urls import url

from awx.api.views import (
    JobEventList,
    JobEventDetail,
    JobEventChildrenList,
    JobEventHostsList,
)


urls = [
    url(r'^$', JobEventList.as_view(), name='job_event_list'),
    url(r'^(?P<pk>[0-9]+)/$', JobEventDetail.as_view(), name='job_event_detail'),
    url(r'^(?P<pk>[0-9]+)/children/$', JobEventChildrenList.as_view(), name='job_event_children_list'),
    url(r'^(?P<pk>[0-9]+)/hosts/$', JobEventHostsList.as_view(), name='job_event_hosts_list'),
]

__all__ = ['urls']
15  awx/api/urls/job_host_summary.py  Normal file
@@ -0,0 +1,15 @@
# Copyright (c) 2017 Ansible, Inc.
# All Rights Reserved.

from django.conf.urls import url

from awx.api.views import (
    JobHostSummaryDetail,
)


urls = [
    url(r'^(?P<pk>[0-9]+)/$', JobHostSummaryDetail.as_view(), name='job_host_summary_detail'),
]

__all__ = ['urls']
46  awx/api/urls/job_template.py  Normal file
@@ -0,0 +1,46 @@
# Copyright (c) 2017 Ansible, Inc.
# All Rights Reserved.

from django.conf.urls import url

from awx.api.views import (
    JobTemplateList,
    JobTemplateDetail,
    JobTemplateLaunch,
    JobTemplateJobsList,
    JobTemplateCallback,
    JobTemplateSchedulesList,
    JobTemplateSurveySpec,
    JobTemplateActivityStreamList,
    JobTemplateNotificationTemplatesAnyList,
    JobTemplateNotificationTemplatesErrorList,
    JobTemplateNotificationTemplatesSuccessList,
    JobTemplateInstanceGroupsList,
    JobTemplateAccessList,
    JobTemplateObjectRolesList,
    JobTemplateLabelList,
)


urls = [
    url(r'^$', JobTemplateList.as_view(), name='job_template_list'),
    url(r'^(?P<pk>[0-9]+)/$', JobTemplateDetail.as_view(), name='job_template_detail'),
    url(r'^(?P<pk>[0-9]+)/launch/$', JobTemplateLaunch.as_view(), name='job_template_launch'),
    url(r'^(?P<pk>[0-9]+)/jobs/$', JobTemplateJobsList.as_view(), name='job_template_jobs_list'),
    url(r'^(?P<pk>[0-9]+)/callback/$', JobTemplateCallback.as_view(), name='job_template_callback'),
    url(r'^(?P<pk>[0-9]+)/schedules/$', JobTemplateSchedulesList.as_view(), name='job_template_schedules_list'),
    url(r'^(?P<pk>[0-9]+)/survey_spec/$', JobTemplateSurveySpec.as_view(), name='job_template_survey_spec'),
    url(r'^(?P<pk>[0-9]+)/activity_stream/$', JobTemplateActivityStreamList.as_view(), name='job_template_activity_stream_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_any/$', JobTemplateNotificationTemplatesAnyList.as_view(),
        name='job_template_notification_templates_any_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', JobTemplateNotificationTemplatesErrorList.as_view(),
        name='job_template_notification_templates_error_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', JobTemplateNotificationTemplatesSuccessList.as_view(),
        name='job_template_notification_templates_success_list'),
    url(r'^(?P<pk>[0-9]+)/instance_groups/$', JobTemplateInstanceGroupsList.as_view(), name='job_template_instance_groups_list'),
    url(r'^(?P<pk>[0-9]+)/access_list/$', JobTemplateAccessList.as_view(), name='job_template_access_list'),
    url(r'^(?P<pk>[0-9]+)/object_roles/$', JobTemplateObjectRolesList.as_view(), name='job_template_object_roles_list'),
    url(r'^(?P<pk>[0-9]+)/labels/$', JobTemplateLabelList.as_view(), name='job_template_label_list'),
]

__all__ = ['urls']
17  awx/api/urls/label.py  Normal file
@@ -0,0 +1,17 @@
# Copyright (c) 2017 Ansible, Inc.
# All Rights Reserved.

from django.conf.urls import url

from awx.api.views import (
    LabelList,
    LabelDetail,
)


urls = [
    url(r'^$', LabelList.as_view(), name='label_list'),
    url(r'^(?P<pk>[0-9]+)/$', LabelDetail.as_view(), name='label_detail'),
]

__all__ = ['urls']
17  awx/api/urls/notification.py  Normal file
@@ -0,0 +1,17 @@
# Copyright (c) 2017 Ansible, Inc.
# All Rights Reserved.

from django.conf.urls import url

from awx.api.views import (
    NotificationList,
    NotificationDetail,
)


urls = [
    url(r'^$', NotificationList.as_view(), name='notification_list'),
    url(r'^(?P<pk>[0-9]+)/$', NotificationDetail.as_view(), name='notification_detail'),
]

__all__ = ['urls']
21  awx/api/urls/notification_template.py  Normal file
@@ -0,0 +1,21 @@
# Copyright (c) 2017 Ansible, Inc.
# All Rights Reserved.

from django.conf.urls import url

from awx.api.views import (
    NotificationTemplateList,
    NotificationTemplateDetail,
    NotificationTemplateTest,
    NotificationTemplateNotificationList,
)


urls = [
    url(r'^$', NotificationTemplateList.as_view(), name='notification_template_list'),
    url(r'^(?P<pk>[0-9]+)/$', NotificationTemplateDetail.as_view(), name='notification_template_detail'),
    url(r'^(?P<pk>[0-9]+)/test/$', NotificationTemplateTest.as_view(), name='notification_template_test'),
    url(r'^(?P<pk>[0-9]+)/notifications/$', NotificationTemplateNotificationList.as_view(), name='notification_template_notification_list'),
]

__all__ = ['urls']
50  awx/api/urls/organization.py  Normal file
@@ -0,0 +1,50 @@
# Copyright (c) 2017 Ansible, Inc.
# All Rights Reserved.

from django.conf.urls import url

from awx.api.views import (
    OrganizationList,
    OrganizationDetail,
    OrganizationUsersList,
    OrganizationAdminsList,
    OrganizationInventoriesList,
    OrganizationProjectsList,
    OrganizationWorkflowJobTemplatesList,
    OrganizationTeamsList,
    OrganizationCredentialList,
    OrganizationActivityStreamList,
    OrganizationNotificationTemplatesList,
    OrganizationNotificationTemplatesAnyList,
    OrganizationNotificationTemplatesErrorList,
    OrganizationNotificationTemplatesSuccessList,
    OrganizationInstanceGroupsList,
    OrganizationObjectRolesList,
    OrganizationAccessList,
)


urls = [
    url(r'^$', OrganizationList.as_view(), name='organization_list'),
    url(r'^(?P<pk>[0-9]+)/$', OrganizationDetail.as_view(), name='organization_detail'),
    url(r'^(?P<pk>[0-9]+)/users/$', OrganizationUsersList.as_view(), name='organization_users_list'),
    url(r'^(?P<pk>[0-9]+)/admins/$', OrganizationAdminsList.as_view(), name='organization_admins_list'),
    url(r'^(?P<pk>[0-9]+)/inventories/$', OrganizationInventoriesList.as_view(), name='organization_inventories_list'),
    url(r'^(?P<pk>[0-9]+)/projects/$', OrganizationProjectsList.as_view(), name='organization_projects_list'),
    url(r'^(?P<pk>[0-9]+)/workflow_job_templates/$', OrganizationWorkflowJobTemplatesList.as_view(), name='organization_workflow_job_templates_list'),
    url(r'^(?P<pk>[0-9]+)/teams/$', OrganizationTeamsList.as_view(), name='organization_teams_list'),
    url(r'^(?P<pk>[0-9]+)/credentials/$', OrganizationCredentialList.as_view(), name='organization_credential_list'),
    url(r'^(?P<pk>[0-9]+)/activity_stream/$', OrganizationActivityStreamList.as_view(), name='organization_activity_stream_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates/$', OrganizationNotificationTemplatesList.as_view(), name='organization_notification_templates_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_any/$', OrganizationNotificationTemplatesAnyList.as_view(),
        name='organization_notification_templates_any_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', OrganizationNotificationTemplatesErrorList.as_view(),
        name='organization_notification_templates_error_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', OrganizationNotificationTemplatesSuccessList.as_view(),
        name='organization_notification_templates_success_list'),
    url(r'^(?P<pk>[0-9]+)/instance_groups/$', OrganizationInstanceGroupsList.as_view(), name='organization_instance_groups_list'),
    url(r'^(?P<pk>[0-9]+)/object_roles/$', OrganizationObjectRolesList.as_view(), name='organization_object_roles_list'),
    url(r'^(?P<pk>[0-9]+)/access_list/$', OrganizationAccessList.as_view(), name='organization_access_list'),
]

__all__ = ['urls']
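organization.py, like project.py, job_template.py, system_job_template.py, inventory_source.py, and workflow_job_template.py in this changeset, repeats the same notification_templates_{any,error,success} triplet of sublist routes. The diff keeps them spelled out; a hedged sketch of a factory that could stamp the triplet out (notification_template_urls is hypothetical, not a helper in the diff):

    from django.conf.urls import url

    def notification_template_urls(prefix, views_by_status):
        # views_by_status maps 'any'/'error'/'success' to the matching sublist view class.
        return [
            url(r'^(?P<pk>[0-9]+)/notification_templates_%s/$' % state,
                view.as_view(),
                name='%s_notification_templates_%s_list' % (prefix, state))
            for state, view in sorted(views_by_status.items())
        ]

With such a helper, the three wrapped organization entries above would collapse to a single urls += notification_template_urls('organization', {...}) call; the explicit spelling the diff uses trades that brevity for grep-ability.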
44  awx/api/urls/project.py  Normal file
@@ -0,0 +1,44 @@
# Copyright (c) 2017 Ansible, Inc.
# All Rights Reserved.

from django.conf.urls import url

from awx.api.views import (
    ProjectList,
    ProjectDetail,
    ProjectPlaybooks,
    ProjectInventories,
    ProjectScmInventorySources,
    ProjectTeamsList,
    ProjectUpdateView,
    ProjectUpdatesList,
    ProjectActivityStreamList,
    ProjectSchedulesList,
    ProjectNotificationTemplatesAnyList,
    ProjectNotificationTemplatesErrorList,
    ProjectNotificationTemplatesSuccessList,
    ProjectObjectRolesList,
    ProjectAccessList,
)


urls = [
    url(r'^$', ProjectList.as_view(), name='project_list'),
    url(r'^(?P<pk>[0-9]+)/$', ProjectDetail.as_view(), name='project_detail'),
    url(r'^(?P<pk>[0-9]+)/playbooks/$', ProjectPlaybooks.as_view(), name='project_playbooks'),
    url(r'^(?P<pk>[0-9]+)/inventories/$', ProjectInventories.as_view(), name='project_inventories'),
    url(r'^(?P<pk>[0-9]+)/scm_inventory_sources/$', ProjectScmInventorySources.as_view(), name='project_scm_inventory_sources'),
    url(r'^(?P<pk>[0-9]+)/teams/$', ProjectTeamsList.as_view(), name='project_teams_list'),
    url(r'^(?P<pk>[0-9]+)/update/$', ProjectUpdateView.as_view(), name='project_update_view'),
    url(r'^(?P<pk>[0-9]+)/project_updates/$', ProjectUpdatesList.as_view(), name='project_updates_list'),
    url(r'^(?P<pk>[0-9]+)/activity_stream/$', ProjectActivityStreamList.as_view(), name='project_activity_stream_list'),
    url(r'^(?P<pk>[0-9]+)/schedules/$', ProjectSchedulesList.as_view(), name='project_schedules_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_any/$', ProjectNotificationTemplatesAnyList.as_view(), name='project_notification_templates_any_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', ProjectNotificationTemplatesErrorList.as_view(), name='project_notification_templates_error_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', ProjectNotificationTemplatesSuccessList.as_view(),
        name='project_notification_templates_success_list'),
    url(r'^(?P<pk>[0-9]+)/object_roles/$', ProjectObjectRolesList.as_view(), name='project_object_roles_list'),
    url(r'^(?P<pk>[0-9]+)/access_list/$', ProjectAccessList.as_view(), name='project_access_list'),
]

__all__ = ['urls']
25  awx/api/urls/project_update.py  Normal file
@@ -0,0 +1,25 @@
# Copyright (c) 2017 Ansible, Inc.
# All Rights Reserved.

from django.conf.urls import url

from awx.api.views import (
    ProjectUpdateList,
    ProjectUpdateDetail,
    ProjectUpdateCancel,
    ProjectUpdateStdout,
    ProjectUpdateScmInventoryUpdates,
    ProjectUpdateNotificationsList,
)


urls = [
    url(r'^$', ProjectUpdateList.as_view(), name='project_update_list'),
    url(r'^(?P<pk>[0-9]+)/$', ProjectUpdateDetail.as_view(), name='project_update_detail'),
    url(r'^(?P<pk>[0-9]+)/cancel/$', ProjectUpdateCancel.as_view(), name='project_update_cancel'),
    url(r'^(?P<pk>[0-9]+)/stdout/$', ProjectUpdateStdout.as_view(), name='project_update_stdout'),
    url(r'^(?P<pk>[0-9]+)/scm_inventory_updates/$', ProjectUpdateScmInventoryUpdates.as_view(), name='project_update_scm_inventory_updates'),
    url(r'^(?P<pk>[0-9]+)/notifications/$', ProjectUpdateNotificationsList.as_view(), name='project_update_notifications_list'),
]

__all__ = ['urls']
25  awx/api/urls/role.py  Normal file
@@ -0,0 +1,25 @@
# Copyright (c) 2017 Ansible, Inc.
# All Rights Reserved.

from django.conf.urls import url

from awx.api.views import (
    RoleList,
    RoleDetail,
    RoleUsersList,
    RoleTeamsList,
    RoleParentsList,
    RoleChildrenList,
)


urls = [
    url(r'^$', RoleList.as_view(), name='role_list'),
    url(r'^(?P<pk>[0-9]+)/$', RoleDetail.as_view(), name='role_detail'),
    url(r'^(?P<pk>[0-9]+)/users/$', RoleUsersList.as_view(), name='role_users_list'),
    url(r'^(?P<pk>[0-9]+)/teams/$', RoleTeamsList.as_view(), name='role_teams_list'),
    url(r'^(?P<pk>[0-9]+)/parents/$', RoleParentsList.as_view(), name='role_parents_list'),
    url(r'^(?P<pk>[0-9]+)/children/$', RoleChildrenList.as_view(), name='role_children_list'),
]

__all__ = ['urls']
21  awx/api/urls/schedule.py  Normal file
@@ -0,0 +1,21 @@
# Copyright (c) 2017 Ansible, Inc.
# All Rights Reserved.

from django.conf.urls import url

from awx.api.views import (
    ScheduleList,
    ScheduleDetail,
    ScheduleUnifiedJobsList,
    ScheduleCredentialsList,
)


urls = [
    url(r'^$', ScheduleList.as_view(), name='schedule_list'),
    url(r'^(?P<pk>[0-9]+)/$', ScheduleDetail.as_view(), name='schedule_detail'),
    url(r'^(?P<pk>[0-9]+)/jobs/$', ScheduleUnifiedJobsList.as_view(), name='schedule_unified_jobs_list'),
    url(r'^(?P<pk>[0-9]+)/credentials/$', ScheduleCredentialsList.as_view(), name='schedule_credentials_list'),
]

__all__ = ['urls']
21  awx/api/urls/system_job.py  Normal file
@@ -0,0 +1,21 @@
# Copyright (c) 2017 Ansible, Inc.
# All Rights Reserved.

from django.conf.urls import url

from awx.api.views import (
    SystemJobList,
    SystemJobDetail,
    SystemJobCancel,
    SystemJobNotificationsList,
)


urls = [
    url(r'^$', SystemJobList.as_view(), name='system_job_list'),
    url(r'^(?P<pk>[0-9]+)/$', SystemJobDetail.as_view(), name='system_job_detail'),
    url(r'^(?P<pk>[0-9]+)/cancel/$', SystemJobCancel.as_view(), name='system_job_cancel'),
    url(r'^(?P<pk>[0-9]+)/notifications/$', SystemJobNotificationsList.as_view(), name='system_job_notifications_list'),
]

__all__ = ['urls']
32  awx/api/urls/system_job_template.py  Normal file
@@ -0,0 +1,32 @@
# Copyright (c) 2017 Ansible, Inc.
# All Rights Reserved.

from django.conf.urls import url

from awx.api.views import (
    SystemJobTemplateList,
    SystemJobTemplateDetail,
    SystemJobTemplateLaunch,
    SystemJobTemplateJobsList,
    SystemJobTemplateSchedulesList,
    SystemJobTemplateNotificationTemplatesAnyList,
    SystemJobTemplateNotificationTemplatesErrorList,
    SystemJobTemplateNotificationTemplatesSuccessList,
)


urls = [
    url(r'^$', SystemJobTemplateList.as_view(), name='system_job_template_list'),
    url(r'^(?P<pk>[0-9]+)/$', SystemJobTemplateDetail.as_view(), name='system_job_template_detail'),
    url(r'^(?P<pk>[0-9]+)/launch/$', SystemJobTemplateLaunch.as_view(), name='system_job_template_launch'),
    url(r'^(?P<pk>[0-9]+)/jobs/$', SystemJobTemplateJobsList.as_view(), name='system_job_template_jobs_list'),
    url(r'^(?P<pk>[0-9]+)/schedules/$', SystemJobTemplateSchedulesList.as_view(), name='system_job_template_schedules_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_any/$', SystemJobTemplateNotificationTemplatesAnyList.as_view(),
        name='system_job_template_notification_templates_any_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', SystemJobTemplateNotificationTemplatesErrorList.as_view(),
        name='system_job_template_notification_templates_error_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', SystemJobTemplateNotificationTemplatesSuccessList.as_view(),
        name='system_job_template_notification_templates_success_list'),
]

__all__ = ['urls']
31  awx/api/urls/team.py  Normal file
@@ -0,0 +1,31 @@
# Copyright (c) 2017 Ansible, Inc.
# All Rights Reserved.

from django.conf.urls import url

from awx.api.views import (
    TeamList,
    TeamDetail,
    TeamProjectsList,
    TeamUsersList,
    TeamCredentialsList,
    TeamRolesList,
    TeamObjectRolesList,
    TeamActivityStreamList,
    TeamAccessList,
)


urls = [
    url(r'^$', TeamList.as_view(), name='team_list'),
    url(r'^(?P<pk>[0-9]+)/$', TeamDetail.as_view(), name='team_detail'),
    url(r'^(?P<pk>[0-9]+)/projects/$', TeamProjectsList.as_view(), name='team_projects_list'),
    url(r'^(?P<pk>[0-9]+)/users/$', TeamUsersList.as_view(), name='team_users_list'),
    url(r'^(?P<pk>[0-9]+)/credentials/$', TeamCredentialsList.as_view(), name='team_credentials_list'),
    url(r'^(?P<pk>[0-9]+)/roles/$', TeamRolesList.as_view(), name='team_roles_list'),
    url(r'^(?P<pk>[0-9]+)/object_roles/$', TeamObjectRolesList.as_view(), name='team_object_roles_list'),
    url(r'^(?P<pk>[0-9]+)/activity_stream/$', TeamActivityStreamList.as_view(), name='team_activity_stream_list'),
    url(r'^(?P<pk>[0-9]+)/access_list/$', TeamAccessList.as_view(), name='team_access_list'),
]

__all__ = ['urls']
123  awx/api/urls/urls.py  Normal file
@@ -0,0 +1,123 @@
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved.

from __future__ import absolute_import, unicode_literals
from django.conf.urls import include, url

from awx.api.views import (
    ApiRootView,
    ApiV1RootView,
    ApiV2RootView,
    ApiV1PingView,
    ApiV1ConfigView,
    AuthView,
    AuthTokenView,
    UserMeList,
    DashboardView,
    DashboardJobsGraphView,
    UnifiedJobTemplateList,
    UnifiedJobList,
    HostAnsibleFactsDetail,
    JobCredentialsList,
    JobExtraCredentialsList,
    JobTemplateCredentialsList,
    JobTemplateExtraCredentialsList,
)

from .organization import urls as organization_urls
from .user import urls as user_urls
from .project import urls as project_urls
from .project_update import urls as project_update_urls
from .inventory import urls as inventory_urls
from .team import urls as team_urls
from .host import urls as host_urls
from .group import urls as group_urls
from .inventory_source import urls as inventory_source_urls
from .inventory_update import urls as inventory_update_urls
from .inventory_script import urls as inventory_script_urls
from .credential_type import urls as credential_type_urls
from .credential import urls as credential_urls
from .role import urls as role_urls
from .job_template import urls as job_template_urls
from .job import urls as job_urls
from .job_host_summary import urls as job_host_summary_urls
from .job_event import urls as job_event_urls
from .ad_hoc_command import urls as ad_hoc_command_urls
from .ad_hoc_command_event import urls as ad_hoc_command_event_urls
from .system_job_template import urls as system_job_template_urls
from .system_job import urls as system_job_urls
from .workflow_job_template import urls as workflow_job_template_urls
from .workflow_job import urls as workflow_job_urls
from .notification_template import urls as notification_template_urls
from .notification import urls as notification_urls
from .label import urls as label_urls
from .workflow_job_template_node import urls as workflow_job_template_node_urls
from .workflow_job_node import urls as workflow_job_node_urls
from .schedule import urls as schedule_urls
from .activity_stream import urls as activity_stream_urls
from .instance import urls as instance_urls
from .instance_group import urls as instance_group_urls


v1_urls = [
    url(r'^$', ApiV1RootView.as_view(), name='api_v1_root_view'),
    url(r'^ping/$', ApiV1PingView.as_view(), name='api_v1_ping_view'),
    url(r'^config/$', ApiV1ConfigView.as_view(), name='api_v1_config_view'),
    url(r'^auth/$', AuthView.as_view()),
    url(r'^authtoken/$', AuthTokenView.as_view(), name='auth_token_view'),
    url(r'^me/$', UserMeList.as_view(), name='user_me_list'),
    url(r'^dashboard/$', DashboardView.as_view(), name='dashboard_view'),
    url(r'^dashboard/graphs/jobs/$', DashboardJobsGraphView.as_view(), name='dashboard_jobs_graph_view'),
    url(r'^settings/', include('awx.conf.urls')),
    url(r'^instances/', include(instance_urls)),
    url(r'^instance_groups/', include(instance_group_urls)),
    url(r'^schedules/', include(schedule_urls)),
    url(r'^organizations/', include(organization_urls)),
    url(r'^users/', include(user_urls)),
    url(r'^projects/', include(project_urls)),
    url(r'^project_updates/', include(project_update_urls)),
    url(r'^teams/', include(team_urls)),
    url(r'^inventories/', include(inventory_urls)),
    url(r'^hosts/', include(host_urls)),
    url(r'^groups/', include(group_urls)),
    url(r'^inventory_sources/', include(inventory_source_urls)),
    url(r'^inventory_updates/', include(inventory_update_urls)),
    url(r'^inventory_scripts/', include(inventory_script_urls)),
    url(r'^credentials/', include(credential_urls)),
    url(r'^roles/', include(role_urls)),
    url(r'^job_templates/', include(job_template_urls)),
    url(r'^jobs/', include(job_urls)),
    url(r'^job_host_summaries/', include(job_host_summary_urls)),
    url(r'^job_events/', include(job_event_urls)),
    url(r'^ad_hoc_commands/', include(ad_hoc_command_urls)),
    url(r'^ad_hoc_command_events/', include(ad_hoc_command_event_urls)),
    url(r'^system_job_templates/', include(system_job_template_urls)),
    url(r'^system_jobs/', include(system_job_urls)),
    url(r'^notification_templates/', include(notification_template_urls)),
    url(r'^notifications/', include(notification_urls)),
    url(r'^workflow_job_templates/', include(workflow_job_template_urls)),
    url(r'^workflow_jobs/', include(workflow_job_urls)),
    url(r'^labels/', include(label_urls)),
    url(r'^workflow_job_template_nodes/', include(workflow_job_template_node_urls)),
    url(r'^workflow_job_nodes/', include(workflow_job_node_urls)),
    url(r'^unified_job_templates/$', UnifiedJobTemplateList.as_view(), name='unified_job_template_list'),
    url(r'^unified_jobs/$', UnifiedJobList.as_view(), name='unified_job_list'),
    url(r'^activity_stream/', include(activity_stream_urls)),
]

v2_urls = [
    url(r'^$', ApiV2RootView.as_view(), name='api_v2_root_view'),
    url(r'^credential_types/', include(credential_type_urls)),
    url(r'^hosts/(?P<pk>[0-9]+)/ansible_facts/$', HostAnsibleFactsDetail.as_view(), name='host_ansible_facts_detail'),
    url(r'^jobs/(?P<pk>[0-9]+)/extra_credentials/$', JobExtraCredentialsList.as_view(), name='job_extra_credentials_list'),
    url(r'^jobs/(?P<pk>[0-9]+)/credentials/$', JobCredentialsList.as_view(), name='job_credentials_list'),
    url(r'^job_templates/(?P<pk>[0-9]+)/extra_credentials/$', JobTemplateExtraCredentialsList.as_view(), name='job_template_extra_credentials_list'),
    url(r'^job_templates/(?P<pk>[0-9]+)/credentials/$', JobTemplateCredentialsList.as_view(), name='job_template_credentials_list'),
]

app_name = 'api'
urlpatterns = [
    url(r'^$', ApiRootView.as_view(), name='api_root_view'),
    url(r'^(?P<version>(v2))/', include(v2_urls)),
    url(r'^(?P<version>(v1|v2))/', include(v1_urls))
]
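The two versioned includes layer deliberately: v2_urls is matched first and only under /v2/, while everything in v1_urls answers under both /v1/ and /v2/, with the captured version kwarg passed through to every view. A minimal sketch of the resulting resolution, assuming the urlconf above is mounted at /api/:

    # Illustrative only; the /api/ mount point is an assumption.
    from django.core.urlresolvers import resolve  # django.urls.resolve on Django >= 1.10

    match = resolve('/api/v2/hosts/1/ansible_facts/')   # a v2-only route
    assert match.url_name == 'host_ansible_facts_detail'

    for version in ('v1', 'v2'):                        # shared v1 routes serve both prefixes
        match = resolve('/api/%s/hosts/1/' % version)
        assert match.url_name == 'host_detail'
        assert match.kwargs['version'] == version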
33  awx/api/urls/user.py  Normal file
@@ -0,0 +1,33 @@
# Copyright (c) 2017 Ansible, Inc.
# All Rights Reserved.

from django.conf.urls import url

from awx.api.views import (
    UserList,
    UserDetail,
    UserTeamsList,
    UserOrganizationsList,
    UserAdminOfOrganizationsList,
    UserProjectsList,
    UserCredentialsList,
    UserRolesList,
    UserActivityStreamList,
    UserAccessList,
)


urls = [
    url(r'^$', UserList.as_view(), name='user_list'),
    url(r'^(?P<pk>[0-9]+)/$', UserDetail.as_view(), name='user_detail'),
    url(r'^(?P<pk>[0-9]+)/teams/$', UserTeamsList.as_view(), name='user_teams_list'),
    url(r'^(?P<pk>[0-9]+)/organizations/$', UserOrganizationsList.as_view(), name='user_organizations_list'),
    url(r'^(?P<pk>[0-9]+)/admin_of_organizations/$', UserAdminOfOrganizationsList.as_view(), name='user_admin_of_organizations_list'),
    url(r'^(?P<pk>[0-9]+)/projects/$', UserProjectsList.as_view(), name='user_projects_list'),
    url(r'^(?P<pk>[0-9]+)/credentials/$', UserCredentialsList.as_view(), name='user_credentials_list'),
    url(r'^(?P<pk>[0-9]+)/roles/$', UserRolesList.as_view(), name='user_roles_list'),
    url(r'^(?P<pk>[0-9]+)/activity_stream/$', UserActivityStreamList.as_view(), name='user_activity_stream_list'),
    url(r'^(?P<pk>[0-9]+)/access_list/$', UserAccessList.as_view(), name='user_access_list'),
]

__all__ = ['urls']
29  awx/api/urls/workflow_job.py  Normal file
@@ -0,0 +1,29 @@
# Copyright (c) 2017 Ansible, Inc.
# All Rights Reserved.

from django.conf.urls import url

from awx.api.views import (
    WorkflowJobList,
    WorkflowJobDetail,
    WorkflowJobWorkflowNodesList,
    WorkflowJobLabelList,
    WorkflowJobCancel,
    WorkflowJobRelaunch,
    WorkflowJobNotificationsList,
    WorkflowJobActivityStreamList,
)


urls = [
    url(r'^$', WorkflowJobList.as_view(), name='workflow_job_list'),
    url(r'^(?P<pk>[0-9]+)/$', WorkflowJobDetail.as_view(), name='workflow_job_detail'),
    url(r'^(?P<pk>[0-9]+)/workflow_nodes/$', WorkflowJobWorkflowNodesList.as_view(), name='workflow_job_workflow_nodes_list'),
    url(r'^(?P<pk>[0-9]+)/labels/$', WorkflowJobLabelList.as_view(), name='workflow_job_label_list'),
    url(r'^(?P<pk>[0-9]+)/cancel/$', WorkflowJobCancel.as_view(), name='workflow_job_cancel'),
    url(r'^(?P<pk>[0-9]+)/relaunch/$', WorkflowJobRelaunch.as_view(), name='workflow_job_relaunch'),
    url(r'^(?P<pk>[0-9]+)/notifications/$', WorkflowJobNotificationsList.as_view(), name='workflow_job_notifications_list'),
    url(r'^(?P<pk>[0-9]+)/activity_stream/$', WorkflowJobActivityStreamList.as_view(), name='workflow_job_activity_stream_list'),
]

__all__ = ['urls']
25  awx/api/urls/workflow_job_node.py  Normal file
@@ -0,0 +1,25 @@
# Copyright (c) 2017 Ansible, Inc.
# All Rights Reserved.

from django.conf.urls import url

from awx.api.views import (
    WorkflowJobNodeList,
    WorkflowJobNodeDetail,
    WorkflowJobNodeSuccessNodesList,
    WorkflowJobNodeFailureNodesList,
    WorkflowJobNodeAlwaysNodesList,
    WorkflowJobNodeCredentialsList,
)


urls = [
    url(r'^$', WorkflowJobNodeList.as_view(), name='workflow_job_node_list'),
    url(r'^(?P<pk>[0-9]+)/$', WorkflowJobNodeDetail.as_view(), name='workflow_job_node_detail'),
    url(r'^(?P<pk>[0-9]+)/success_nodes/$', WorkflowJobNodeSuccessNodesList.as_view(), name='workflow_job_node_success_nodes_list'),
    url(r'^(?P<pk>[0-9]+)/failure_nodes/$', WorkflowJobNodeFailureNodesList.as_view(), name='workflow_job_node_failure_nodes_list'),
    url(r'^(?P<pk>[0-9]+)/always_nodes/$', WorkflowJobNodeAlwaysNodesList.as_view(), name='workflow_job_node_always_nodes_list'),
    url(r'^(?P<pk>[0-9]+)/credentials/$', WorkflowJobNodeCredentialsList.as_view(), name='workflow_job_node_credentials_list'),
]

__all__ = ['urls']
46  awx/api/urls/workflow_job_template.py  Normal file
@@ -0,0 +1,46 @@
# Copyright (c) 2017 Ansible, Inc.
# All Rights Reserved.

from django.conf.urls import url

from awx.api.views import (
    WorkflowJobTemplateList,
    WorkflowJobTemplateDetail,
    WorkflowJobTemplateJobsList,
    WorkflowJobTemplateLaunch,
    WorkflowJobTemplateCopy,
    WorkflowJobTemplateSchedulesList,
    WorkflowJobTemplateSurveySpec,
    WorkflowJobTemplateWorkflowNodesList,
    WorkflowJobTemplateActivityStreamList,
    WorkflowJobTemplateNotificationTemplatesAnyList,
    WorkflowJobTemplateNotificationTemplatesErrorList,
    WorkflowJobTemplateNotificationTemplatesSuccessList,
    WorkflowJobTemplateAccessList,
    WorkflowJobTemplateObjectRolesList,
    WorkflowJobTemplateLabelList,
)


urls = [
    url(r'^$', WorkflowJobTemplateList.as_view(), name='workflow_job_template_list'),
    url(r'^(?P<pk>[0-9]+)/$', WorkflowJobTemplateDetail.as_view(), name='workflow_job_template_detail'),
    url(r'^(?P<pk>[0-9]+)/workflow_jobs/$', WorkflowJobTemplateJobsList.as_view(), name='workflow_job_template_jobs_list'),
    url(r'^(?P<pk>[0-9]+)/launch/$', WorkflowJobTemplateLaunch.as_view(), name='workflow_job_template_launch'),
    url(r'^(?P<pk>[0-9]+)/copy/$', WorkflowJobTemplateCopy.as_view(), name='workflow_job_template_copy'),
    url(r'^(?P<pk>[0-9]+)/schedules/$', WorkflowJobTemplateSchedulesList.as_view(), name='workflow_job_template_schedules_list'),
    url(r'^(?P<pk>[0-9]+)/survey_spec/$', WorkflowJobTemplateSurveySpec.as_view(), name='workflow_job_template_survey_spec'),
    url(r'^(?P<pk>[0-9]+)/workflow_nodes/$', WorkflowJobTemplateWorkflowNodesList.as_view(), name='workflow_job_template_workflow_nodes_list'),
    url(r'^(?P<pk>[0-9]+)/activity_stream/$', WorkflowJobTemplateActivityStreamList.as_view(), name='workflow_job_template_activity_stream_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_any/$', WorkflowJobTemplateNotificationTemplatesAnyList.as_view(),
        name='workflow_job_template_notification_templates_any_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', WorkflowJobTemplateNotificationTemplatesErrorList.as_view(),
        name='workflow_job_template_notification_templates_error_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', WorkflowJobTemplateNotificationTemplatesSuccessList.as_view(),
        name='workflow_job_template_notification_templates_success_list'),
    url(r'^(?P<pk>[0-9]+)/access_list/$', WorkflowJobTemplateAccessList.as_view(), name='workflow_job_template_access_list'),
    url(r'^(?P<pk>[0-9]+)/object_roles/$', WorkflowJobTemplateObjectRolesList.as_view(), name='workflow_job_template_object_roles_list'),
    url(r'^(?P<pk>[0-9]+)/labels/$', WorkflowJobTemplateLabelList.as_view(), name='workflow_job_template_label_list'),
]

__all__ = ['urls']
25  awx/api/urls/workflow_job_template_node.py  Normal file
@@ -0,0 +1,25 @@
# Copyright (c) 2017 Ansible, Inc.
# All Rights Reserved.

from django.conf.urls import url

from awx.api.views import (
    WorkflowJobTemplateNodeList,
    WorkflowJobTemplateNodeDetail,
    WorkflowJobTemplateNodeSuccessNodesList,
    WorkflowJobTemplateNodeFailureNodesList,
    WorkflowJobTemplateNodeAlwaysNodesList,
    WorkflowJobTemplateNodeCredentialsList,
)


urls = [
    url(r'^$', WorkflowJobTemplateNodeList.as_view(), name='workflow_job_template_node_list'),
    url(r'^(?P<pk>[0-9]+)/$', WorkflowJobTemplateNodeDetail.as_view(), name='workflow_job_template_node_detail'),
    url(r'^(?P<pk>[0-9]+)/success_nodes/$', WorkflowJobTemplateNodeSuccessNodesList.as_view(), name='workflow_job_template_node_success_nodes_list'),
    url(r'^(?P<pk>[0-9]+)/failure_nodes/$', WorkflowJobTemplateNodeFailureNodesList.as_view(), name='workflow_job_template_node_failure_nodes_list'),
    url(r'^(?P<pk>[0-9]+)/always_nodes/$', WorkflowJobTemplateNodeAlwaysNodesList.as_view(), name='workflow_job_template_node_always_nodes_list'),
    url(r'^(?P<pk>[0-9]+)/credentials/$', WorkflowJobTemplateNodeCredentialsList.as_view(), name='workflow_job_template_node_credentials_list'),
]

__all__ = ['urls']
557  awx/api/views.py
@@ -1,4 +1,3 @@
-
 # Copyright (c) 2015 Ansible, Inc.
 # All Rights Reserved.
 
@@ -14,7 +13,7 @@ import sys
 import logging
 import requests
 from base64 import b64encode
-from collections import OrderedDict
+from collections import OrderedDict, Iterable
 
 # Django
 from django.conf import settings
@@ -28,7 +27,6 @@ from django.utils.timezone import now
 from django.views.decorators.csrf import csrf_exempt
 from django.views.decorators.cache import never_cache
 from django.template.loader import render_to_string
-from django.core.servers.basehttp import FileWrapper
 from django.http import HttpResponse
 from django.contrib.contenttypes.models import ContentType
 from django.utils.translation import ugettext_lazy as _
@@ -54,13 +52,15 @@ import qsstats
 import ansiconv
 
 # Python Social Auth
-from social.backends.utils import load_backends
+from social_core.backends.utils import load_backends
+
+from wsgiref.util import FileWrapper
 
 # AWX
 from awx.main.tasks import send_notifications
 from awx.main.access import get_user_queryset
 from awx.main.ha import is_ha_environment
-from awx.api.authentication import TaskAuthentication, TokenGetAuthentication
+from awx.api.authentication import TokenGetAuthentication
 from awx.api.filters import V1CredentialFilterBackend
 from awx.api.generics import get_view_name
 from awx.api.generics import * # noqa
@@ -69,7 +69,7 @@ from awx.conf.license import get_license, feature_enabled, feature_exists, LicenseForbids
 from awx.main.models import * # noqa
-from awx.main.utils import * # noqa
+from awx.main.utils import (
+    callback_filter_out_ansible_extra_vars,
+    extract_ansible_vars,
+    decrypt_field,
+)
 from awx.main.utils.filters import SmartFilter
@@ -122,7 +122,7 @@ class WorkflowsEnforcementMixin(object):
     Mixin to check that license supports workflows.
     '''
     def check_permissions(self, request):
-        if not feature_enabled('workflows') and request.method not in ('GET', 'OPTIONS'):
+        if not feature_enabled('workflows') and request.method not in ('GET', 'OPTIONS', 'DELETE'):
             raise LicenseForbids(_('Your license does not allow use of workflows.'))
         return super(WorkflowsEnforcementMixin, self).check_permissions(request)
 
@@ -140,7 +140,8 @@ class UnifiedJobDeletionMixin(object):
             raise PermissionDenied(detail=_('Cannot delete job resource when associated workflow job is running.'))
         except self.model.unified_job_node.RelatedObjectDoesNotExist:
             pass
-        if obj.status in ACTIVE_STATES:
+        # Still allow deletion of new status, because these can be manually created
+        if obj.status in ACTIVE_STATES and obj.status != 'new':
             raise PermissionDenied(detail=_("Cannot delete running job resource."))
         obj.delete()
         return Response(status=status.HTTP_204_NO_CONTENT)
@@ -360,7 +361,7 @@ class ApiV1ConfigView(APIView):
         try:
             settings.LICENSE = {}
             return Response(status=status.HTTP_204_NO_CONTENT)
-        except:
+        except Exception:
            # FIX: Log
            return Response({"error": _("Failed to remove license (%s)") % has_error}, status=status.HTTP_400_BAD_REQUEST)
 
@@ -606,6 +607,46 @@ class ScheduleDetail(RetrieveUpdateDestroyAPIView):
     new_in_148 = True
 
 
+class LaunchConfigCredentialsBase(SubListAttachDetachAPIView):
+
+    model = Credential
+    serializer_class = CredentialSerializer
+    relationship = 'credentials'
+
+    def is_valid_relation(self, parent, sub, created=False):
+        if not parent.unified_job_template:
+            return {"msg": _("Cannot assign credential when related template is null.")}
+
+        ask_mapping = parent.unified_job_template.get_ask_mapping()
+
+        if self.relationship not in ask_mapping:
+            return {"msg": _("Related template cannot accept {} on launch.").format(self.relationship)}
+        elif sub.passwords_needed:
+            return {"msg": _("Credential that requires user input on launch "
+                             "cannot be used in saved launch configuration.")}
+
+        ask_field_name = ask_mapping[self.relationship]
+
+        if not getattr(parent.unified_job_template, ask_field_name):
+            return {"msg": _("Related template is not configured to accept credentials on launch.")}
+        elif sub.unique_hash() in [cred.unique_hash() for cred in parent.credentials.all()]:
+            return {"msg": _("This launch configuration already provides a {credential_type} credential.").format(
+                credential_type=sub.unique_hash(display=True))}
+        elif sub.pk in parent.unified_job_template.credentials.values_list('pk', flat=True):
+            return {"msg": _("Related template already uses {credential_type} credential.").format(
+                credential_type=sub.name)}
+
+        # None means there were no validation errors
+        return None
+
+
+class ScheduleCredentialsList(LaunchConfigCredentialsBase):
+
+    parent_model = Schedule
+    new_in_330 = True
+    new_in_api_v2 = True
+
+
 class ScheduleUnifiedJobsList(SubListAPIView):
 
     model = UnifiedJob
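is_valid_relation() acts as a validation hook for the attach operation: a returned dict rejects the request with that payload, while None lets the association proceed. A rough sketch of the calling side (the plumbing below paraphrases what SubListAttachDetachAPIView is assumed to do; only the hook's contract is shown in the diff):

    from rest_framework import status
    from rest_framework.response import Response

    def attach_credential(view, parent, sub):
        errors = view.is_valid_relation(parent, sub)   # dict -> reject, None -> proceed
        if errors is not None:
            return Response(errors, status=status.HTTP_400_BAD_REQUEST)
        parent.credentials.add(sub)                    # record the launch-config credential
        return Response(status=status.HTTP_204_NO_CONTENT)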
@@ -1300,8 +1341,11 @@ class ProjectUpdateView(RetrieveAPIView):
             if not project_update:
                 return Response({}, status=status.HTTP_400_BAD_REQUEST)
             else:
+                data = OrderedDict()
+                data['project_update'] = project_update.id
+                data.update(ProjectUpdateSerializer(project_update, context=self.get_serializer_context()).to_representation(project_update))
                 headers = {'Location': project_update.get_absolute_url(request=request)}
-                return Response({'project_update': project_update.id},
+                return Response(data,
                                 headers=headers,
                                 status=status.HTTP_202_ACCEPTED)
         else:
@@ -1325,8 +1369,8 @@ class ProjectUpdateDetail(UnifiedJobDeletionMixin, RetrieveDestroyAPIView):
 class ProjectUpdateCancel(RetrieveAPIView):
 
     model = ProjectUpdate
-    obj_permission_type = 'cancel'
     serializer_class = ProjectUpdateCancelSerializer
+    is_job_cancel = True
     new_in_13 = True
 
     def post(self, request, *args, **kwargs):
@@ -2119,7 +2163,9 @@ class HostInsights(GenericAPIView):
         if res.status_code == 401:
             return (dict(error=_('Unauthorized access. Please check your Insights Credential username and password.')), status.HTTP_502_BAD_GATEWAY)
         elif res.status_code != 200:
-            return (dict(error=_('Failed to gather reports and maintenance plans from Insights API at URL {}. Server responded with {} status code and message {}').format(url, res.status_code, res.content)), status.HTTP_502_BAD_GATEWAY)
+            return (dict(error=_(
+                'Failed to gather reports and maintenance plans from Insights API at URL {}. Server responded with {} status code and message {}'
+            ).format(url, res.status_code, res.content)), status.HTTP_502_BAD_GATEWAY)
 
         try:
             filtered_insights_content = filter_insights_api_response(res.json())
@@ -2367,82 +2413,23 @@ class InventoryScriptView(RetrieveAPIView):
 
     model = Inventory
     serializer_class = InventoryScriptSerializer
-    authentication_classes = [TaskAuthentication] + api_settings.DEFAULT_AUTHENTICATION_CLASSES
-    permission_classes = (TaskPermission,)
     filter_backends = ()
 
     def retrieve(self, request, *args, **kwargs):
         obj = self.get_object()
         hostname = request.query_params.get('host', '')
-        hostvars = bool(request.query_params.get('hostvars', ''))
         show_all = bool(request.query_params.get('all', ''))
-        if show_all:
-            hosts_q = dict()
-        else:
-            hosts_q = dict(enabled=True)
         if hostname:
-            host = get_object_or_404(obj.hosts, name=hostname, **hosts_q)
-            data = host.variables_dict
-        else:
-            data = OrderedDict()
-            if obj.variables_dict:
-                all_group = data.setdefault('all', OrderedDict())
-                all_group['vars'] = obj.variables_dict
-            if obj.kind == 'smart':
-                if len(obj.hosts.all()) == 0:
-                    return Response({})
-                else:
-                    all_group = data.setdefault('all', OrderedDict())
-                    smart_hosts_qs = obj.hosts.all().order_by('name')
-                    smart_hosts = list(smart_hosts_qs.values_list('name', flat=True))
-                    all_group['hosts'] = smart_hosts
-            else:
-                # Add hosts without a group to the all group.
-                groupless_hosts_qs = obj.hosts.filter(groups__isnull=True, **hosts_q).order_by('name')
-                groupless_hosts = list(groupless_hosts_qs.values_list('name', flat=True))
-                if groupless_hosts:
-                    all_group = data.setdefault('all', OrderedDict())
-                    all_group['hosts'] = groupless_hosts
-
-                # Build in-memory mapping of groups and their hosts.
-                group_hosts_kw = dict(group__inventory_id=obj.id, host__inventory_id=obj.id)
-                if 'enabled' in hosts_q:
-                    group_hosts_kw['host__enabled'] = hosts_q['enabled']
-                group_hosts_qs = Group.hosts.through.objects.filter(**group_hosts_kw)
-                group_hosts_qs = group_hosts_qs.order_by('host__name')
-                group_hosts_qs = group_hosts_qs.values_list('group_id', 'host_id', 'host__name')
-                group_hosts_map = {}
-                for group_id, host_id, host_name in group_hosts_qs:
-                    group_hostnames = group_hosts_map.setdefault(group_id, [])
-                    group_hostnames.append(host_name)
-
-                # Build in-memory mapping of groups and their children.
-                group_parents_qs = Group.parents.through.objects.filter(
-                    from_group__inventory_id=obj.id,
-                    to_group__inventory_id=obj.id,
-                )
-                group_parents_qs = group_parents_qs.order_by('from_group__name')
-                group_parents_qs = group_parents_qs.values_list('from_group_id', 'from_group__name', 'to_group_id')
-                group_children_map = {}
-                for from_group_id, from_group_name, to_group_id in group_parents_qs:
-                    group_children = group_children_map.setdefault(to_group_id, [])
-                    group_children.append(from_group_name)
-
-                # Now use in-memory maps to build up group info.
-                for group in obj.groups.all():
-                    group_info = OrderedDict()
-                    group_info['hosts'] = group_hosts_map.get(group.id, [])
-                    group_info['children'] = group_children_map.get(group.id, [])
-                    group_info['vars'] = group.variables_dict
-                    data[group.name] = group_info
-
-        if hostvars:
-            data.setdefault('_meta', OrderedDict())
-            data['_meta'].setdefault('hostvars', OrderedDict())
-            for host in obj.hosts.filter(**hosts_q):
-                data['_meta']['hostvars'][host.name] = host.variables_dict
-
-        return Response(data)
+            hosts_q = dict(name=hostname)
+            if not show_all:
+                hosts_q['enabled'] = True
+            host = get_object_or_404(obj.hosts, **hosts_q)
+            return Response(host.variables_dict)
+        return Response(obj.get_script_data(
+            hostvars=bool(request.query_params.get('hostvars', '')),
+            show_all=show_all
+        ))
 
 
 class InventoryTreeView(RetrieveAPIView):
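The rewritten retrieve() keeps the endpoint's contract, Ansible's external-inventory JSON, but delegates assembly to Inventory.get_script_data() instead of building the maps inline. A sample of the payload shape (host, group, and variable names invented for illustration):

    # Hypothetical response from GET /inventories/N/script/?hostvars=1
    {
        "all": {"vars": {"ansible_connection": "local"}},
        "webservers": {
            "hosts": ["web1.example.com"],
            "children": [],
            "vars": {"http_port": 80},
        },
        "_meta": {"hostvars": {"web1.example.com": {"ansible_host": "10.0.0.5"}}},
    }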
@@ -2495,9 +2482,9 @@ class InventoryInventorySourcesUpdate(RetrieveAPIView):
|
||||
view_name = _('Inventory Sources Update')
|
||||
|
||||
model = Inventory
|
||||
obj_permission_type = 'start'
|
||||
serializer_class = InventorySourceUpdateSerializer
|
||||
permission_classes = (InventoryInventorySourcesUpdatePermission,)
|
||||
is_job_start = True
|
||||
new_in_320 = True
|
||||
|
||||
def retrieve(self, request, *args, **kwargs):
|
||||
@@ -2515,10 +2502,14 @@ class InventoryInventorySourcesUpdate(RetrieveAPIView):
|
||||
successes = 0
|
||||
failures = 0
|
||||
for inventory_source in inventory.inventory_sources.exclude(source=''):
|
||||
details = {'inventory_source': inventory_source.pk, 'status': None}
|
||||
details = OrderedDict()
|
||||
details['inventory_source'] = inventory_source.pk
|
||||
details['status'] = None
|
||||
if inventory_source.can_update:
|
||||
update = inventory_source.update()
|
||||
details.update(InventoryUpdateSerializer(update, context=self.get_serializer_context()).to_representation(update))
|
||||
details['status'] = 'started'
|
||||
details['inventory_update'] = inventory_source.update().id
|
||||
details['inventory_update'] = update.id
|
||||
successes += 1
|
||||
else:
|
||||
if not details.get('status'):
|
||||
@@ -2647,8 +2638,8 @@ class InventorySourceUpdatesList(SubListAPIView):

class InventorySourceUpdateView(RetrieveAPIView):

    model = InventorySource
+    obj_permission_type = 'start'
    serializer_class = InventorySourceUpdateSerializer
-    is_job_start = True
    new_in_14 = True

    def post(self, request, *args, **kwargs):

@@ -2659,8 +2650,10 @@ class InventorySourceUpdateView(RetrieveAPIView):
                return Response({}, status=status.HTTP_400_BAD_REQUEST)
            else:
                headers = {'Location': update.get_absolute_url(request=request)}
-                return Response(dict(inventory_update=update.id),
-                                status=status.HTTP_202_ACCEPTED, headers=headers)
+                data = OrderedDict()
+                data['inventory_update'] = update.id
+                data.update(InventoryUpdateSerializer(update, context=self.get_serializer_context()).to_representation(update))
+                return Response(data, status=status.HTTP_202_ACCEPTED, headers=headers)
        else:
            return self.http_method_not_allowed(request, *args, **kwargs)

@@ -2681,8 +2674,8 @@ class InventoryUpdateDetail(UnifiedJobDeletionMixin, RetrieveDestroyAPIView):

class InventoryUpdateCancel(RetrieveAPIView):

    model = InventoryUpdate
+    obj_permission_type = 'cancel'
    serializer_class = InventoryUpdateCancelSerializer
-    is_job_cancel = True
    new_in_14 = True

    def post(self, request, *args, **kwargs):

@@ -2711,7 +2704,7 @@ class JobTemplateList(ListCreateAPIView):
    always_allow_superuser = False
    capabilities_prefetch = [
        'admin', 'execute',
-        {'copy': ['project.use', 'inventory.use', 'credential.use', 'vault_credential.use']}
+        {'copy': ['project.use', 'inventory.use']}
    ]

    def post(self, request, *args, **kwargs):

@@ -2730,12 +2723,12 @@ class JobTemplateDetail(RetrieveUpdateDestroyAPIView):
    always_allow_superuser = False


-class JobTemplateLaunch(RetrieveAPIView, GenericAPIView):
+class JobTemplateLaunch(RetrieveAPIView):

    model = JobTemplate
+    obj_permission_type = 'start'
    metadata_class = JobTypeMetadata
    serializer_class = JobLaunchSerializer
-    is_job_start = True
    always_allow_superuser = False

    def update_raw_data(self, data):

@@ -2745,68 +2738,124 @@ class JobTemplateLaunch(RetrieveAPIView, GenericAPIView):
            return data
        extra_vars = data.pop('extra_vars', None) or {}
        if obj:
-            for p in obj.passwords_needed_to_start:
-                data[p] = u''
+            needed_passwords = obj.passwords_needed_to_start
+            if needed_passwords:
+                data['credential_passwords'] = {}
+                for p in needed_passwords:
+                    data['credential_passwords'][p] = u''
+            else:
+                data.pop('credential_passwords')
            for v in obj.variables_needed_to_start:
                extra_vars.setdefault(v, u'')
            if extra_vars:
                data['extra_vars'] = extra_vars
-            ask_for_vars_dict = obj._ask_for_vars_dict()
-            ask_for_vars_dict.pop('extra_vars')
-            if get_request_version(self.request) == 1:  # TODO: remove in 3.3
-                ask_for_vars_dict.pop('extra_credentials')
-            for field in ask_for_vars_dict:
-                if not ask_for_vars_dict[field]:
+            modified_ask_mapping = JobTemplate.get_ask_mapping()
+            modified_ask_mapping.pop('extra_vars')
+            for field, ask_field_name in modified_ask_mapping.items():
+                if not getattr(obj, ask_field_name):
                    data.pop(field, None)
-                elif field == 'inventory' or field == 'credential':
+                elif field == 'inventory':
                    data[field] = getattrd(obj, "%s.%s" % (field, 'id'), None)
-                elif field == 'extra_credentials':
-                    data[field] = [cred.id for cred in obj.extra_credentials.all()]
+                elif field == 'credentials':
+                    data[field] = [cred.id for cred in obj.credentials.all()]
                else:
                    data[field] = getattr(obj, field)
        return data

-    def post(self, request, *args, **kwargs):
-        obj = self.get_object()
+    def modernize_launch_payload(self, data, obj):
+        '''
+        Steps to do simple translations of request data to support
+        old field structure to launch endpoint
+        TODO: delete this method with future API version changes
+        '''
+        ignored_fields = {}
+        modern_data = data.copy()

        for fd in ('credential', 'vault_credential', 'inventory'):
            id_fd = '{}_id'.format(fd)
-            if fd not in request.data and id_fd in request.data:
-                request.data[fd] = request.data[id_fd]
+            if fd not in modern_data and id_fd in modern_data:
+                modern_data[fd] = modern_data[id_fd]

-        if get_request_version(self.request) == 1 and 'extra_credentials' in request.data:  # TODO: remove in 3.3
-            if hasattr(request.data, '_mutable') and not request.data._mutable:
-                request.data._mutable = True
-            extra_creds = request.data.pop('extra_credentials', None)
+        # This block causes `extra_credentials` to _always_ be ignored for
+        # the launch endpoint if we're accessing `/api/v1/`
+        if get_request_version(self.request) == 1 and 'extra_credentials' in modern_data:
+            extra_creds = modern_data.pop('extra_credentials', None)
            if extra_creds is not None:
                ignored_fields['extra_credentials'] = extra_creds

-        passwords = {}
-        serializer = self.serializer_class(instance=obj, data=request.data, context={'obj': obj, 'data': request.data, 'passwords': passwords})
+        # Automatically convert legacy launch credential arguments into a list of `.credentials`
+        if 'credentials' in modern_data and (
+            'credential' in modern_data or
+            'vault_credential' in modern_data or
+            'extra_credentials' in modern_data
+        ):
+            raise ParseError({"error": _(
+                "'credentials' cannot be used in combination with 'credential', 'vault_credential', or 'extra_credentials'."
+            )})
+
+        if (
+            'credential' in modern_data or
+            'vault_credential' in modern_data or
+            'extra_credentials' in modern_data
+        ):
+            # make a list of the current credentials
+            existing_credentials = obj.credentials.all()
+            new_credentials = []
+            for key, conditional in (
+                ('credential', lambda cred: cred.credential_type.kind != 'ssh'),
+                ('vault_credential', lambda cred: cred.credential_type.kind != 'vault'),
+                ('extra_credentials', lambda cred: cred.credential_type.kind not in ('cloud', 'net'))
+            ):
+                if key in modern_data:
+                    # if a specific deprecated key is specified, remove all
+                    # credentials of _that_ type from the list of current
+                    # credentials
+                    existing_credentials = filter(conditional, existing_credentials)
+                    prompted_value = modern_data.pop(key)

+                    # add the deprecated credential specified in the request
+                    if not isinstance(prompted_value, Iterable) or isinstance(prompted_value, basestring):
+                        prompted_value = [prompted_value]

+                    # If user gave extra_credentials, special case to use exactly
+                    # the given list without merging with JT credentials
+                    if key == 'extra_credentials' and prompted_value:
+                        obj._deprecated_credential_launch = True  # signal to not merge credentials
+                    new_credentials.extend(prompted_value)

+            # combine the list of "new" and the filtered list of "old"
+            new_credentials.extend([cred.pk for cred in existing_credentials])
+            if new_credentials:
+                modern_data['credentials'] = new_credentials

+        # credential passwords were historically provided as top-level attributes
+        if 'credential_passwords' not in modern_data:
+            modern_data['credential_passwords'] = data.copy()

+        return (modern_data, ignored_fields)
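A minimal sketch (ids invented, not from the patch) of the translation this method performs: a legacy single-credential key becomes a `credentials` list merged with the template's surviving credentials of other kinds.

legacy_payload = {'credential': 5, 'extra_vars': {'version': '1.2'}}
# Assuming the job template already carries a vault credential with pk=7,
# modernize_launch_payload() would hand back roughly:
modern_payload = {
    'credentials': [5, 7],
    'extra_vars': {'version': '1.2'},
    # historical behavior: the whole original payload doubles as the
    # credential_passwords source when none was given explicitly
    'credential_passwords': {'credential': 5, 'extra_vars': {'version': '1.2'}},
}
ignored = {}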

+    def post(self, request, *args, **kwargs):
+        obj = self.get_object()
+
+        try:
+            modern_data, ignored_fields = self.modernize_launch_payload(
+                data=request.data, obj=obj
+            )
+        except ParseError as exc:
+            return Response(exc.detail, status=status.HTTP_400_BAD_REQUEST)

+        serializer = self.serializer_class(data=modern_data, context={'template': obj})
        if not serializer.is_valid():
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

-        _accepted_or_ignored = obj._accept_or_ignore_job_kwargs(**request.data)
-        prompted_fields = _accepted_or_ignored[0]
-        ignored_fields.update(_accepted_or_ignored[1])
+        ignored_fields.update(serializer._ignored_fields)

-        for fd, model in (
-            ('credential', Credential),
-            ('vault_credential', Credential),
-            ('inventory', Inventory)):
-            if fd in prompted_fields and prompted_fields[fd] != getattrd(obj, '{}.pk'.format(fd), None):
-                new_res = get_object_or_400(model, pk=get_pk_from_dict(prompted_fields, fd))
-                use_role = getattr(new_res, 'use_role')
-                if request.user not in use_role:
-                    raise PermissionDenied()
+        if not request.user.can_access(JobLaunchConfig, 'add', serializer.validated_data, template=obj):
+            raise PermissionDenied()

-        for cred in prompted_fields.get('extra_credentials', []):
-            new_credential = get_object_or_400(Credential, pk=cred)
-            if request.user not in new_credential.use_role:
-                raise PermissionDenied()
-
-        new_job = obj.create_unified_job(**prompted_fields)
+        passwords = serializer.validated_data.pop('credential_passwords', {})
+        new_job = obj.create_unified_job(**serializer.validated_data)
        result = new_job.signal_start(**passwords)

        if not result:

@@ -2815,12 +2864,36 @@ class JobTemplateLaunch(RetrieveAPIView, GenericAPIView):
            return Response(data, status=status.HTTP_400_BAD_REQUEST)
        else:
            data = OrderedDict()
-            data['ignored_fields'] = ignored_fields
-            data.update(JobSerializer(new_job, context=self.get_serializer_context()).to_representation(new_job))
            data['job'] = new_job.id
+            data['ignored_fields'] = self.sanitize_for_response(ignored_fields)
+            data.update(JobSerializer(new_job, context=self.get_serializer_context()).to_representation(new_job))
            return Response(data, status=status.HTTP_201_CREATED)

+    def sanitize_for_response(self, data):
+        '''
+        Model objects cannot be serialized by DRF,
+        this replaces objects with their ids for inclusion in response
+        '''
+
+        def display_value(val):
+            if hasattr(val, 'id'):
+                return val.id
+            else:
+                return val
+
+        sanitized_data = {}
+        for field_name, value in data.items():
+            if isinstance(value, (set, list)):
+                sanitized_data[field_name] = []
+                for sub_value in value:
+                    sanitized_data[field_name].append(display_value(sub_value))
+            else:
+                sanitized_data[field_name] = display_value(value)
+
+        return sanitized_data
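A before/after sketch of that sanitization (assuming inv and cred are model instances with .id attributes, as ignored prompts can be):

before = {'inventory': inv, 'credentials': [cred], 'limit': 'webservers'}
after = {'inventory': inv.id, 'credentials': [cred.id], 'limit': 'webservers'}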


class JobTemplateSchedulesList(SubListCreateAPIView):

    view_name = _("Job Template Schedules")

@@ -2836,7 +2909,7 @@ class JobTemplateSchedulesList(SubListCreateAPIView):

class JobTemplateSurveySpec(GenericAPIView):

    model = JobTemplate
    parent_model = JobTemplate
    obj_permission_type = 'admin'
    serializer_class = EmptySerializer
    new_in_210 = True

@@ -2923,7 +2996,6 @@ class JobTemplateSurveySpec(GenericAPIView):

class WorkflowJobTemplateSurveySpec(WorkflowsEnforcementMixin, JobTemplateSurveySpec):

    model = WorkflowJobTemplate
    parent_model = WorkflowJobTemplate
    new_in_310 = True

@@ -2963,17 +3035,17 @@ class JobTemplateNotificationTemplatesSuccessList(SubListCreateAttachDetachAPIVi
    new_in_300 = True


-class JobTemplateExtraCredentialsList(SubListCreateAttachDetachAPIView):
+class JobTemplateCredentialsList(SubListCreateAttachDetachAPIView):

    model = Credential
    serializer_class = CredentialSerializer
    parent_model = JobTemplate
-    relationship = 'extra_credentials'
-    new_in_320 = True
+    relationship = 'credentials'
+    new_in_330 = True
+    new_in_api_v2 = True

    def get_queryset(self):
-        # Return the full list of extra_credentials
+        # Return the full list of credentials
        parent = self.get_parent_object()
        self.check_parent_access(parent)
        sublist_qs = getattrd(parent, self.relationship)

@@ -2984,15 +3056,29 @@ class JobTemplateExtraCredentialsList(SubListCreateAttachDetachAPIView):
        return sublist_qs

    def is_valid_relation(self, parent, sub, created=False):
-        current_extra_types = [
-            cred.credential_type.pk for cred in parent.extra_credentials.all()
-        ]
-        if sub.credential_type.pk in current_extra_types:
-            return {'error': _('Cannot assign multiple %s credentials.' % sub.credential_type.name)}
+        if sub.unique_hash() in [cred.unique_hash() for cred in parent.credentials.all()]:
+            return {"error": _("Cannot assign multiple {credential_type} credentials.".format(
+                credential_type=sub.unique_hash(display=True)))}

-        if sub.credential_type.kind not in ('net', 'cloud'):
+        return super(JobTemplateCredentialsList, self).is_valid_relation(parent, sub, created)


+class JobTemplateExtraCredentialsList(JobTemplateCredentialsList):
+
+    deprecated = True
+    new_in_320 = True
+    new_in_330 = False
+
+    def get_queryset(self):
+        sublist_qs = super(JobTemplateExtraCredentialsList, self).get_queryset()
+        sublist_qs = sublist_qs.filter(credential_type__kind__in=['cloud', 'net'])
+        return sublist_qs
+
+    def is_valid_relation(self, parent, sub, created=False):
+        valid = super(JobTemplateExtraCredentialsList, self).is_valid_relation(parent, sub, created)
+        if sub.credential_type.kind not in ('cloud', 'net'):
            return {'error': _('Extra credentials must be network or cloud.')}
-        return super(JobTemplateExtraCredentialsList, self).is_valid_relation(parent, sub, created)
+        return valid
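Sketched restatement of that rule (not additional patch code): unique_hash() identifies a credential by its type — plus, for vault credentials, a vault id, an assumption drawn from later AWX behavior — so a template may hold several cloud credentials as long as no two share a type.

def would_conflict(parent, sub):
    # mirrors the check above in isolation
    return sub.unique_hash() in [cred.unique_hash() for cred in parent.credentials.all()]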


class JobTemplateLabelList(DeleteLastUnattachLabelMixin, SubListCreateAttachDetachAPIView):

@@ -3085,6 +3171,8 @@ class JobTemplateCallback(GenericAPIView):
                matches.update(host_mappings[host_name])
            except socket.gaierror:
                pass
+            except UnicodeError:
+                pass
        return matches

    def get(self, request, *args, **kwargs):

@@ -3154,7 +3242,8 @@ class JobTemplateCallback(GenericAPIView):
        # Everything is fine; actually create the job.
        kv = {"limit": limit, "launch_type": 'callback'}
        if extra_vars is not None and job_template.ask_variables_on_launch:
-            kv['extra_vars'] = callback_filter_out_ansible_extra_vars(extra_vars)
+            extra_vars_redacted, removed = extract_ansible_vars(extra_vars)
+            kv['extra_vars'] = extra_vars_redacted
        with transaction.atomic():
            job = job_template.create_job(**kv)
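Illustrative behavior of extract_ansible_vars() as used above (variable names invented; treat the exact return shape as an assumption): reserved ansible_* keys are stripped out of callback extra_vars and reported separately.

submitted = {'ansible_connection': 'local', 'app_version': '1.2'}
extra_vars_redacted, removed = extract_ansible_vars(submitted)
# expected: extra_vars_redacted == {'app_version': '1.2'}, removed == set(['ansible_connection'])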

@@ -3221,10 +3310,20 @@ class WorkflowJobNodeDetail(WorkflowsEnforcementMixin, RetrieveAPIView):
    new_in_310 = True


+class WorkflowJobNodeCredentialsList(SubListAPIView):
+
+    model = Credential
+    serializer_class = CredentialSerializer
+    parent_model = WorkflowJobNode
+    relationship = 'credentials'
+    new_in_330 = True
+    new_in_api_v2 = True
+
+
class WorkflowJobTemplateNodeList(WorkflowsEnforcementMixin, ListCreateAPIView):

    model = WorkflowJobTemplateNode
-    serializer_class = WorkflowJobTemplateNodeListSerializer
+    serializer_class = WorkflowJobTemplateNodeSerializer
    new_in_310 = True

@@ -3234,21 +3333,18 @@ class WorkflowJobTemplateNodeDetail(WorkflowsEnforcementMixin, RetrieveUpdateDes
    serializer_class = WorkflowJobTemplateNodeDetailSerializer
    new_in_310 = True

-    def update_raw_data(self, data):
-        for fd in ['job_type', 'job_tags', 'skip_tags', 'limit', 'skip_tags']:
-            data[fd] = None
-        try:
-            obj = self.get_object()
-            data.update(obj.char_prompts)
-        except:
-            pass
-        return super(WorkflowJobTemplateNodeDetail, self).update_raw_data(data)

+class WorkflowJobTemplateNodeCredentialsList(LaunchConfigCredentialsBase):
+
+    parent_model = WorkflowJobTemplateNode
+    new_in_330 = True
+    new_in_api_v2 = True


class WorkflowJobTemplateNodeChildrenBaseList(WorkflowsEnforcementMixin, EnforceParentRelationshipMixin, SubListCreateAttachDetachAPIView):

    model = WorkflowJobTemplateNode
-    serializer_class = WorkflowJobTemplateNodeListSerializer
+    serializer_class = WorkflowJobTemplateNodeSerializer
    always_allow_superuser = True
    parent_model = WorkflowJobTemplateNode
    relationship = ''

@@ -3366,7 +3462,6 @@ class WorkflowJobTemplateDetail(WorkflowsEnforcementMixin, RetrieveUpdateDestroy

class WorkflowJobTemplateCopy(WorkflowsEnforcementMixin, GenericAPIView):

    model = WorkflowJobTemplate
    parent_model = WorkflowJobTemplate
    serializer_class = EmptySerializer
    new_in_310 = True

@@ -3406,9 +3501,9 @@ class WorkflowJobTemplateLaunch(WorkflowsEnforcementMixin, RetrieveAPIView):

    model = WorkflowJobTemplate
+    obj_permission_type = 'start'
    serializer_class = WorkflowJobLaunchSerializer
    new_in_310 = True
-    is_job_start = True
    always_allow_superuser = False

    def update_raw_data(self, data):

@@ -3426,30 +3521,28 @@ class WorkflowJobTemplateLaunch(WorkflowsEnforcementMixin, RetrieveAPIView):

    def post(self, request, *args, **kwargs):
        obj = self.get_object()
-        if not request.user.can_access(self.model, 'start', obj):
-            raise PermissionDenied()
-
        serializer = self.serializer_class(instance=obj, data=request.data)
        if not serializer.is_valid():
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

-        prompted_fields, ignored_fields = obj._accept_or_ignore_job_kwargs(**request.data)
+        prompted_fields, ignored_fields, errors = obj._accept_or_ignore_job_kwargs(**request.data)

        new_job = obj.create_unified_job(**prompted_fields)
        new_job.signal_start()

+        data = OrderedDict()
+        data['workflow_job'] = new_job.id
        data['ignored_fields'] = ignored_fields
        data.update(WorkflowJobSerializer(new_job, context=self.get_serializer_context()).to_representation(new_job))
-        data['workflow_job'] = new_job.id
        return Response(data, status=status.HTTP_201_CREATED)


class WorkflowJobRelaunch(WorkflowsEnforcementMixin, GenericAPIView):

    model = WorkflowJob
+    obj_permission_type = 'start'
    serializer_class = EmptySerializer
-    is_job_start = True
    new_in_310 = True

    def check_object_permissions(self, request, obj):

@@ -3475,17 +3568,12 @@ class WorkflowJobRelaunch(WorkflowsEnforcementMixin, GenericAPIView):

class WorkflowJobTemplateWorkflowNodesList(WorkflowsEnforcementMixin, SubListCreateAPIView):

    model = WorkflowJobTemplateNode
-    serializer_class = WorkflowJobTemplateNodeListSerializer
+    serializer_class = WorkflowJobTemplateNodeSerializer
    parent_model = WorkflowJobTemplate
    relationship = 'workflow_job_template_nodes'
    parent_key = 'workflow_job_template'
    new_in_310 = True

-    def update_raw_data(self, data):
-        for fd in ['job_type', 'job_tags', 'skip_tags', 'limit', 'skip_tags']:
-            data[fd] = None
-        return super(WorkflowJobTemplateWorkflowNodesList, self).update_raw_data(data)
-
    def get_queryset(self):
        return super(WorkflowJobTemplateWorkflowNodesList, self).get_queryset().order_by('id')

@@ -3606,8 +3694,8 @@ class WorkflowJobWorkflowNodesList(WorkflowsEnforcementMixin, SubListAPIView):

class WorkflowJobCancel(WorkflowsEnforcementMixin, RetrieveAPIView):

    model = WorkflowJob
+    obj_permission_type = 'cancel'
    serializer_class = WorkflowJobCancelSerializer
-    is_job_cancel = True
    new_in_310 = True

    def post(self, request, *args, **kwargs):

@@ -3661,8 +3749,8 @@ class SystemJobTemplateDetail(RetrieveAPIView):

class SystemJobTemplateLaunch(GenericAPIView):

    model = SystemJobTemplate
+    obj_permission_type = 'start'
    serializer_class = EmptySerializer
-    is_job_start = True
    new_in_210 = True

    def get(self, request, *args, **kwargs):

@@ -3673,7 +3761,9 @@ class SystemJobTemplateLaunch(GenericAPIView):

        new_job = obj.create_unified_job(extra_vars=request.data.get('extra_vars', {}))
        new_job.signal_start()
-        data = dict(system_job=new_job.id)
+        data = OrderedDict()
+        data['system_job'] = new_job.id
+        data.update(SystemJobSerializer(new_job, context=self.get_serializer_context()).to_representation(new_job))
        return Response(data, status=status.HTTP_201_CREATED)


@@ -3739,6 +3829,13 @@ class JobList(ListCreateAPIView):
            methods.remove('POST')
        return methods

+    # NOTE: Remove in 3.3, switch ListCreateAPIView to ListAPIView
+    def post(self, request, *args, **kwargs):
+        if get_request_version(self.request) > 1:
+            return Response({"error": _("POST not allowed for Job launching in version 2 of the api")},
+                            status=status.HTTP_405_METHOD_NOT_ALLOWED)
+        return super(JobList, self).post(request, *args, **kwargs)


class JobDetail(UnifiedJobDeletionMixin, RetrieveUpdateDestroyAPIView):

@@ -3754,14 +3851,26 @@ class JobDetail(UnifiedJobDeletionMixin, RetrieveUpdateDestroyAPIView):
        return super(JobDetail, self).update(request, *args, **kwargs)


-class JobExtraCredentialsList(SubListAPIView):
+class JobCredentialsList(SubListAPIView):

    model = Credential
    serializer_class = CredentialSerializer
    parent_model = Job
-    relationship = 'extra_credentials'
-    new_in_320 = True
+    relationship = 'credentials'
+    new_in_api_v2 = True
+    new_in_330 = True


+class JobExtraCredentialsList(JobCredentialsList):
+
+    deprecated = True
+    new_in_320 = True
+    new_in_330 = False
+
+    def get_queryset(self):
+        sublist_qs = super(JobExtraCredentialsList, self).get_queryset()
+        sublist_qs = sublist_qs.filter(credential_type__kind__in=['cloud', 'net'])
+        return sublist_qs


class JobLabelList(SubListAPIView):

@@ -3788,14 +3897,21 @@ class JobActivityStreamList(ActivityStreamEnforcementMixin, SubListAPIView):
    new_in_145 = True


+# TODO: remove endpoint in 3.3
class JobStart(GenericAPIView):

    model = Job
+    obj_permission_type = 'start'
    serializer_class = EmptySerializer
-    is_job_start = True
+    deprecated = True

+    def v2_not_allowed(self):
+        return Response({'detail': 'Action only possible through v1 API.'},
+                        status=status.HTTP_404_NOT_FOUND)

    def get(self, request, *args, **kwargs):
+        if get_request_version(request) > 1:
+            return self.v2_not_allowed()
        obj = self.get_object()
        data = dict(
            can_start=obj.can_start,

@@ -3806,6 +3922,8 @@ class JobStart(GenericAPIView):
        return Response(data)

    def post(self, request, *args, **kwargs):
+        if get_request_version(request) > 1:
+            return self.v2_not_allowed()
        obj = self.get_object()
        if obj.can_start:
            result = obj.signal_start(**request.data)

@@ -3821,8 +3939,8 @@ class JobStart(GenericAPIView):

class JobCancel(RetrieveAPIView):

    model = Job
+    obj_permission_type = 'cancel'
    serializer_class = JobCancelSerializer
-    is_job_cancel = True

    def post(self, request, *args, **kwargs):
        obj = self.get_object()

@@ -3833,11 +3951,11 @@ class JobCancel(RetrieveAPIView):
        return self.http_method_not_allowed(request, *args, **kwargs)


-class JobRelaunch(RetrieveAPIView, GenericAPIView):
+class JobRelaunch(RetrieveAPIView):

    model = Job
+    obj_permission_type = 'start'
    serializer_class = JobRelaunchSerializer
-    is_job_start = True

    @csrf_exempt
    @transaction.non_atomic_requests

@@ -3860,7 +3978,26 @@ class JobRelaunch(RetrieveAPIView, GenericAPIView):
        if not serializer.is_valid():
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

-        new_job = obj.copy_unified_job()
+        copy_kwargs = {}
+        retry_hosts = request.data.get('hosts', None)
+        if retry_hosts and retry_hosts != 'all':
+            if obj.status in ACTIVE_STATES:
+                return Response({'hosts': _(
+                    'Wait until job finishes before retrying on {status_value} hosts.'
+                ).format(status_value=retry_hosts)}, status=status.HTTP_400_BAD_REQUEST)
+            host_qs = obj.retry_qs(retry_hosts)
+            if not obj.job_events.filter(event='playbook_on_stats').exists():
+                return Response({'hosts': _(
+                    'Cannot retry on {status_value} hosts, playbook stats not available.'
+                ).format(status_value=retry_hosts)}, status=status.HTTP_400_BAD_REQUEST)
+            retry_host_list = host_qs.values_list('name', flat=True)
+            if len(retry_host_list) == 0:
+                return Response({'hosts': _(
+                    'Cannot relaunch because previous job had 0 {status_value} hosts.'
+                ).format(status_value=retry_hosts)}, status=status.HTTP_400_BAD_REQUEST)
+            copy_kwargs['limit'] = ','.join(retry_host_list)

+        new_job = obj.copy_unified_job(**copy_kwargs)
        result = new_job.signal_start(**request.data)
        if not result:
            data = dict(passwords_needed_to_start=new_job.passwords_needed_to_start)
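A hypothetical v2 relaunch call exercising the new 'hosts' option (URL and credentials invented); the view turns the failed-host names from the previous run's playbook stats into a limit string such as 'web1,web3':

import requests  # illustration only

requests.post(
    'https://awx.example.com/api/v2/jobs/42/relaunch/',
    json={'hosts': 'failed'},
    auth=('admin', 'secret'),
)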
@@ -3873,6 +4010,52 @@ class JobRelaunch(RetrieveAPIView, GenericAPIView):
        return Response(data, status=status.HTTP_201_CREATED, headers=headers)


+class JobCreateSchedule(RetrieveAPIView):
+
+    model = Job
+    obj_permission_type = 'start'
+    serializer_class = JobCreateScheduleSerializer
+    new_in_330 = True
+
+    def post(self, request, *args, **kwargs):
+        obj = self.get_object()
+
+        if not obj.can_schedule:
+            return Response({"error": _('Information needed to schedule this job is missing.')},
+                            status=status.HTTP_400_BAD_REQUEST)
+
+        config = obj.launch_config
+        if not request.user.can_access(JobLaunchConfig, 'add', {'reference_obj': obj}):
+            raise PermissionDenied()
+
+        # Make up a name for the schedule, guarantee that it is unique
+        name = 'Auto-generated schedule from job {}'.format(obj.id)
+        existing_names = Schedule.objects.filter(name__startswith=name).values_list('name', flat=True)
+        if name in existing_names:
+            idx = 1
+            alt_name = '{} - number {}'.format(name, idx)
+            while alt_name in existing_names:
+                idx += 1
+                alt_name = '{} - number {}'.format(name, idx)
+            name = alt_name
+
+        schedule = Schedule.objects.create(
+            name=name,
+            unified_job_template=obj.unified_job_template,
+            enabled=False,
+            rrule='{}Z RRULE:FREQ=MONTHLY;INTERVAL=1'.format(now().strftime('DTSTART:%Y%m%dT%H%M%S')),
+            extra_data=config.extra_data,
+            survey_passwords=config.survey_passwords,
+            inventory=config.inventory,
+            char_prompts=config.char_prompts
+        )
+        schedule.credentials.add(*config.credentials.all())
+
+        data = ScheduleSerializer(schedule, context=self.get_serializer_context()).data
+        headers = {'Location': schedule.get_absolute_url(request=request)}
+        return Response(data, status=status.HTTP_201_CREATED, headers=headers)


class JobNotificationsList(SubListAPIView):

    model = Notification
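The values JobCreateSchedule generates, sketched with an invented job id and timestamp: the schedule is created disabled, named after the job, with a monthly RRULE anchored at the current time.

example_name = 'Auto-generated schedule from job 42'
# on collision: 'Auto-generated schedule from job 42 - number 1', ' - number 2', ...
example_rrule = 'DTSTART:20171106T093000Z RRULE:FREQ=MONTHLY;INTERVAL=1'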
@@ -4077,8 +4260,8 @@ class AdHocCommandDetail(UnifiedJobDeletionMixin, RetrieveDestroyAPIView):

class AdHocCommandCancel(RetrieveAPIView):

    model = AdHocCommand
+    obj_permission_type = 'cancel'
    serializer_class = AdHocCommandCancelSerializer
-    is_job_cancel = True
    new_in_220 = True

    def post(self, request, *args, **kwargs):

@@ -4093,8 +4276,8 @@ class AdHocCommandCancel(RetrieveAPIView):

class AdHocCommandRelaunch(GenericAPIView):

    model = AdHocCommand
+    obj_permission_type = 'start'
    serializer_class = AdHocCommandRelaunchSerializer
-    is_job_start = True
    new_in_220 = True

    # FIXME: Figure out why OPTIONS request still shows all fields.

@@ -4228,8 +4411,8 @@ class SystemJobDetail(UnifiedJobDeletionMixin, RetrieveDestroyAPIView):

class SystemJobCancel(RetrieveAPIView):

    model = SystemJob
+    obj_permission_type = 'cancel'
    serializer_class = SystemJobCancelSerializer
-    is_job_cancel = True
    new_in_210 = True

    def post(self, request, *args, **kwargs):

@@ -4258,7 +4441,6 @@ class UnifiedJobTemplateList(ListAPIView):
    capabilities_prefetch = [
        'admin', 'execute',
        {'copy': ['jobtemplate.project.use', 'jobtemplate.inventory.use',
-                  'jobtemplate.credential.use', 'jobtemplate.vault_credential.use',
                  'workflowjobtemplate.organization.admin']}
    ]

@@ -4450,9 +4632,9 @@ class NotificationTemplateTest(GenericAPIView):

    view_name = _('Notification Template Test')
    model = NotificationTemplate
+    obj_permission_type = 'start'
    serializer_class = EmptySerializer
    new_in_300 = True
-    is_job_start = True

    def post(self, request, *args, **kwargs):
        obj = self.get_object()

@@ -4462,8 +4644,11 @@ class NotificationTemplateTest(GenericAPIView):
            return Response({}, status=status.HTTP_400_BAD_REQUEST)
        else:
            send_notifications.delay([notification.id])
+            data = OrderedDict()
+            data['notification'] = notification.id
+            data.update(NotificationSerializer(notification, context=self.get_serializer_context()).to_representation(notification))
            headers = {'Location': notification.get_absolute_url(request=request)}
-            return Response({"notification": notification.id},
+            return Response(data,
                            headers=headers,
                            status=status.HTTP_202_ACCEPTED)
awx/celery.py (new file, 23 lines)
@@ -0,0 +1,23 @@
# Copyright (c) 2017 Ansible, Inc.
# All Rights Reserved.

from __future__ import absolute_import, unicode_literals

import os
from celery import Celery


try:
    import awx.devonly  # noqa
    MODE = 'development'
except ImportError:  # pragma: no cover
    MODE = 'production'

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'awx.settings.%s' % MODE)

app = Celery('awx')
app.config_from_object('django.conf:settings', namespace='CELERY')
app.autodiscover_tasks()

if __name__ == '__main__':
    app.start()
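Because config_from_object() is given namespace='CELERY', any Django setting prefixed with CELERY_ maps onto the corresponding lowercase Celery option — a sketch with invented values, not awx's shipped configuration:

CELERY_BROKER_URL = 'amqp://guest:guest@localhost:5672//'  # becomes app.conf.broker_url
CELERY_TASK_SERIALIZER = 'json'                            # becomes app.conf.task_serializer
# a worker is then started against this module, e.g. (shell):
#   celery -A awx worker -l info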
@@ -53,6 +53,47 @@ class StringListField(ListField):
        return super(StringListField, self).to_representation(value)


+class StringListBooleanField(ListField):
+
+    default_error_messages = {
+        'type_error': _('Expected None, True, False, a string or list of strings but got {input_type} instead.'),
+    }
+    child = CharField()
+
+    def to_representation(self, value):
+        try:
+            if isinstance(value, (list, tuple)):
+                return super(StringListBooleanField, self).to_representation(value)
+            elif value in NullBooleanField.TRUE_VALUES:
+                return True
+            elif value in NullBooleanField.FALSE_VALUES:
+                return False
+            elif value in NullBooleanField.NULL_VALUES:
+                return None
+            elif isinstance(value, basestring):
+                return self.child.to_representation(value)
+        except TypeError:
+            pass
+
+        self.fail('type_error', input_type=type(value))
+
+    def to_internal_value(self, data):
+        try:
+            if isinstance(data, (list, tuple)):
+                return super(StringListBooleanField, self).to_internal_value(data)
+            elif data in NullBooleanField.TRUE_VALUES:
+                return True
+            elif data in NullBooleanField.FALSE_VALUES:
+                return False
+            elif data in NullBooleanField.NULL_VALUES:
+                return None
+            elif isinstance(data, basestring):
+                return self.child.run_validation(data)
+        except TypeError:
+            pass
+        self.fail('type_error', input_type=type(data))
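Expected conversions, sketched from the branches above (TRUE_VALUES and FALSE_VALUES come from DRF's NullBooleanField):

field = StringListBooleanField()
field.to_internal_value('true')      # -> True
field.to_internal_value('false')     # -> False
field.to_internal_value(['a', 'b'])  # -> ['a', 'b']
field.to_internal_value('all')       # -> 'all' (plain strings pass through)
field.to_internal_value(42)          # raises ValidationError via self.fail('type_error', ...)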


class URLField(CharField):

    def __init__(self, **kwargs):

@@ -83,7 +124,7 @@ class URLField(CharField):
            else:
                netloc = '{}@{}' % (url_parts.username, netloc)
            value = urlparse.urlunsplit([url_parts.scheme, netloc, url_parts.path, url_parts.query, url_parts.fragment])
-        except:
+        except Exception:
            raise  # If something fails here, just fall through and let the validators check it.
        super(URLField, self).run_validators(value)

@@ -100,3 +141,25 @@ class KeyValueField(DictField):
            if not isinstance(value, six.string_types + six.integer_types + (float,)):
                self.fail('invalid_child', input=value)
        return ret


+class ListTuplesField(ListField):
+    default_error_messages = {
+        'type_error': _('Expected a list of tuples of max length 2 but got {input_type} instead.'),
+    }
+
+    def to_representation(self, value):
+        if isinstance(value, (list, tuple)):
+            return super(ListTuplesField, self).to_representation(value)
+        else:
+            self.fail('type_error', input_type=type(value))
+
+    def to_internal_value(self, data):
+        if isinstance(data, list):
+            for x in data:
+                if not isinstance(x, (list, tuple)) or len(x) > 2:
+                    self.fail('type_error', input_type=type(x))
+
+            return super(ListTuplesField, self).to_internal_value(data)
+        else:
+            self.fail('type_error', input_type=type(data))
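Accepted and rejected shapes, sketched:

field = ListTuplesField()
field.to_internal_value([('HTTP_X_PROXY', 'myproxy')])  # ok: a list of pairs
field.to_internal_value([('a', 'b', 'c')])              # fails: tuple longer than 2
field.to_internal_value('a=b')                          # fails: not a list at all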

@@ -2,6 +2,8 @@
# All Rights Reserved.

# Django
+from django.core.signals import setting_changed
+from django.dispatch import receiver
from django.utils.translation import ugettext_lazy as _

# Django REST Framework

@@ -9,6 +11,7 @@ from rest_framework.exceptions import APIException

# Tower
from awx.main.utils.common import get_licenser
+from awx.main.utils import memoize, memoize_delete

__all__ = ['LicenseForbids', 'get_license', 'get_licensed_features',
           'feature_enabled', 'feature_exists']

@@ -23,6 +26,13 @@ def _get_validated_license_data():
    return get_licenser().validate()


+@receiver(setting_changed)
+def _on_setting_changed(sender, **kwargs):
+    # Clear cached result above when license changes.
+    if kwargs.get('setting', None) == 'LICENSE':
+        memoize_delete('feature_enabled')


def get_license(show_key=False):
    """Return a dictionary representing the active license on this Tower instance."""
    license_data = _get_validated_license_data()

@@ -40,6 +50,7 @@ def get_licensed_features():
    return features


+@memoize(track_function=True)
def feature_enabled(name):
    """Return True if the requested feature is enabled, False otherwise."""
    validated_license_data = _get_validated_license_data()
@@ -54,6 +54,13 @@ class Command(BaseCommand):
            default=False,
            help=_('Skip commenting out settings in files.'),
        )
+        parser.add_argument(
+            '--comment-only',
+            action='store_true',
+            dest='comment_only',
+            default=False,
+            help=_('Skip migrating and only comment out settings in files.'),
+        )
        parser.add_argument(
            '--backup-suffix',
            dest='backup_suffix',

@@ -67,6 +74,7 @@ class Command(BaseCommand):
        self.dry_run = bool(options.get('dry_run', False))
        self.skip_errors = bool(options.get('skip_errors', False))
        self.no_comment = bool(options.get('no_comment', False))
+        self.comment_only = bool(options.get('comment_only', False))
        self.backup_suffix = options.get('backup_suffix', '')
        self.categories = options.get('category', None) or ['all']
        self.style.HEADING = self.style.MIGRATE_HEADING

@@ -103,7 +111,7 @@ class Command(BaseCommand):
    def _get_settings_file_patterns(self):
        if MODE == 'development':
            return [
                '/etc/tower/settings.py',
                '/etc/tower/conf.d/*.py',
                os.path.join(os.path.dirname(__file__), '..', '..', '..', 'settings', 'local_*.py')
            ]

@@ -360,14 +368,15 @@ class Command(BaseCommand):
        if filename:
            self._display_diff_summary(filename, lines_added, lines_removed)

-    def _migrate_settings(self, registered_settings):
-        patterns = self._get_settings_file_patterns()
-
-        # Determine which settings need to be commented/migrated.
+    def _discover_settings(self, registered_settings):
+        if self.verbosity >= 1:
+            self.stdout.write(self.style.HEADING('Discovering settings to be migrated and commented:'))
+
+        # Determine which settings need to be commented/migrated.
        to_migrate = collections.OrderedDict()
        to_comment = collections.OrderedDict()
+        patterns = self._get_settings_file_patterns()

        for name in registered_settings:
            comment_error, migrate_error = None, None
            files_to_comment = []

@@ -398,8 +407,9 @@ class Command(BaseCommand):
            self._display_tbd(name, files_to_comment, migrate_value, comment_error, migrate_error)
        if self.verbosity == 1 and not to_migrate and not to_comment:
            self.stdout.write('  No settings found to migrate or comment!')
+        return (to_migrate, to_comment)

-        # Now migrate those settings to the database.
+    def _migrate(self, to_migrate):
        if self.verbosity >= 1:
            if self.dry_run:
                self.stdout.write(self.style.HEADING('Migrating settings to database (dry-run):'))

@@ -407,6 +417,8 @@ class Command(BaseCommand):
                self.stdout.write(self.style.HEADING('Migrating settings to database:'))
+            if not to_migrate:
+                self.stdout.write('  No settings to migrate!')

        # Now migrate those settings to the database.
        for name, db_value in to_migrate.items():
            display_value = json.dumps(db_value, indent=4)
            setting = Setting.objects.filter(key=name, user__isnull=True).order_by('pk').first()

@@ -422,7 +434,7 @@ class Command(BaseCommand):
                setting.save(update_fields=['value'])
            self._display_migrate(name, action, display_value)

-        # Now comment settings in settings files.
+    def _comment(self, to_comment):
        if self.verbosity >= 1:
            if bool(self.dry_run or self.no_comment):
                self.stdout.write(self.style.HEADING('Commenting settings in files (dry-run):'))

@@ -430,6 +442,8 @@ class Command(BaseCommand):
                self.stdout.write(self.style.HEADING('Commenting settings in files:'))
+            if not to_comment:
+                self.stdout.write('  No settings to comment!')

        # Now comment settings in settings files.
        if to_comment:
            to_comment_patterns = []
            license_file_to_comment = None

@@ -457,3 +471,10 @@ class Command(BaseCommand):
            if custom_logo_file_to_comment:
                diffs.extend(self._comment_custom_logo_file(dry_run=False))
            self._display_comment(diffs)

+    def _migrate_settings(self, registered_settings):
+        to_migrate, to_comment = self._discover_settings(registered_settings)
+
+        if not bool(self.comment_only):
+            self._migrate(to_migrate)
+        self._comment(to_comment)
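A hedged usage sketch of the new flag (option names taken from this patch; the command is awx's migrate_to_database_settings):

from django.core.management import call_command

# Preview which file-based settings would be commented out, without
# migrating anything into the database:
call_command('migrate_to_database_settings', comment_only=True, dry_run=True)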

@@ -74,6 +74,10 @@ class Setting(CreatedModifiedModel):
    def get_cache_key(self, key):
        return key

+    @classmethod
+    def get_cache_id_key(self, key):
+        return '{}_ID'.format(key)


import awx.conf.signals  # noqa


@@ -159,14 +159,14 @@ class SettingsRegistry(object):
        if category_slug == 'user' and for_user:
            try:
                field_instance.default = original_field_instance.to_representation(getattr(self.settings, setting))
-            except:
+            except Exception:
                logger.warning('Unable to retrieve default value for user setting "%s".', setting, exc_info=True)
        elif not field_instance.read_only or field_instance.default is empty or field_instance.defined_in_file:
            try:
                field_instance.default = original_field_instance.to_representation(self.settings._awx_conf_settings._get_default(setting))
            except AttributeError:
                pass
-            except:
+            except Exception:
                logger.warning('Unable to retrieve default value for setting "%s".', setting, exc_info=True)

        # `PENDO_TRACKING_STATE` is disabled for the open source awx license

@@ -16,7 +16,7 @@ class SettingSerializer(BaseSerializer):
    class Meta:
        model = Setting
        fields = ('id', 'key', 'value')
-        readonly_fields = ('id', 'key', 'value')
+        read_only_fields = ('id', 'key', 'value')

    def __init__(self, instance=None, data=serializers.empty, **kwargs):
        if instance is None and data is not serializers.empty and 'key' in data:

@@ -9,6 +9,7 @@ import time
import six

# Django
+from django.conf import LazySettings
from django.conf import settings, UserSettingsHolder
from django.core.cache import cache as django_cache
from django.core.exceptions import ImproperlyConfigured

@@ -69,6 +70,12 @@ def _log_database_error():
        pass


+def filter_sensitive(registry, key, value):
+    if registry.is_setting_encrypted(key):
+        return '$encrypted$'
+    return value


class EncryptedCacheProxy(object):

    def __init__(self, cache, registry, encrypter=None, decrypter=None):

@@ -105,9 +112,13 @@ class EncryptedCacheProxy(object):
                six.text_type(value)
            except UnicodeDecodeError:
                value = value.decode('utf-8')
+        logger.debug('cache get(%r, %r) -> %r', key, empty, filter_sensitive(self.registry, key, value))
        return value

-    def set(self, key, value, **kwargs):
+    def set(self, key, value, log=True, **kwargs):
+        if log is True:
+            logger.debug('cache set(%r, %r, %r)', key, filter_sensitive(self.registry, key, value),
+                         SETTING_CACHE_TIMEOUT)
        self.cache.set(
            key,
            self._handle_encryption(self.encrypter, key, value),

@@ -115,8 +126,13 @@ class EncryptedCacheProxy(object):
        )

    def set_many(self, data, **kwargs):
+        filtered_data = dict(
+            (key, filter_sensitive(self.registry, key, value))
+            for key, value in data.items()
+        )
+        logger.debug('cache set_many(%r, %r)', filtered_data, SETTING_CACHE_TIMEOUT)
        for key, value in data.items():
-            self.set(key, value, **kwargs)
+            self.set(key, value, log=False, **kwargs)

    def _handle_encryption(self, method, key, value):
        TransientSetting = namedtuple('TransientSetting', ['pk', 'value'])

@@ -124,9 +140,16 @@ class EncryptedCacheProxy(object):
        if value is not empty and self.registry.is_setting_encrypted(key):
            # If the setting exists in the database, we'll use its primary key
            # as part of the AES key when encrypting/decrypting
+            obj_id = self.cache.get(Setting.get_cache_id_key(key), default=empty)
+            if obj_id is empty:
+                logger.info('Efficiency notice: Corresponding id not stored in cache %s',
+                            Setting.get_cache_id_key(key))
+                obj_id = getattr(self._get_setting_from_db(key), 'pk', None)
+            elif obj_id == SETTING_CACHE_NONE:
+                obj_id = None
            return method(
                TransientSetting(
-                    pk=getattr(self._get_setting_from_db(key), 'pk', None),
+                    pk=obj_id,
                    value=value
                ),
                'value'
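A sketch of the cache layout this introduces (setting name and values invented): the encrypted payload and the database pk used to derive its AES key travel as sibling cache entries, so decryption no longer forces a database lookup.

cache.set('AUTH_LDAP_BIND_PASSWORD', '$encrypted$...')  # value entry, per Setting.get_cache_key()
cache.set('AUTH_LDAP_BIND_PASSWORD_ID', 17)             # pk entry, per Setting.get_cache_id_key()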
@@ -241,11 +264,13 @@ class SettingsWrapper(UserSettingsHolder):
        # to indicate from the cache that the setting is not configured without
        # a database lookup.
        settings_to_cache = get_settings_to_cache(self.registry)
+        setting_ids = {}
        # Load all settings defined in the database.
        for setting in Setting.objects.filter(key__in=settings_to_cache.keys(), user__isnull=True).order_by('pk'):
            if settings_to_cache[setting.key] != SETTING_CACHE_NOTSET:
                continue
            if self.registry.is_setting_encrypted(setting.key):
+                setting_ids[setting.key] = setting.id
                try:
                    value = decrypt_field(setting, 'value')
                except ValueError, e:

@@ -264,12 +289,18 @@ class SettingsWrapper(UserSettingsHolder):
            field = self.registry.get_setting_field(key)
            try:
                settings_to_cache[key] = get_cache_value(field.get_default())
+                if self.registry.is_setting_encrypted(key):
+                    # No database pk, so None will be passed to encryption algorithm
+                    setting_ids[key] = SETTING_CACHE_NOTSET
            except SkipField:
                pass
        # Generate a cache key for each setting and store them all at once.
        settings_to_cache = dict([(Setting.get_cache_key(k), v) for k, v in settings_to_cache.items()])
+        for k, id_val in setting_ids.items():
+            logger.debug('Saving id in cache for encrypted setting %s, %s',
+                         Setting.get_cache_id_key(k), id_val)
+            self.cache.cache.set(Setting.get_cache_id_key(k), id_val)
        settings_to_cache['_awx_conf_preload_expires'] = self._awx_conf_preload_expires
-        logger.debug('cache set_many(%r, %r)', settings_to_cache, SETTING_CACHE_TIMEOUT)
        self.cache.set_many(settings_to_cache, timeout=SETTING_CACHE_TIMEOUT)

    def _get_local(self, name):

@@ -279,7 +310,6 @@ class SettingsWrapper(UserSettingsHolder):
            cache_value = self.cache.get(cache_key, default=empty)
        except ValueError:
            cache_value = empty
-        logger.debug('cache get(%r, %r) -> %r', cache_key, empty, cache_value)
        if cache_value == SETTING_CACHE_NOTSET:
            value = empty
        elif cache_value == SETTING_CACHE_NONE:

@@ -293,6 +323,7 @@ class SettingsWrapper(UserSettingsHolder):
        field = self.registry.get_setting_field(name)
        if value is empty:
            setting = None
+            setting_id = None
            if not field.read_only or name in (
                # these two values are read-only - however - we *do* want
                # to fetch their value from the database

@@ -303,6 +334,7 @@ class SettingsWrapper(UserSettingsHolder):
            if setting:
                if getattr(field, 'encrypted', False):
                    value = decrypt_field(setting, 'value')
+                    setting_id = setting.id
                else:
                    value = setting.value
            else:

@@ -310,15 +342,17 @@ class SettingsWrapper(UserSettingsHolder):
                if SETTING_CACHE_DEFAULTS:
                    try:
                        value = field.get_default()
+                        if getattr(field, 'encrypted', False):
+                            setting_id = SETTING_CACHE_NONE
                    except SkipField:
                        pass
            # If None implies not set, convert when reading the value.
            if value is None and SETTING_CACHE_NOTSET == SETTING_CACHE_NONE:
                value = SETTING_CACHE_NOTSET
            if cache_value != value:
-                logger.debug('cache set(%r, %r, %r)', cache_key,
-                             get_cache_value(value),
-                             SETTING_CACHE_TIMEOUT)
+                if setting_id:
+                    logger.debug('Saving id in cache for encrypted setting %s', cache_key)
+                    self.cache.cache.set(Setting.get_cache_id_key(cache_key), setting_id)
                self.cache.set(cache_key, get_cache_value(value), timeout=SETTING_CACHE_TIMEOUT)
        if value == SETTING_CACHE_NOTSET and not SETTING_CACHE_DEFAULTS:
            try:

@@ -333,7 +367,7 @@ class SettingsWrapper(UserSettingsHolder):
                return internal_value
            else:
                return field.run_validation(value)
-        except:
+        except Exception:
            logger.warning(
                'The current value "%r" for setting "%s" is invalid.',
                value, name, exc_info=True)

@@ -425,3 +459,19 @@ class SettingsWrapper(UserSettingsHolder):
        set_locally = Setting.objects.filter(key=setting, user__isnull=True).exists()
        set_on_default = getattr(self.default_settings, 'is_overridden', lambda s: False)(setting)
        return (set_locally or set_on_default)


+def __getattr_without_cache__(self, name):
+    # Django 1.10 added an optimization to settings lookup:
+    # https://code.djangoproject.com/ticket/27625
+    # https://github.com/django/django/commit/c1b221a9b913315998a1bcec2f29a9361a74d1ac
+    # This change caches settings lookups on the __dict__ of the LazySettings
+    # object, which is not okay to do in an environment where settings can
+    # change in-process (the entire point of awx's custom settings implementation)
+    # This restores the original behavior that *does not* cache.
+    if self._wrapped is empty:
+        self._setup(name)
+    return getattr(self._wrapped, name)


+LazySettings.__getattr__ = __getattr_without_cache__
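Why that patch matters, sketched with an invented setting name: stock Django 1.10+ would pin the first value read onto LazySettings.__dict__, so a value later written through the database would never be seen in-process.

from django.conf import settings

settings.EXAMPLE_TIMEOUT        # first read; stock Django would cache this
Setting.objects.create(key='EXAMPLE_TIMEOUT', value=60)
settings.EXAMPLE_TIMEOUT        # stale with the stock __getattr__, fresh with the patched one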

awx/conf/tests/functional/conftest.py (new file, 43 lines)
@@ -0,0 +1,43 @@
import pytest

from django.core.urlresolvers import resolve
from django.utils.six.moves.urllib.parse import urlparse
from django.contrib.auth.models import User

from rest_framework.test import (
    APIRequestFactory,
    force_authenticate,
)


@pytest.fixture
def normal_user():
    try:
        user = User.objects.get(username='conf-normal')
    except User.DoesNotExist:
        user = User(username='conf-normal', is_superuser=False, password='conf-normal')
        user.save()
    return user


@pytest.fixture
def admin():
    try:
        user = User.objects.get(username='conf-admin')
    except User.DoesNotExist:
        user = User(username='conf-admin', is_superuser=True, password='conf-admin')
        user.save()
    return user


@pytest.fixture
def api_request(admin):
    def rf(verb, url, data=None, user=admin):
        view, view_args, view_kwargs = resolve(urlparse(url)[2])
        request = getattr(APIRequestFactory(), verb)(url, data=data, format='json')
        if user:
            force_authenticate(request, user=user)
        response = view(request, *view_args, **view_kwargs)
        response.render()
        return response
    return rf
awx/conf/tests/functional/test_api.py (new file, 350 lines)
@@ -0,0 +1,350 @@
import pytest
import mock

from rest_framework import serializers

from awx.api.versioning import reverse
from awx.main.utils.encryption import decrypt_field
from awx.conf import fields
from awx.conf.registry import settings_registry
from awx.conf.models import Setting


@pytest.fixture
def dummy_setting():
    class context_manager(object):
        def __init__(self, name, **kwargs):
            self.name = name
            self.kwargs = kwargs

        def __enter__(self):
            settings_registry.register(self.name, **(self.kwargs))

        def __exit__(self, *args):
            settings_registry.unregister(self.name)

    return context_manager


@pytest.fixture
def dummy_validate():
    class context_manager(object):
        def __init__(self, category_slug, func):
            self.category_slug = category_slug
            self.func = func

        def __enter__(self):
            settings_registry.register_validate(self.category_slug, self.func)

        def __exit__(self, *args):
            settings_registry.unregister_validate(self.category_slug)

    return context_manager


@pytest.mark.django_db
def test_non_admin_user_does_not_see_categories(api_request, dummy_setting, normal_user):
    with dummy_setting(
        'FOO_BAR',
        field_class=fields.IntegerField,
        category='FooBar',
        category_slug='foobar'
    ):
        response = api_request(
            'get',
            reverse('api:setting_category_list',
                    kwargs={'version': 'v2'})
        )
        assert response.data['results']
        response = api_request(
            'get',
            reverse('api:setting_category_list',
                    kwargs={'version': 'v2'}),
            user=normal_user
        )
        assert not response.data['results']


@pytest.mark.django_db
@mock.patch(
    'awx.conf.views.VERSION_SPECIFIC_CATEGORIES_TO_EXCLUDE',
    {
        1: set([]),
        2: set(['foobar']),
    }
)
def test_version_specific_category_slug_to_exclude_does_not_show_up(api_request, dummy_setting):
    with dummy_setting(
        'FOO_BAR',
        field_class=fields.IntegerField,
        category='FooBar',
        category_slug='foobar'
    ):
        response = api_request(
            'get',
            reverse('api:setting_category_list',
                    kwargs={'version': 'v2'})
        )
        for item in response.data['results']:
            assert item['slug'] != 'foobar'
        response = api_request(
            'get',
            reverse('api:setting_category_list',
                    kwargs={'version': 'v1'})
        )
        contains = False
        for item in response.data['results']:
            if item['slug'] == 'foobar':
                contains = True
                break
        assert contains


@pytest.mark.django_db
def test_setting_singleton_detail_retrieve(api_request, dummy_setting):
    with dummy_setting(
        'FOO_BAR_1',
        field_class=fields.IntegerField,
        category='FooBar',
        category_slug='foobar'
    ), dummy_setting(
        'FOO_BAR_2',
        field_class=fields.IntegerField,
        category='FooBar',
        category_slug='foobar'
    ):
        response = api_request(
            'get',
            reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'})
        )
        assert response.status_code == 200
        assert 'FOO_BAR_1' in response.data and response.data['FOO_BAR_1'] is None
        assert 'FOO_BAR_2' in response.data and response.data['FOO_BAR_2'] is None


@pytest.mark.django_db
def test_setting_singleton_detail_invalid_retrieve(api_request, dummy_setting, normal_user):
    with dummy_setting(
        'FOO_BAR_1',
        field_class=fields.IntegerField,
        category='FooBar',
        category_slug='foobar'
    ), dummy_setting(
        'FOO_BAR_2',
        field_class=fields.IntegerField,
        category='FooBar',
        category_slug='foobar'
    ):
        response = api_request(
            'get',
            reverse('api:setting_singleton_detail', kwargs={'category_slug': 'barfoo'})
        )
        assert response.status_code == 404
        response = api_request(
            'get',
            reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}),
            user=normal_user
        )
        assert response.status_code == 403


@pytest.mark.django_db
def test_setting_signleton_retrieve_hierachy(api_request, dummy_setting):
    with dummy_setting(
        'FOO_BAR',
        field_class=fields.IntegerField,
        default=0,
        category='FooBar',
        category_slug='foobar'
    ):
        response = api_request(
            'get',
            reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'})
        )
        assert response.data['FOO_BAR'] == 0
        s = Setting(key='FOO_BAR', value=1)
        s.save()
        response = api_request(
            'get',
            reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'})
        )
        assert response.data['FOO_BAR'] == 1


@pytest.mark.django_db
def test_setting_signleton_retrieve_readonly(api_request, dummy_setting):
    with dummy_setting(
        'FOO_BAR',
        field_class=fields.IntegerField,
        read_only=True,
        default=2,
        category='FooBar',
        category_slug='foobar'
    ):
        response = api_request(
            'get',
            reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'})
        )
        assert response.data['FOO_BAR'] == 2


@pytest.mark.django_db
def test_setting_singleton_update(api_request, dummy_setting):
    with dummy_setting(
        'FOO_BAR',
        field_class=fields.IntegerField,
        category='FooBar',
        category_slug='foobar'
    ), mock.patch('awx.conf.views.handle_setting_changes'):
        api_request(
            'patch',
            reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}),
            data={'FOO_BAR': 3}
        )
        response = api_request(
            'get',
            reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'})
        )
        assert response.data['FOO_BAR'] == 3
        api_request(
            'patch',
            reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}),
            data={'FOO_BAR': 4}
        )
        response = api_request(
            'get',
            reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'})
        )
        assert response.data['FOO_BAR'] == 4


@pytest.mark.django_db
def test_setting_singleton_update_dont_change_readonly_fields(api_request, dummy_setting):
    with dummy_setting(
        'FOO_BAR',
        field_class=fields.IntegerField,
        read_only=True,
        default=4,
        category='FooBar',
        category_slug='foobar'
    ), mock.patch('awx.conf.views.handle_setting_changes'):
        api_request(
            'patch',
            reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}),
            data={'FOO_BAR': 5}
        )
        response = api_request(
            'get',
            reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'})
        )
        assert response.data['FOO_BAR'] == 4


@pytest.mark.django_db
def test_setting_singleton_update_dont_change_encripted_mark(api_request, dummy_setting):
    with dummy_setting(
        'FOO_BAR',
        field_class=fields.CharField,
        encrypted=True,
        category='FooBar',
        category_slug='foobar'
    ), mock.patch('awx.conf.views.handle_setting_changes'):
        api_request(
            'patch',
            reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}),
            data={'FOO_BAR': 'password'}
        )
        assert Setting.objects.get(key='FOO_BAR').value.startswith('$encrypted$')
        response = api_request(
            'get',
            reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'})
        )
        assert response.data['FOO_BAR'] == '$encrypted$'
        api_request(
            'patch',
            reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}),
            data={'FOO_BAR': '$encrypted$'}
        )
        assert decrypt_field(Setting.objects.get(key='FOO_BAR'), 'value') == 'password'
        api_request(
            'patch',
            reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}),
            data={'FOO_BAR': 'new_pw'}
        )
        assert decrypt_field(Setting.objects.get(key='FOO_BAR'), 'value') == 'new_pw'


@pytest.mark.django_db
def test_setting_singleton_update_runs_custom_validate(api_request, dummy_setting, dummy_validate):

    def func_raising_exception(serializer, attrs):
        raise serializers.ValidationError('Error')

    with dummy_setting(
        'FOO_BAR',
        field_class=fields.IntegerField,
        category='FooBar',
        category_slug='foobar'
    ), dummy_validate(
        'foobar', func_raising_exception
    ), mock.patch('awx.conf.views.handle_setting_changes'):
        response = api_request(
            'patch',
            reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}),
            data={'FOO_BAR': 23}
        )
        assert response.status_code == 400


@pytest.mark.django_db
def test_setting_singleton_delete(api_request, dummy_setting):
    with dummy_setting(
        'FOO_BAR',
        field_class=fields.IntegerField,
        category='FooBar',
        category_slug='foobar'
    ), mock.patch('awx.conf.views.handle_setting_changes'):
        api_request(
            'delete',
            reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'})
        )
        response = api_request(
            'get',
            reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'})
|
||||
)
|
||||
assert not response.data['FOO_BAR']
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_setting_singleton_delete_no_read_only_fields(api_request, dummy_setting):
|
||||
with dummy_setting(
|
||||
'FOO_BAR',
|
||||
field_class=fields.IntegerField,
|
||||
read_only=True,
|
||||
default=23,
|
||||
category='FooBar',
|
||||
category_slug='foobar'
|
||||
), mock.patch('awx.conf.views.handle_setting_changes'):
|
||||
api_request(
|
||||
'delete',
|
||||
reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'})
|
||||
)
|
||||
response = api_request(
|
||||
'get',
|
||||
reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'})
|
||||
)
|
||||
assert response.data['FOO_BAR'] == 23
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_setting_logging_test(api_request):
|
||||
with mock.patch('awx.conf.views.BaseHTTPSHandler.perform_test') as mock_func:
|
||||
api_request(
|
||||
'post',
|
||||
reverse('api:setting_logging_test'),
|
||||
data={'LOG_AGGREGATOR_HOST': 'http://foobar', 'LOG_AGGREGATOR_TYPE': 'logstash'}
|
||||
)
|
||||
test_arguments = mock_func.call_args[0][0]
|
||||
assert test_arguments.LOG_AGGREGATOR_HOST == 'http://foobar'
|
||||
assert test_arguments.LOG_AGGREGATOR_TYPE == 'logstash'
|
||||
assert test_arguments.LOG_AGGREGATOR_LEVEL == 'DEBUG'
|
||||
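These tests lean on the `dummy_setting` fixture to register a throwaway entry in the settings registry for the duration of each `with` block. A minimal sketch of that fixture, assuming the registry exposes register/unregister as in awx.conf.registry:

import contextlib

import pytest

from awx.conf.registry import settings_registry  # assumed location of the registry


@pytest.fixture
def dummy_setting():
    # Register a temporary setting for the duration of the block, then
    # unregister it so the registry stays clean between tests.
    @contextlib.contextmanager
    def _fixture(name, **kwargs):
        settings_registry.register(name, **kwargs)
        try:
            yield
        finally:
            settings_registry.unregister(name)
    return _fixture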
86  awx/conf/tests/unit/test_fields.py  Normal file
@@ -0,0 +1,86 @@
import pytest

from rest_framework.fields import ValidationError
from awx.conf.fields import StringListBooleanField, ListTuplesField


class TestStringListBooleanField():

    FIELD_VALUES = [
        ("hello", "hello"),
        (("a", "b"), ["a", "b"]),
        (["a", "b", 1, 3.13, "foo", "bar", "foobar"], ["a", "b", "1", "3.13", "foo", "bar", "foobar"]),
        ("True", True),
        ("TRUE", True),
        ("true", True),
        (True, True),
        ("False", False),
        ("FALSE", False),
        ("false", False),
        (False, False),
        ("", None),
        ("null", None),
        ("NULL", None),
    ]

    FIELD_VALUES_INVALID = [
        1.245,
        {"a": "b"},
    ]

    @pytest.mark.parametrize("value_in, value_known", FIELD_VALUES)
    def test_to_internal_value_valid(self, value_in, value_known):
        field = StringListBooleanField()
        v = field.to_internal_value(value_in)
        assert v == value_known

    @pytest.mark.parametrize("value", FIELD_VALUES_INVALID)
    def test_to_internal_value_invalid(self, value):
        field = StringListBooleanField()
        with pytest.raises(ValidationError) as e:
            field.to_internal_value(value)
        assert e.value.detail[0] == "Expected None, True, False, a string or list " \
                                    "of strings but got {} instead.".format(type(value))

    @pytest.mark.parametrize("value_in, value_known", FIELD_VALUES)
    def test_to_representation_valid(self, value_in, value_known):
        field = StringListBooleanField()
        v = field.to_representation(value_in)
        assert v == value_known

    @pytest.mark.parametrize("value", FIELD_VALUES_INVALID)
    def test_to_representation_invalid(self, value):
        field = StringListBooleanField()
        with pytest.raises(ValidationError) as e:
            field.to_representation(value)
        assert e.value.detail[0] == "Expected None, True, False, a string or list " \
                                    "of strings but got {} instead.".format(type(value))


class TestListTuplesField():

    FIELD_VALUES = [
        ([('a', 'b'), ('abc', '123')], [("a", "b"), ("abc", "123")]),
    ]

    FIELD_VALUES_INVALID = [
        ("abc", type("abc")),
        ([('a', 'b', 'c'), ('abc', '123', '456')], type(('a',))),
        (['a', 'b'], type('a')),
        (123, type(123)),
    ]

    @pytest.mark.parametrize("value_in, value_known", FIELD_VALUES)
    def test_to_internal_value_valid(self, value_in, value_known):
        field = ListTuplesField()
        v = field.to_internal_value(value_in)
        assert v == value_known

    @pytest.mark.parametrize("value, t", FIELD_VALUES_INVALID)
    def test_to_internal_value_invalid(self, value, t):
        field = ListTuplesField()
        with pytest.raises(ValidationError) as e:
            field.to_internal_value(value)
        assert e.value.detail[0] == "Expected a list of tuples of max length 2 " \
                                    "but got {} instead.".format(t)
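The valid/invalid tables above pin the coercion rules down tightly; here is a standalone sketch of the same rules (hypothetical, not the actual StringListBooleanField implementation):

def coerce_string_list_boolean(value):
    # Lists and tuples coerce every member to a string.
    if isinstance(value, (list, tuple)):
        return [str(v) for v in value]
    # Booleans and None pass through unchanged.
    if isinstance(value, bool) or value is None:
        return value
    if isinstance(value, str):
        lowered = value.lower()
        if lowered == 'true':
            return True
        if lowered == 'false':
            return False
        if lowered in ('', 'null'):
            return None
        return value  # any other string passes through
    raise ValueError(
        "Expected None, True, False, a string or list "
        "of strings but got {} instead.".format(type(value))
    )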
@@ -264,7 +264,7 @@ def test_setting_from_db_with_unicode(settings, mocker, encrypted):
     # this simulates a bug in python-memcached; see https://github.com/linsomniac/python-memcached/issues/79
     value = six.u('Iñtërnâtiônàlizætiøn').encode('utf-8')
 
-    setting_from_db = mocker.Mock(key='AWX_SOME_SETTING', value=value)
+    setting_from_db = mocker.Mock(id=1, key='AWX_SOME_SETTING', value=value)
     mocks = mocker.Mock(**{
         'order_by.return_value': mocker.Mock(**{
             '__iter__': lambda self: iter([setting_from_db]),
@@ -391,7 +391,20 @@ def test_charfield_properly_sets_none(settings, mocker):
     )
 
 
-def test_settings_use_an_encrypted_cache(settings):
+def test_settings_use_cache(settings, mocker):
+    settings.registry.register(
+        'AWX_VAR',
+        field_class=fields.CharField,
+        category=_('System'),
+        category_slug='system'
+    )
+    settings.cache.set('AWX_VAR', 'foobar')
+    settings.cache.set('_awx_conf_preload_expires', 100)
+    # Will fail test if database is used
+    getattr(settings, 'AWX_VAR')
+
+
+def test_settings_use_an_encrypted_cache(settings, mocker):
     settings.registry.register(
         'AWX_ENCRYPTED',
         field_class=fields.CharField,
@@ -402,6 +415,11 @@ def test_settings_use_an_encrypted_cache(settings):
     assert isinstance(settings.cache, EncryptedCacheProxy)
     assert settings.cache.__dict__['encrypter'] == encrypt_field
     assert settings.cache.__dict__['decrypter'] == decrypt_field
+    settings.cache.set('AWX_ENCRYPTED_ID', 402)
+    settings.cache.set('AWX_ENCRYPTED', 'foobar')
+    settings.cache.set('_awx_conf_preload_expires', 100)
+    # Will fail test if database is used
+    getattr(settings, 'AWX_ENCRYPTED')
 
 
 def test_sensitive_cache_data_is_encrypted(settings, mocker):
@@ -1,16 +1,17 @@
 # Copyright (c) 2016 Ansible, Inc.
 # All Rights Reserved.
 
 # Django
-from django.conf.urls import patterns
+from django.conf.urls import url
 
-# Tower
-from awx.api.urls import url
+from awx.conf.views import (
+    SettingCategoryList,
+    SettingSingletonDetail,
+    SettingLoggingTest,
+)
 
 
-urlpatterns = patterns(
-    'awx.conf.views',
-    url(r'^$', 'setting_category_list'),
-    url(r'^(?P<category_slug>[a-z0-9-]+)/$', 'setting_singleton_detail'),
-    url(r'^logging/test/$', 'setting_logging_test'),
-)
+urlpatterns = [
+    url(r'^$', SettingCategoryList.as_view(), name='setting_category_list'),
+    url(r'^(?P<category_slug>[a-z0-9-]+)/$', SettingSingletonDetail.as_view(), name='setting_singleton_detail'),
+    url(r'^logging/test/$', SettingLoggingTest.as_view(), name='setting_logging_test'),
+]
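With the routes now registered as class-based views under explicit names, callers resolve them through reverse() exactly as the tests earlier in this diff do; for example:

from django.core.urlresolvers import reverse  # Django 1.x import path

# Resolve the singleton detail route for the 'system' category;
# the 'api:' namespace matches what the tests above use.
url = reverse('api:setting_singleton_detail', kwargs={'category_slug': 'system'})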
@@ -16,7 +16,7 @@ class argv_placeholder(object):
     def __del__(self):
         try:
             argv_ready(sys.argv)
-        except:
+        except Exception:
             pass
 
 
@@ -1,3 +1,8 @@
 # Copyright (c) 2017 Ansible by Red Hat
 # All Rights Reserved
 
+from __future__ import absolute_import
+
+from collections import OrderedDict
+import json
+import mock
@@ -23,9 +28,9 @@ with mock.patch.dict(os.environ, {'ANSIBLE_STDOUT_CALLBACK': CALLBACK,
                                   'ANSIBLE_CALLBACK_PLUGINS': PLUGINS}):
     from ansible.cli.playbook import PlaybookCLI
     from ansible.executor.playbook_executor import PlaybookExecutor
-    from ansible.inventory import Inventory
+    from ansible.inventory.manager import InventoryManager
     from ansible.parsing.dataloader import DataLoader
-    from ansible.vars import VariableManager
+    from ansible.vars.manager import VariableManager
 
 # Add awx/lib to sys.path so we can use the plugin
 path = os.path.abspath(os.path.join(PLUGINS, '..', '..'))
@@ -62,9 +67,8 @@ def executor(tmpdir_factory, request):
     cli.parse()
     options = cli.parser.parse_args(['-v'])[0]
     loader = DataLoader()
-    variable_manager = VariableManager()
-    inventory = Inventory(loader=loader, variable_manager=variable_manager,
-                          host_list=['localhost'])
+    variable_manager = VariableManager(loader=loader)
+    inventory = InventoryManager(loader=loader, sources='localhost,')
     variable_manager.set_inventory(inventory)
 
     return PlaybookExecutor(playbooks=playbook_files, inventory=inventory,
File diff suppressed because it is too large
File diff suppressed because it is too large
4893  awx/locale/nl/LC_MESSAGES/django.po  Normal file
File diff suppressed because it is too large
@@ -70,14 +70,9 @@ register(
     label=_('Remote Host Headers'),
     help_text=_('HTTP headers and meta keys to search to determine remote host '
                 'name or IP. Add additional items to this list, such as '
-                '"HTTP_X_FORWARDED_FOR", if behind a reverse proxy.\n\n'
-                'Note: The headers will be searched in order and the first '
-                'found remote host name or IP will be used.\n\n'
-                'In the below example 8.8.8.7 would be the chosen IP address.\n'
-                'X-Forwarded-For: 8.8.8.7, 192.168.2.1, 127.0.0.1\n'
-                'Host: 127.0.0.1\n'
-                'REMOTE_HOST_HEADERS = [\'HTTP_X_FORWARDED_FOR\', '
-                '\'REMOTE_ADDR\', \'REMOTE_HOST\']'),
+                '"HTTP_X_FORWARDED_FOR", if behind a reverse proxy. '
+                'See the "Proxy Support" section of the Administrator guide '
+                'for more details.'),
     category=_('System'),
     category_slug='system',
 )
@@ -88,9 +83,7 @@ register(
     label=_('Proxy IP Whitelist'),
     help_text=_("If Tower is behind a reverse proxy/load balancer, use this setting "
                 "to whitelist the proxy IP addresses from which Tower should trust "
-                "custom REMOTE_HOST_HEADERS header values\n"
-                "REMOTE_HOST_HEADERS = ['HTTP_X_FORWARDED_FOR', 'REMOTE_ADDR', 'REMOTE_HOST']\n"
-                "PROXY_IP_WHITELIST = ['10.0.1.100', '10.0.1.101']\n"
+                "custom REMOTE_HOST_HEADERS header values. "
+                "If this setting is an empty list (the default), the headers specified by "
+                "REMOTE_HOST_HEADERS will be trusted unconditionally."),
     category=_('System'),
@@ -105,7 +98,7 @@ def _load_default_license_from_file():
         license_data = json.load(open(license_file))
         logger.debug('Read license data from "%s".', license_file)
         return license_data
-    except:
+    except Exception:
         logger.warning('Could not read license from "%s".', license_file, exc_info=True)
     return {}
 
@@ -268,7 +261,8 @@ register(
     field_class=fields.IntegerField,
     min_value=0,
     label=_('Job Event Standard Output Maximum Display Size'),
-    help_text=_(u'Maximum Size of Standard Output in bytes to display for a single job or ad hoc command event. `stdout` will end with `\u2026` when truncated.'),
+    help_text=_(
+        u'Maximum Size of Standard Output in bytes to display for a single job or ad hoc command event. `stdout` will end with `\u2026` when truncated.'),
     category=_('Jobs'),
     category_slug='jobs',
 )
@@ -311,7 +305,7 @@ register(
     min_value=0,
     default=0,
     label=_('Default Inventory Update Timeout'),
-    help_text=_('Maximum time to allow inventory updates to run. Use value of 0 to indicate that no '
+    help_text=_('Maximum time in seconds to allow inventory updates to run. Use value of 0 to indicate that no '
                 'timeout should be imposed. A timeout set on an individual inventory source will override this.'),
     category=_('Jobs'),
     category_slug='jobs',
@@ -323,7 +317,7 @@ register(
     min_value=0,
     default=0,
     label=_('Default Project Update Timeout'),
-    help_text=_('Maximum time to allow project updates to run. Use value of 0 to indicate that no '
+    help_text=_('Maximum time in seconds to allow project updates to run. Use value of 0 to indicate that no '
                 'timeout should be imposed. A timeout set on an individual project will override this.'),
     category=_('Jobs'),
     category_slug='jobs',
@@ -446,7 +440,7 @@ register(
 register(
     'LOG_AGGREGATOR_PROTOCOL',
     field_class=fields.ChoiceField,
-    choices=['https', 'tcp', 'udp'],
+    choices=[('https', 'HTTPS'), ('tcp', 'TCP'), ('udp', 'UDP')],
     default='https',
     label=_('Logging Aggregator Protocol'),
     help_text=_('Protocol used to communicate with log aggregator.'),
@@ -1,8 +1,13 @@
 # Copyright (c) 2015 Ansible, Inc.
 # All Rights Reserved
 
+import re
+
 from django.utils.translation import ugettext_lazy as _
 
-CLOUD_PROVIDERS = ('azure', 'azure_rm', 'ec2', 'gce', 'rax', 'vmware', 'openstack', 'satellite6', 'cloudforms')
+CLOUD_PROVIDERS = ('azure_rm', 'ec2', 'gce', 'vmware', 'openstack', 'satellite6', 'cloudforms')
 SCHEDULEABLE_PROVIDERS = CLOUD_PROVIDERS + ('custom', 'scm',)
-PRIVILEGE_ESCALATION_METHODS = [ ('sudo', _('Sudo')), ('su', _('Su')), ('pbrun', _('Pbrun')), ('pfexec', _('Pfexec')), ('dzdo', _('DZDO')), ('pmrun', _('Pmrun')), ('runas', _('Runas'))]
+PRIVILEGE_ESCALATION_METHODS = [
+    ('sudo', _('Sudo')), ('su', _('Su')), ('pbrun', _('Pbrun')), ('pfexec', _('Pfexec')),
+    ('dzdo', _('DZDO')), ('pmrun', _('Pmrun')), ('runas', _('Runas'))]
+ANSI_SGR_PATTERN = re.compile(r'\x1b\[[0-9;]*m')
@@ -24,7 +24,7 @@ def discard_groups(message):
 
 @channel_session
 def ws_connect(message):
-    connect_text = {'accept':False, 'user':None}
+    message.reply_channel.send({"accept": True})
 
     message.content['method'] = 'FAKE'
     request = AsgiRequest(message)
@@ -35,11 +35,12 @@ def ws_connect(message):
         auth_token = AuthToken.objects.get(key=token)
         if auth_token.in_valid_tokens:
             message.channel_session['user_id'] = auth_token.user_id
-            connect_text['accept'] = True
-            connect_text['user'] = auth_token.user_id
+            message.reply_channel.send({"text": json.dumps({"accept": True, "user": auth_token.user_id})})
+            return None
     except AuthToken.DoesNotExist:
         logger.error("auth_token provided was invalid.")
-    message.reply_channel.send({"text": json.dumps(connect_text)})
+    message.reply_channel.send({"close": True})
+    return None
 
 
 @channel_session
@@ -81,7 +82,8 @@ def ws_receive(message):
             if access_cls is not None:
                 user_access = access_cls(user)
                 if not user_access.get_queryset().filter(pk=oid).exists():
-                    message.reply_channel.send({"text": json.dumps({"error": "access denied to channel {0} for resource id {1}".format(group_name, oid)})})
+                    message.reply_channel.send({"text": json.dumps(
+                        {"error": "access denied to channel {0} for resource id {1}".format(group_name, oid)})})
                     continue
             current_groups.add(name)
             Group(name).add(message.reply_channel)
24  awx/main/exceptions.py  Normal file
@@ -0,0 +1,24 @@
class AwxTaskError(Exception):
    """Base exception for errors in unified job runs"""
    def __init__(self, task, message=None):
        if message is None:
            message = "Execution error running {}".format(task.log_format)
        super(AwxTaskError, self).__init__(message)
        self.task = task


class TaskCancel(AwxTaskError):
    """Canceled flag caused run_pexpect to kill the job run"""
    def __init__(self, task, rc):
        super(TaskCancel, self).__init__(
            task, message="{} was canceled (rc={})".format(task.log_format, rc))
        self.rc = rc


class TaskError(AwxTaskError):
    """Userspace error (non-zero exit code) in run_pexpect subprocess"""
    def __init__(self, task, rc):
        super(TaskError, self).__init__(
            task, message="{} encountered an error (rc={}), please see task stdout for details.".format(task.log_format, rc))
        self.rc = rc
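A quick sketch of how calling code might surface these exceptions (hypothetical helper; the task argument is any object exposing log_format):

import logging

from awx.main.exceptions import AwxTaskError, TaskCancel, TaskError

logger = logging.getLogger(__name__)


def report_run(task, rc, canceled=False):
    # Translate an exit code (and cancel flag) into the exception
    # hierarchy above, then log the pre-formatted message.
    try:
        if canceled:
            raise TaskCancel(task, rc)
        if rc != 0:
            raise TaskError(task, rc)
    except AwxTaskError as exc:
        logger.error(str(exc))  # message already names task.log_format
        return exc.rc
    return rc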
@@ -9,6 +9,7 @@ import stat
 import tempfile
 import time
 import logging
+from distutils.version import LooseVersion as Version
 
 from django.conf import settings
 
@@ -370,7 +371,24 @@ class IsolatedManager(object):
             logger.warning('Isolated job {} cleanup error, output:\n{}'.format(self.instance.id, output))
 
     @classmethod
-    def health_check(cls, instance_qs):
+    def update_capacity(cls, instance, task_result, awx_application_version):
+        instance.version = task_result['version']
+
+        isolated_version = instance.version.split("-", 1)[0]
+        cluster_version = awx_application_version.split("-", 1)[0]
+
+        if Version(cluster_version) > Version(isolated_version):
+            err_template = "Isolated instance {} reports version {}, cluster node is at {}, setting capacity to zero."
+            logger.error(err_template.format(instance.hostname, instance.version, awx_application_version))
+            instance.capacity = 0
+        else:
+            if instance.capacity == 0 and task_result['capacity']:
+                logger.warning('Isolated instance {} has re-joined.'.format(instance.hostname))
+            instance.capacity = int(task_result['capacity'])
+        instance.save(update_fields=['capacity', 'version', 'modified'])
+
+    @classmethod
+    def health_check(cls, instance_qs, awx_application_version):
         '''
         :param instance_qs: List of Django objects representing the
                             isolated instances to manage
@@ -412,11 +430,7 @@ class IsolatedManager(object):
         except (KeyError, IndexError):
             task_result = {}
         if 'capacity' in task_result:
-            instance.version = task_result['version']
-            if instance.capacity == 0 and task_result['capacity']:
-                logger.warning('Isolated instance {} has re-joined.'.format(instance.hostname))
-            instance.capacity = int(task_result['capacity'])
-            instance.save(update_fields=['capacity', 'version', 'modified'])
+            cls.update_capacity(instance, task_result, awx_application_version)
         elif instance.capacity == 0:
             logger.debug('Isolated instance {} previously marked as lost, could not re-join.'.format(
                 instance.hostname))
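The version gate in update_capacity compares only the dotted prefix of each version string (anything after a "-" is split off first) via LooseVersion; a standalone illustration:

from distutils.version import LooseVersion as Version

# Only "3.2.0" and "3.2.1" take part in the comparison below.
isolated = "3.2.0-devel".split("-", 1)[0]
cluster = "3.2.1".split("-", 1)[0]

# An isolated node older than the cluster gets its capacity forced to zero.
assert Version(cluster) > Version(isolated)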
@@ -122,7 +122,7 @@ def run_pexpect(args, cwd, env, logfile,
         if cancelled_callback:
             try:
                 canceled = cancelled_callback()
-            except:
+            except Exception:
                 logger.exception('Could not check cancel callback - canceling immediately')
                 if isinstance(extra_update_fields, dict):
                     extra_update_fields['job_explanation'] = "System error during job execution, check system logs"
@@ -271,12 +271,8 @@ def __run__(private_data_dir):
 
 
 if __name__ == '__main__':
-    __version__ = '3.2.0'
-    try:
-        import awx
-        __version__ = awx.__version__
-    except ImportError:
-        pass  # in devel, `awx` isn't an installed package
+    import awx
+    __version__ = awx.__version__
     parser = argparse.ArgumentParser(description='manage a daemonized, isolated ansible playbook')
     parser.add_argument('--version', action='version', version=__version__ + '-isolated')
     parser.add_argument('command', choices=['start', 'stop', 'is-alive'])
@@ -18,12 +18,12 @@ from django.db.models.signals import (
 )
 from django.db.models.signals import m2m_changed
 from django.db import models
-from django.db.models.fields.related import (
-    add_lazy_relation,
-    SingleRelatedObjectDescriptor,
-    ReverseSingleRelatedObjectDescriptor,
-    ManyRelatedObjectsDescriptor,
-    ReverseManyRelatedObjectsDescriptor,
+from django.db.models.fields.related import add_lazy_relation
+from django.db.models.fields.related_descriptors import (
+    ReverseOneToOneDescriptor,
+    ForwardManyToOneDescriptor,
+    ManyToManyDescriptor,
+    ReverseManyToOneDescriptor,
 )
 from django.utils.encoding import smart_text
 from django.utils.translation import ugettext_lazy as _
@@ -96,7 +96,7 @@ class JSONBField(upstream_JSONBField):
 # https://bitbucket.org/offline/django-annoying/src/a0de8b294db3/annoying/fields.py
 
 
-class AutoSingleRelatedObjectDescriptor(SingleRelatedObjectDescriptor):
+class AutoSingleRelatedObjectDescriptor(ReverseOneToOneDescriptor):
     """Descriptor for access to the object from its related class."""
 
     def __get__(self, instance, instance_type=None):
@@ -139,7 +139,7 @@ def resolve_role_field(obj, field):
             raise Exception(smart_text('{} refers to a {}, not a Role'.format(field, type(obj))))
         ret.append(obj.id)
     else:
-        if type(obj) is ManyRelatedObjectsDescriptor:
+        if type(obj) is ManyToManyDescriptor:
             for o in obj.all():
                 ret += resolve_role_field(o, field_components[1])
         else:
@@ -179,7 +179,7 @@ def is_implicit_parent(parent_role, child_role):
     return False
 
 
-class ImplicitRoleDescriptor(ReverseSingleRelatedObjectDescriptor):
+class ImplicitRoleDescriptor(ForwardManyToOneDescriptor):
     pass
 
 
@@ -230,18 +230,18 @@ class ImplicitRoleField(models.ForeignKey):
             field_name, sep, field_attr = field_name.partition('.')
             field = getattr(cls, field_name)
 
-            if type(field) is ReverseManyRelatedObjectsDescriptor or \
-               type(field) is ManyRelatedObjectsDescriptor:
+            if type(field) is ReverseManyToOneDescriptor or \
+               type(field) is ManyToManyDescriptor:
 
                 if '.' in field_attr:
                     raise Exception('Referencing deep roles through ManyToMany fields is unsupported.')
 
-                if type(field) is ReverseManyRelatedObjectsDescriptor:
+                if type(field) is ReverseManyToOneDescriptor:
                     sender = field.through
                 else:
                     sender = field.related.through
 
-                reverse = type(field) is ManyRelatedObjectsDescriptor
+                reverse = type(field) is ManyToManyDescriptor
                 m2m_changed.connect(self.m2m_update(field_attr, reverse), sender, weak=False)
 
     def m2m_update(self, field_attr, _reverse):
@@ -415,6 +415,13 @@ class JSONSchemaField(JSONBField):
         return value
 
 
+@JSONSchemaField.format_checker.checks('vault_id')
+def format_vault_id(value):
+    if '@' in value:
+        raise jsonschema.exceptions.FormatError('@ is not an allowed character')
+    return True
+
+
 @JSONSchemaField.format_checker.checks('ssh_private_key')
 def format_ssh_private_key(value):
     # Sanity check: GCE, in particular, provides JSON-encoded private
@@ -754,3 +761,22 @@ class CredentialTypeInjectorField(JSONSchemaField):
                 code='invalid',
                 params={'value': value},
             )
+
+
+class AskForField(models.BooleanField):
+    """
+    Denotes whether to prompt on launch for another field on the same template
+    """
+    def __init__(self, allows_field=None, **kwargs):
+        super(AskForField, self).__init__(**kwargs)
+        self._allows_field = allows_field
+
+    @property
+    def allows_field(self):
+        if self._allows_field is None:
+            try:
+                return self.name[len('ask_'):-len('_on_launch')]
+            except AttributeError:
+                # self.name will be set by the model metaclass, not this field
+                raise Exception('Corresponding allows_field cannot be accessed until model is initialized.')
+        return self._allows_field
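On a model, the ask_*_on_launch naming convention makes allows_field derivable without passing it explicitly; an illustrative sketch (the model and import path are assumptions, not part of this diff):

from django.db import models

from awx.main.fields import AskForField  # assumed home of the field above


class ExampleTemplate(models.Model):
    # 'ask_limit_on_launch' derives allows_field='limit' by stripping
    # the 'ask_' prefix and the '_on_launch' suffix from its own name.
    limit = models.CharField(max_length=1024, blank=True, default='')
    ask_limit_on_launch = AskForField(blank=True, default=False)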
@@ -2,19 +2,12 @@
 # All Rights Reserved
 
 from awx.main.utils import get_licenser
-from django.core.management.base import NoArgsCommand
+from django.core.management.base import BaseCommand
 
 
-class Command(NoArgsCommand):
-    """Return 0 if licensed; 1 if unlicensed
-    """
+class Command(BaseCommand):
+    """Returns license type, e.g., 'enterprise', 'open', 'none'"""
 
-    def handle(self, **options):
+    def handle(self, *args, **options):
         super(Command, self).__init__()
-
-        license_info = get_licenser().validate()
-        if license_info['valid_key'] is True:
-            return 0
-        else:
-            return 1
+        return get_licenser().validate().get('license_type', 'none')
@@ -4,29 +4,28 @@
 # Python
 import datetime
 import logging
-from optparse import make_option
 
 # Django
-from django.core.management.base import NoArgsCommand
+from django.core.management.base import BaseCommand
 from django.utils.timezone import now
 
 # AWX
 from awx.main.models import ActivityStream
 
 
-class Command(NoArgsCommand):
+class Command(BaseCommand):
     '''
     Management command to purge old activity stream events.
     '''
 
     help = 'Remove old activity stream events from the database'
 
-    option_list = NoArgsCommand.option_list + (
-        make_option('--days', dest='days', type='int', default=90, metavar='N',
-                    help='Remove activity stream events more than N days old'),
-        make_option('--dry-run', dest='dry_run', action='store_true',
-                    default=False, help='Dry run mode (show items that would '
-                                        'be removed)'),)
+    def add_arguments(self, parser):
+        parser.add_argument('--days', dest='days', type=int, default=90, metavar='N',
+                            help='Remove activity stream events more than N days old')
+        parser.add_argument('--dry-run', dest='dry_run', action='store_true',
+                            default=False, help='Dry run mode (show items that would '
+                                                'be removed)')
 
     def init_logging(self):
         log_levels = dict(enumerate([logging.ERROR, logging.INFO,
@@ -61,7 +60,7 @@ class Command(NoArgsCommand):
             n_deleted_items += len(pks_to_delete)
         self.logger.log(99, "Removed %d items", n_deleted_items)
 
-    def handle_noargs(self, **options):
+    def handle(self, *args, **options):
         self.verbosity = int(options.get('verbosity', 1))
         self.init_logging()
         self.days = int(options.get('days', 30))
@@ -4,7 +4,6 @@
 # Python
 import re
 from dateutil.relativedelta import relativedelta
-from optparse import make_option
 
 # Django
 from django.core.management.base import BaseCommand, CommandError
@@ -93,19 +92,20 @@ class CleanupFacts(object):
 
 class Command(BaseCommand):
     help = 'Cleanup facts. For each host older than the value specified, keep one fact scan for each time window (granularity).'
-    option_list = BaseCommand.option_list + (
-        make_option('--older_than',
-                    dest='older_than',
-                    default='30d',
-                    help='Specify the relative time to consider facts older than (w)eek (d)ay or (y)ear (i.e. 5d, 2w, 1y). Defaults to 30d.'),
-        make_option('--granularity',
-                    dest='granularity',
-                    default='1w',
-                    help='Window duration to group same hosts by for deletion (w)eek (d)ay or (y)ear (i.e. 5d, 2w, 1y). Defaults to 1w.'),
-        make_option('--module',
-                    dest='module',
-                    default=None,
-                    help='Limit cleanup to a particular module.'),)
+
+    def add_arguments(self, parser):
+        parser.add_argument('--older_than',
+                            dest='older_than',
+                            default='30d',
+                            help='Specify the relative time to consider facts older than (w)eek (d)ay or (y)ear (i.e. 5d, 2w, 1y). Defaults to 30d.')
+        parser.add_argument('--granularity',
+                            dest='granularity',
+                            default='1w',
+                            help='Window duration to group same hosts by for deletion (w)eek (d)ay or (y)ear (i.e. 5d, 2w, 1y). Defaults to 1w.')
+        parser.add_argument('--module',
+                            dest='module',
+                            default=None,
+                            help='Limit cleanup to a particular module.')
 
     def __init__(self):
         super(Command, self).__init__()
@@ -4,10 +4,9 @@
 # Python
 import datetime
 import logging
-from optparse import make_option
 
 # Django
-from django.core.management.base import NoArgsCommand, CommandError
+from django.core.management.base import BaseCommand, CommandError
 from django.db import transaction
 from django.utils.timezone import now
 
@@ -25,47 +24,47 @@ from awx.main.signals import ( # noqa
 from django.db.models.signals import post_save, post_delete, m2m_changed  # noqa
 
 
-class Command(NoArgsCommand):
+class Command(BaseCommand):
     '''
     Management command to cleanup old jobs and project updates.
     '''
 
     help = 'Remove old jobs, project and inventory updates from the database.'
 
-    option_list = NoArgsCommand.option_list + (
-        make_option('--days', dest='days', type='int', default=90, metavar='N',
-                    help='Remove jobs/updates executed more than N days ago. Defaults to 90.'),
-        make_option('--dry-run', dest='dry_run', action='store_true',
-                    default=False, help='Dry run mode (show items that would '
-                                        'be removed)'),
-        make_option('--jobs', dest='only_jobs', action='store_true',
-                    default=False,
-                    help='Remove jobs'),
-        make_option('--ad-hoc-commands', dest='only_ad_hoc_commands',
-                    action='store_true', default=False,
-                    help='Remove ad hoc commands'),
-        make_option('--project-updates', dest='only_project_updates',
-                    action='store_true', default=False,
-                    help='Remove project updates'),
-        make_option('--inventory-updates', dest='only_inventory_updates',
-                    action='store_true', default=False,
-                    help='Remove inventory updates'),
-        make_option('--management-jobs', default=False,
-                    action='store_true', dest='only_management_jobs',
-                    help='Remove management jobs'),
-        make_option('--notifications', dest='only_notifications',
-                    action='store_true', default=False,
-                    help='Remove notifications'),
-        make_option('--workflow-jobs', default=False,
-                    action='store_true', dest='only_workflow_jobs',
-                    help='Remove workflow jobs')
-    )
+    def add_arguments(self, parser):
+        parser.add_argument('--days', dest='days', type=int, default=90, metavar='N',
+                            help='Remove jobs/updates executed more than N days ago. Defaults to 90.')
+        parser.add_argument('--dry-run', dest='dry_run', action='store_true',
+                            default=False, help='Dry run mode (show items that would '
+                                                'be removed)')
+        parser.add_argument('--jobs', dest='only_jobs', action='store_true',
+                            default=False,
+                            help='Remove jobs')
+        parser.add_argument('--ad-hoc-commands', dest='only_ad_hoc_commands',
+                            action='store_true', default=False,
+                            help='Remove ad hoc commands')
+        parser.add_argument('--project-updates', dest='only_project_updates',
+                            action='store_true', default=False,
+                            help='Remove project updates')
+        parser.add_argument('--inventory-updates', dest='only_inventory_updates',
+                            action='store_true', default=False,
+                            help='Remove inventory updates')
+        parser.add_argument('--management-jobs', default=False,
+                            action='store_true', dest='only_management_jobs',
+                            help='Remove management jobs')
+        parser.add_argument('--notifications', dest='only_notifications',
+                            action='store_true', default=False,
+                            help='Remove notifications')
+        parser.add_argument('--workflow-jobs', default=False,
+                            action='store_true', dest='only_workflow_jobs',
+                            help='Remove workflow jobs')
 
     def cleanup_jobs(self):
-        #jobs_qs = Job.objects.exclude(status__in=('pending', 'running'))
-        #jobs_qs = jobs_qs.filter(created__lte=self.cutoff)
         skipped, deleted = 0, 0
-        for job in Job.objects.all():
+        jobs = Job.objects.filter(created__lt=self.cutoff)
+        for job in jobs.iterator():
             job_display = '"%s" (%d host summaries, %d events)' % \
                 (unicode(job),
                  job.job_host_summaries.count(), job.job_events.count())
@@ -73,21 +72,20 @@ class Command(NoArgsCommand):
                 action_text = 'would skip' if self.dry_run else 'skipping'
                 self.logger.debug('%s %s job %s', action_text, job.status, job_display)
                 skipped += 1
-            elif job.created >= self.cutoff:
-                action_text = 'would skip' if self.dry_run else 'skipping'
-                self.logger.debug('%s %s', action_text, job_display)
-                skipped += 1
             else:
                 action_text = 'would delete' if self.dry_run else 'deleting'
                 self.logger.info('%s %s', action_text, job_display)
                 if not self.dry_run:
                     job.delete()
                 deleted += 1
 
+        skipped += Job.objects.filter(created__gte=self.cutoff).count()
         return skipped, deleted
 
     def cleanup_ad_hoc_commands(self):
        skipped, deleted = 0, 0
-        for ad_hoc_command in AdHocCommand.objects.all():
+        ad_hoc_commands = AdHocCommand.objects.filter(created__lt=self.cutoff)
+        for ad_hoc_command in ad_hoc_commands.iterator():
             ad_hoc_command_display = '"%s" (%d events)' % \
                 (unicode(ad_hoc_command),
                  ad_hoc_command.ad_hoc_command_events.count())
@@ -95,21 +93,20 @@ class Command(NoArgsCommand):
                 action_text = 'would skip' if self.dry_run else 'skipping'
                 self.logger.debug('%s %s ad hoc command %s', action_text, ad_hoc_command.status, ad_hoc_command_display)
                 skipped += 1
-            elif ad_hoc_command.created >= self.cutoff:
-                action_text = 'would skip' if self.dry_run else 'skipping'
-                self.logger.debug('%s %s', action_text, ad_hoc_command_display)
-                skipped += 1
             else:
                 action_text = 'would delete' if self.dry_run else 'deleting'
                 self.logger.info('%s %s', action_text, ad_hoc_command_display)
                 if not self.dry_run:
                     ad_hoc_command.delete()
                 deleted += 1
 
+        skipped += AdHocCommand.objects.filter(created__gte=self.cutoff).count()
        return skipped, deleted
 
     def cleanup_project_updates(self):
         skipped, deleted = 0, 0
-        for pu in ProjectUpdate.objects.all():
+        project_updates = ProjectUpdate.objects.filter(created__lt=self.cutoff)
+        for pu in project_updates.iterator():
             pu_display = '"%s" (type %s)' % (unicode(pu), unicode(pu.launch_type))
             if pu.status in ('pending', 'waiting', 'running'):
                 action_text = 'would skip' if self.dry_run else 'skipping'
@@ -119,21 +116,20 @@ class Command(NoArgsCommand):
                 action_text = 'would skip' if self.dry_run else 'skipping'
                 self.logger.debug('%s %s', action_text, pu_display)
                 skipped += 1
-            elif pu.created >= self.cutoff:
-                action_text = 'would skip' if self.dry_run else 'skipping'
-                self.logger.debug('%s %s', action_text, pu_display)
-                skipped += 1
             else:
                 action_text = 'would delete' if self.dry_run else 'deleting'
                 self.logger.info('%s %s', action_text, pu_display)
                 if not self.dry_run:
                     pu.delete()
                 deleted += 1
 
+        skipped += ProjectUpdate.objects.filter(created__gte=self.cutoff).count()
         return skipped, deleted
 
     def cleanup_inventory_updates(self):
         skipped, deleted = 0, 0
-        for iu in InventoryUpdate.objects.all():
+        inventory_updates = InventoryUpdate.objects.filter(created__lt=self.cutoff)
+        for iu in inventory_updates.iterator():
             iu_display = '"%s" (source %s)' % (unicode(iu), unicode(iu.source))
             if iu.status in ('pending', 'waiting', 'running'):
                 action_text = 'would skip' if self.dry_run else 'skipping'
@@ -143,36 +139,33 @@ class Command(NoArgsCommand):
                 action_text = 'would skip' if self.dry_run else 'skipping'
                 self.logger.debug('%s %s', action_text, iu_display)
                 skipped += 1
-            elif iu.created >= self.cutoff:
-                action_text = 'would skip' if self.dry_run else 'skipping'
-                self.logger.debug('%s %s', action_text, iu_display)
-                skipped += 1
             else:
                 action_text = 'would delete' if self.dry_run else 'deleting'
                 self.logger.info('%s %s', action_text, iu_display)
                 if not self.dry_run:
                     iu.delete()
                 deleted += 1
 
+        skipped += InventoryUpdate.objects.filter(created__gte=self.cutoff).count()
         return skipped, deleted
 
     def cleanup_management_jobs(self):
         skipped, deleted = 0, 0
-        for sj in SystemJob.objects.all():
+        system_jobs = SystemJob.objects.filter(created__lt=self.cutoff)
+        for sj in system_jobs.iterator():
             sj_display = '"%s" (type %s)' % (unicode(sj), unicode(sj.job_type))
             if sj.status in ('pending', 'waiting', 'running'):
                 action_text = 'would skip' if self.dry_run else 'skipping'
                 self.logger.debug('%s %s system_job %s', action_text, sj.status, sj_display)
                 skipped += 1
-            elif sj.created >= self.cutoff:
-                action_text = 'would skip' if self.dry_run else 'skipping'
-                self.logger.debug('%s %s', action_text, sj_display)
-                skipped += 1
             else:
                 action_text = 'would delete' if self.dry_run else 'deleting'
                 self.logger.info('%s %s', action_text, sj_display)
                 if not self.dry_run:
                     sj.delete()
                 deleted += 1
 
+        skipped += SystemJob.objects.filter(created__gte=self.cutoff).count()
         return skipped, deleted
 
     def init_logging(self):
@@ -187,7 +180,8 @@ class Command(NoArgsCommand):
 
     def cleanup_workflow_jobs(self):
         skipped, deleted = 0, 0
-        for workflow_job in WorkflowJob.objects.all():
+        workflow_jobs = WorkflowJob.objects.filter(created__lt=self.cutoff)
+        for workflow_job in workflow_jobs.iterator():
             workflow_job_display = '"{}" ({} nodes)'.format(
                 unicode(workflow_job),
                 workflow_job.workflow_nodes.count())
@@ -195,21 +189,20 @@ class Command(NoArgsCommand):
                 action_text = 'would skip' if self.dry_run else 'skipping'
                 self.logger.debug('%s %s job %s', action_text, workflow_job.status, workflow_job_display)
                 skipped += 1
-            elif workflow_job.created >= self.cutoff:
-                action_text = 'would skip' if self.dry_run else 'skipping'
-                self.logger.debug('%s %s', action_text, workflow_job_display)
-                skipped += 1
             else:
                 action_text = 'would delete' if self.dry_run else 'deleting'
                 self.logger.info('%s %s', action_text, workflow_job_display)
                 if not self.dry_run:
                     workflow_job.delete()
                 deleted += 1
 
+        skipped += WorkflowJob.objects.filter(created__gte=self.cutoff).count()
         return skipped, deleted
 
     def cleanup_notifications(self):
         skipped, deleted = 0, 0
-        for notification in Notification.objects.all():
+        notifications = Notification.objects.filter(created__lt=self.cutoff)
+        for notification in notifications.iterator():
             notification_display = '"{}" (started {}, {} type, {} sent)'.format(
                 unicode(notification), unicode(notification.created),
                 notification.notification_type, notification.notifications_sent)
@@ -217,20 +210,18 @@ class Command(NoArgsCommand):
                 action_text = 'would skip' if self.dry_run else 'skipping'
                 self.logger.debug('%s %s notification %s', action_text, notification.status, notification_display)
                 skipped += 1
-            elif notification.created >= self.cutoff:
-                action_text = 'would skip' if self.dry_run else 'skipping'
-                self.logger.debug('%s %s', action_text, notification_display)
-                skipped += 1
             else:
                 action_text = 'would delete' if self.dry_run else 'deleting'
                 self.logger.info('%s %s', action_text, notification_display)
                 if not self.dry_run:
                     notification.delete()
                 deleted += 1
 
+        skipped += Notification.objects.filter(created__gte=self.cutoff).count()
         return skipped, deleted
 
     @transaction.atomic
-    def handle_noargs(self, **options):
+    def handle(self, *args, **options):
         self.verbosity = int(options.get('verbosity', 1))
         self.init_logging()
         self.days = int(options.get('days', 90))
@@ -255,3 +246,4 @@ class Command(NoArgsCommand):
                 self.logger.log(99, '%s: %d would be deleted, %d would be skipped.', m.replace('_', ' '), deleted, skipped)
             else:
                 self.logger.log(99, '%s: %d deleted, %d skipped.', m.replace('_', ' '), deleted, skipped)
+
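With the options declared through add_arguments, the cleanup can also be driven programmatically; a hedged sketch (keyword names follow the dest= values above, and 'cleanup_jobs' is assumed to be the command's registered name):

from django.core.management import call_command

# Equivalent to: awx-manage cleanup_jobs --days 30 --dry-run --jobs
call_command('cleanup_jobs', days=30, dry_run=True, only_jobs=True)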
@@ -45,10 +45,10 @@ class Command(BaseCommand):
                                 inventory=i,
                                 variables="ansible_connection: local",
                                 created_by=superuser)
-            JobTemplate.objects.create(name='Demo Job Template',
-                                       playbook='hello_world.yml',
-                                       project=p,
-                                       inventory=i,
-                                       credential=c)
+            jt = JobTemplate.objects.create(name='Demo Job Template',
+                                            playbook='hello_world.yml',
+                                            project=p,
+                                            inventory=i)
+            jt.credentials.add(c)
-            print('Default organization added.')
+            print('Demo Credential, Inventory, and Job Template added.')
 
@@ -1,7 +1,6 @@
 # Copyright (c) 2016 Ansible, Inc.
 # All Rights Reserved
 
-from optparse import make_option
 import subprocess
 import warnings
 
@@ -17,13 +16,17 @@ class Command(BaseCommand):
     Deprovision a Tower cluster node
     """
 
-    option_list = BaseCommand.option_list + (
-        make_option('--hostname', dest='hostname', type='string',
-                    help='Hostname used during provisioning'),
-        make_option('--name', dest='name', type='string',
-                    help='(PENDING DEPRECATION) Hostname used during provisioning'),
-    )
+    help = (
+        'Remove instance from the database. '
+        'Specify `--hostname` to use this command.'
+    )
+
+    def add_arguments(self, parser):
+        parser.add_argument('--hostname', dest='hostname', type=str,
+                            help='Hostname used during provisioning')
+        parser.add_argument('--name', dest='name', type=str,
+                            help='(PENDING DEPRECATION) Hostname used during provisioning')
 
     @transaction.atomic
     def handle(self, *args, **options):
         # TODO: remove in 3.3
Some files were not shown because too many files have changed in this diff