Mirror of https://github.com/ansible/awx.git (synced 2026-02-05 03:24:50 -03:30)
Compare commits
817 Commits
| SHA1 |
|---|
| 4f829ab93f |
| 5ce78b383d |
| 2404faa5d8 |
| e72b2fac6d |
| 11b36982cd |
| d438a93fd2 |
| dfa8d44eb8 |
| 4470e9ca26 |
| cf0fe729f5 |
| 913e06b865 |
| 4d7c49372c |
| 5c338e582a |
| eacf819caf |
| 273415b9aa |
| e612a167e2 |
| 0a7d6e603e |
| f05bed6366 |
| cbe6c5bd3b |
| e9ac44f561 |
| aab29bef5b |
| 9f42d9426c |
| b369609f07 |
| 01d31231c0 |
| c46be3e718 |
| 38aedcdd48 |
| 59bec99f4c |
| b0249a9a8b |
| acb6d9c4d1 |
| 78912d20f7 |
| 52712a0d9a |
| a7c787af02 |
| e269634afc |
| 4daf574899 |
| 067ba7f8fe |
| 7d77727a60 |
| 47560fdf7c |
| 2882f4afb5 |
| aaceccc426 |
| 1f3242900a |
| e6232957b4 |
| 1a72ff4c47 |
| c585c3d07d |
| 1413c1be7b |
| a5c057cc18 |
| 9c06dc7106 |
| fe850dff38 |
| 40840e3789 |
| e3750f541e |
| 5d49fe2170 |
| ca0e8102fd |
| 08aff9bd2c |
| 164d305b51 |
| 4d33e484d0 |
| 1897b18a6e |
| ec92abf014 |
| 5ed7db8cc2 |
| 69502bc133 |
| 17c89ed412 |
| f5b6bd65cf |
| c6f1806a23 |
| c65e6ba30b |
| d511d63a5a |
| 30741e762a |
| 7687eddf6d |
| 9cfed6f2a8 |
| 95896b1acd |
| 68fe23d8b7 |
| dd372548a9 |
| 8d6e1f0927 |
| 98fa1fc813 |
| 8ec97235e3 |
| 863d962ec2 |
| 468e79a754 |
| 049b3a2e87 |
| 196b6572b2 |
| 37cb912367 |
| d8bd72054d |
| 9958f382d7 |
| 1d767a15d8 |
| 612373c849 |
| 3ea6171b54 |
| 302d8589c9 |
| 01f51219a6 |
| 38ea82bf3d |
| d4ad674899 |
| 9d3aca5e1b |
| ae9032ce03 |
| a454102e77 |
| e1d60ff4f1 |
| bd93d97a60 |
| 793e78d9c0 |
| 76ebcf914b |
| effe7151eb |
| 0023591bb0 |
| 9c50609776 |
| 28cc08f215 |
| b83cef6ed7 |
| c729b698dd |
| e70c7ab458 |
| aa548442ce |
| ab587e7e6c |
| 3f5c018c8a |
| 12e3d0aebf |
| ba4ad191fc |
| 34d76422d6 |
| 2a81643308 |
| 76d4de24df |
| ed7a7e5f7b |
| f94959d120 |
| b5728fc548 |
| 4a19da650d |
| 8db27611ca |
| 7161f28d26 |
| 290c242221 |
| 90fb7c6769 |
| f8c69aadcb |
| c7b38bc9b9 |
| cc1ef50729 |
| 42b3aa45c5 |
| 1e91e4e531 |
| b3979eb2b9 |
| 75ef30d21b |
| 31b78cc00f |
| a510f9f2c7 |
| 05aab5da4c |
| 3054fbc61c |
| be8a30b9d9 |
| d4301bd9bd |
| adb768bed3 |
| 9253ab28c8 |
| 31d4e8362e |
| 329b40fd69 |
| 6bc5c4da74 |
| 51d7de296f |
| 5987aafb82 |
| 7a0a2fb54c |
| 2f57a1ea93 |
| 6da445f7c0 |
| 176f8632e5 |
| 958c192ff7 |
| 7e8990dff9 |
| a727de184b |
| 6d1ba411e6 |
| b00979792e |
| 8be0b01c33 |
| 62a3e0df98 |
| c7f49c1193 |
| 3fcf3b20c4 |
| 5db3a8e7dc |
| 3d7bd8579b |
| 99704af302 |
| a13b733191 |
| 4a7cd56e4a |
| 839f49c6ed |
| c6afd98500 |
| 67fb898a9d |
| f18f9ec0ef |
| ef22986aa0 |
| 1829017ad4 |
| be3d095067 |
| 112b9e7381 |
| 87bd3c2f93 |
| abb37299cb |
| 724ca9cd57 |
| 39fb0d1679 |
| 11630a8803 |
| a7b96d5aec |
| d8a80f9f3e |
| 1dcb1eda7c |
| f64e31735c |
| d7c33a7246 |
| fedd1cf22f |
| 12ff7a481c |
| 9b5494a6cc |
| 99296cf5f1 |
| e6b78292ec |
| 49b54f2d60 |
| 0a256a98be |
| 5756151568 |
| 6606fd7461 |
| 84b6866875 |
| 4b9024bcb0 |
| 4b1fc7894d |
| a5a6fdf1d6 |
| 15c699de7c |
| 515d4fe20f |
| 75380b9576 |
| cb279843d2 |
| 41f2b83ae2 |
| 40b1e89b67 |
| 8c56d1d3a7 |
| 2f77c67a62 |
| 5a502f8709 |
| 873ff3de78 |
| eb3ef809e0 |
| 9c90694f12 |
| ca3735ee73 |
| 380f122456 |
| 0aa8c7427d |
| dbc65baa43 |
| 1ce587025e |
| 45458b3265 |
| 874465a2d4 |
| 34c3aaee3d |
| 13ff5ffdf2 |
| a606fdc958 |
| b11995e638 |
| 40f9d0b512 |
| 9a1b205e06 |
| 98c923a715 |
| 1d328134fd |
| d3f047d731 |
| 8ca0c1b992 |
| 4a711ec2dc |
| 75fe801efb |
| 56df930b99 |
| f48713f4ae |
| bb009f0d12 |
| dc1bf3ef07 |
| 9d4cfa7400 |
| 06be3a29b9 |
| 2addf20907 |
| 29bbecb6bf |
| f4c18843a3 |
| bda838f723 |
| 2bec5ddb41 |
| 74643520c7 |
| 44907b33dc |
| f174902bb2 |
| 1223148116 |
| ab1e45d6c4 |
| bd50e5d6a8 |
| f085b828e4 |
| e95339ba6e |
| d353daebc5 |
| 6681cd918c |
| 2b327935de |
| 0c4925afe8 |
| e0062484d0 |
| faa353521a |
| ff7e244a84 |
| 17f71600df |
| 9120a69006 |
| d9965cfe7e |
| c38ee06642 |
| b1d75327e3 |
| 8981c7d59a |
| b10f06201d |
| 681fe4865c |
| faae55d085 |
| efddd9f679 |
| 6fb173da8a |
| 64e8b76a10 |
| d0e160a037 |
| 20e5d8200e |
| a6d3c0fd32 |
| 318e0631b7 |
| da4153d653 |
| 26e9dd307e |
| 6a2d59963f |
| 68800d0e8e |
| 97dc77ea63 |
| 6bc1856658 |
| 9ea3ec24ca |
| 7466873f69 |
| 07fa533b6f |
| 8aa28092ff |
| 3579584ffc |
| 05cae23180 |
| d6e89092d3 |
| fe344038b5 |
| 250484339b |
| 5ca0cdb124 |
| 0d3f1f4ac2 |
| 18c69fa391 |
| 6d6eae571e |
| 7d8a910be7 |
| df04660cdd |
| 9d1ed837f9 |
| be4705ef8b |
| 015234287c |
| 677a8b34ba |
| 232c706b75 |
| 8725d3e539 |
| e7290e6452 |
| 21105b836e |
| 99dc84c275 |
| adfdfcdd0a |
| 6feb58f76d |
| 2910a9dfff |
| 371966613f |
| 91968a09c8 |
| 190098bbd5 |
| 2585c5030b |
| 22858f0044 |
| 7e6a73f892 |
| 1874e8bb4c |
| 83c286580b |
| 0be8fe521a |
| 186ec88581 |
| 6407ab58ff |
| 466e965047 |
| e1de0a528d |
| 766a5c0c3f |
| 231abf865b |
| 70972f7ea1 |
| 993b0a889d |
| 205f2c33c1 |
| 1e77053bbf |
| ae25717700 |
| 11244f85a4 |
| e05c6e67b6 |
| 42f30e72b5 |
| 0fb3851a2b |
| e4a50f3595 |
| f524c94bad |
| c7fe840868 |
| 0154d80f19 |
| 08d60d0b78 |
| 6908558acd |
| 4dc7178f3c |
| b85cc716a4 |
| 418521f4a3 |
| 5b8fba58e8 |
| 842d48810c |
| 70e513a3cf |
| c6c14d4fb9 |
| a874ed0424 |
| cf6a103207 |
| d2e67aea19 |
| e5cf5be18d |
| f26ae8ef13 |
| 755c3e89e2 |
| 2800e89fd2 |
| 8d75fc5f56 |
| 17d2efde95 |
| 8909a8a8e4 |
| 90a86f53ba |
| 4be05f1bf6 |
| 29822ee140 |
| 7fc13b8bb5 |
| e560dccd36 |
| de56e20f11 |
| d4cc595630 |
| b754e0dbba |
| 72fe6e400e |
| 6bb9902588 |
| 04d22a930d |
| fe3a2d1a4e |
| 50f9c70afd |
| 5ab7f888f1 |
| 7d692d08f9 |
| f3c023a11f |
| a87c87b7c9 |
| 5f1aeb0f4e |
| cc001c9892 |
| a68ab19e16 |
| 530a7ba51d |
| 383fe278f5 |
| c41da766fb |
| 46795cc71e |
| 4fac608890 |
| 84b21620b2 |
| eaaad89a8c |
| 6f309bd2d2 |
| 6e00038d35 |
| ad4e413a36 |
| 27ca5e1fd5 |
| 4191b21052 |
| 9737ab620c |
| 81f0662161 |
| d1dc6007fd |
| 52276ebbab |
| d204f12184 |
| 42dd3c5cf5 |
| 5720601a2e |
| 5d1346b956 |
| 30d0130e79 |
| 4baecef866 |
| aaeb2d6fb9 |
| c707e60bde |
| b7e26b3507 |
| 44fcf2e0e4 |
| 3cd8d4f7e6 |
| ddde669083 |
| 36e384e8ab |
| 586c3e4583 |
| 14045c1017 |
| d11dfd0a2b |
| cb7914dfa4 |
| 639e01e884 |
| e4d6d51cf5 |
| 7f452ee8d1 |
| 27d74528c0 |
| 0ada1e965f |
| 2808a852eb |
| 80393e9194 |
| aa52e41c02 |
| d7cdec37f2 |
| f60857013e |
| c9424f9af8 |
| 66f883befe |
| fad0274373 |
| 8c715fc6e1 |
| 870ebb4b43 |
| e78ef82385 |
| 1eb565543c |
| 082a819160 |
| 519d2eebcb |
| f2ea9003a3 |
| 64ae7a6e45 |
| b47be3c8c7 |
| 2f7ec6ff13 |
| 75065b6407 |
| 2cb6104fe4 |
| f90f8ba9a8 |
| 9733fde560 |
| 231c76c9cb |
| 311daf10b8 |
| 4ca4563a19 |
| 96183cf9c4 |
| 0b555e938a |
| c36dbb3448 |
| adfce6edf1 |
| 140394fe1f |
| 6ff539e6ee |
| 49ba6c6b3d |
| e2861c6c39 |
| 6dae4a1d6d |
| e66f9241a9 |
| 72da961550 |
| 4c86c5065c |
| 80a855c57a |
| 11b85250e8 |
| 10cfac2f0e |
| 355a83441a |
| e25adca233 |
| 7c743904b0 |
| a97865de0c |
| 596a5173ce |
| 63209197dd |
| af2484cd97 |
| f01a936202 |
| 05f670a6d9 |
| f4b0910e98 |
| 95b17892ee |
| 8fdc53cb21 |
| 04521f5c5c |
| 8c2b3e9b84 |
| 864fef4f29 |
| 787e369bcf |
| 805a0c7a9a |
| c80a5e2164 |
| eb0463890c |
| f6076052bd |
| 0d18d46ccc |
| 23ea419aa9 |
| b084622c9e |
| ef7e1afa34 |
| be86086134 |
| a06d2946b6 |
| 0ba87c9729 |
| 41b476544d |
| 97a6255531 |
| 4159a9b6af |
| 798bc4b8de |
| 6adf4ca2c1 |
| 8cc18f501e |
| 63049fe99d |
| d6ecb486d2 |
| f4dc4d5849 |
| 87d55b13bc |
| 5d570a017a |
| 6d5897f371 |
| e14f19468b |
| 8ad0b3f787 |
| 70b0679a0c |
| add8673d7c |
| 84c09a19d1 |
| fbc7d1a9f2 |
| daeeb31590 |
| 66886fb57a |
| 1ddb675fa2 |
| 4fd04e095f |
| 9d000a76de |
| 7f5227809f |
| f1d87bf392 |
| 941009bf6d |
| 9c71204435 |
| 15ef095366 |
| c2daccade7 |
| 3bfb54d2fd |
| a5d31e56d6 |
| d222bed932 |
| 45cc8f1cc9 |
| e0c4fd4b3a |
| 6ef3b18803 |
| f23b4e7b9a |
| 1d4773545e |
| e40c6da552 |
| 8ef81065b7 |
| 08fcdf0e25 |
| 387682ed8d |
| 707c3883a8 |
| 47b0a96e96 |
| fb4495b1b5 |
| 5dbc269de1 |
| 5c7939a6ac |
| 890de400e2 |
| e8de7bc845 |
| 40393e201f |
| 97e2137d07 |
| 29c3927a16 |
| 17a803f49c |
| d0c3882d9d |
| 9da5cc9c23 |
| 7913247eaa |
| 569cd3b34e |
| 2c67bec3cb |
| 77e587babe |
| 235011f5e9 |
| 0c6a522813 |
| dc833bbaa7 |
| 9b0329021c |
| 3611f3491b |
| 213a0c3cef |
| 0cc640403a |
| 85898fd708 |
| 14d4d624e4 |
| de34a64115 |
| d67a336e2f |
| 58966d7368 |
| fc5322b2a4 |
| 817b350de9 |
| 0c4ed78bee |
| 0252af0d30 |
| f56f6c11f7 |
| ef84bfc1c2 |
| 8383568e3c |
| fd5d9c45d7 |
| 23d0f6022c |
| 9e98058290 |
| e073b5e017 |
| 21bec83a4e |
| cd966f2669 |
| 1830da4268 |
| 98ec5c8250 |
| 0caf1e8a3d |
| ae7b173e17 |
| c326b186a6 |
| 4f2443e7bb |
| 213a70b98a |
| c91cd606ed |
| 4eab362318 |
| df81eb7533 |
| 1bb5d560bd |
| 652281b6fa |
| b978a28f8b |
| 4664d9556d |
| 7f935084df |
| 988438f119 |
| 20a023e243 |
| f49532090b |
| 75a1c69ea2 |
| 7f6659f767 |
| 8f05482f8f |
| 01a79dd23e |
| 3d308cc2a3 |
| 4f55ffe4a0 |
| 6ce972a2a4 |
| 57b3565f42 |
| 1abb0b2c35 |
| 809fcac738 |
| fa665e81e4 |
| c4b41a174b |
| ce65fdd26c |
| aa0179690d |
| 67aeecdee5 |
| d14b71ccee |
| 47a3ee7d76 |
| a2bfb0f65c |
| fc9da002d2 |
| e1c6057b4c |
| 520cbd2015 |
| e2039b7d3f |
| 9125313ed3 |
| 2c8900568b |
| 5c1d2a6f0b |
| bb5c7a98f3 |
| 3fb3079264 |
| c3812de3d6 |
| 4821a94944 |
| 97e030dd1f |
| dfd4cb55e5 |
| 13fadd3838 |
| 7b636a7566 |
| 28e3c63562 |
| cca9de9a3e |
| 880341ac05 |
| e9f2fddc7f |
| a9a479a51b |
| ddcb7d4881 |
| cd90ad2497 |
| e3dfc6c796 |
| 1a151ad63a |
| 4095a6c9d4 |
| 161fd9cde0 |
| dc77ddbc5b |
| 81fe923577 |
| 3521c56baf |
| dd609b8a7c |
| ef1a5c09b6 |
| fa3a41f25b |
| 50c7807483 |
| 531d97d3b3 |
| d4c69429db |
| 79d580d5b9 |
| 8302992a35 |
| 4f8b197b55 |
| 5a4a812c73 |
| 5c5173956d |
| 2423d9df44 |
| ce2fc1a9dd |
| fd3a423b07 |
| c6643946c5 |
| 6b422d3bb7 |
| 3f6d3506c6 |
| ac11b3782b |
| cfdecd7297 |
| 5debfdf5e8 |
| 3f9fd3a3a8 |
| 2493374d44 |
| 7722f0ca08 |
| e3f10ebd65 |
| 713b2c1bf2 |
| 00a9ae0e72 |
| 6bcb471584 |
| f5ea595763 |
| 1f1cb2bdac |
| d9d3c5d15f |
| 47d6e5c028 |
| 467700e4bb |
| cfa2eabb57 |
| b4e508f72a |
| 2bda81661e |
| b0f6d2214c |
| c9bac0b51c |
| 487473f0d1 |
| 4364e00117 |
| e033eb5aef |
| bc169fe1cc |
| c15502e581 |
| 45d5999bc2 |
| 78cd1abbb2 |
| e0861fee3a |
| cb806b1699 |
| 15b80cdb1a |
| bf0f1f1496 |
| dbe4417ac3 |
| c76e97cecf |
| ce4e34eb28 |
| 32286a9d49 |
| ed0bb127e8 |
| cac48e7cfb |
| 96bd0f9d9e |
| bc7472f3f1 |
| 5b2f00b978 |
| 3f73176ef2 |
| d987c6e3f7 |
| 5d11400f6c |
| 6f43f223b2 |
| 6b87241099 |
| 0f6615d9cd |
| 9b55fa61de |
| 165405ad3f |
| 7ca92e4c1e |
| 8c107a5fa8 |
| 8b35ac89fc |
| b90f9ac401 |
| 7a093de9fd |
| 259fcdc5df |
| aa4575c92e |
| f0f4f487fb |
| c4a79a778f |
| f611d4275f |
| 8c3dfe8655 |
| f9e081046c |
| da9abc087e |
| 956f588fd8 |
| cb238c1f24 |
| 3dee9f0512 |
| 261a635005 |
| 4f13592430 |
| 73c9d1b7a9 |
| 8180a2060a |
| dfaf19cdf3 |
| ea9ed31f9d |
| 05226333ff |
| 5b79843390 |
| 1eda939ce2 |
| 43456d13c4 |
| 215c3c87e5 |
| 957804e22a |
| e14f17687c |
| e0d61cfb8e |
| 1344706095 |
| 5c855b5bd1 |
| 6d0f2948aa |
| 736bd2ed67 |
| 47f31b41fb |
| 61eeb630f8 |
| 7f55a1da0d |
| ceef7f57af |
| 393ad6b2f4 |
| 0768c6ac1d |
| 2824616ba6 |
| c436dcf875 |
| 0de8a89293 |
| 81a509424a |
| 42f4956a7f |
| e2d474ddd2 |
| 011d7eb892 |
| 018ff91620 |
| 368d933799 |
| e9532dea8e |
| b911f8bf77 |
| e727909a61 |
| 13366c1e75 |
| ca6d124417 |
| 35cca68f04 |
| dcf17683e2 |
| b851e2be4a |
| 69368d874e |
| 0ee223f799 |
| 7a43f00a5d |
| 4ed5bca5e3 |
| 0a87469225 |
| 63997838cd |
| 89b731a0cb |
| 9036ba492c |
| d87144c4a7 |
| c209955400 |
| e5f48bfa62 |
| 6e2c04e16c |
| 8f096d11b6 |
| b98bac0ee9 |
| 52a2ed2786 |
| 9710015a2f |
| b70c354dfc |
| e943ae59b7 |
| 6e8809fe72 |
| d2d1074e8b |
| 1bcda0a4cb |
| 4e4afb4a98 |
| 750c8d8a6d |
| 74e264a6a5 |
| c508dc7d83 |
| ea355f6f8f |
| 95f5315fa3 |
| e99e97bcb8 |
| 7b16931658 |
| e395b87a1b |
| 71fcb1a82c |
| 21e5179a84 |
| 298eaa0b32 |
| 17c0293065 |
| 6f150f0362 |
| 71718ee2eb |
| 7ef68a03c6 |
| bf3473d394 |
| eab9ee5128 |
| 563a0cc2a4 |
| 3007b9c66a |
| a4ec149344 |
| a59bc33280 |
| 8f089c02a5 |
| d663d397f8 |
| 546281d435 |
| 4e01b11577 |
| d25f1d1357 |
| 2ab290ff2d |
| ef88507d23 |
| df9a012013 |
| 9160d91278 |
| ab11f18957 |
| ea30547754 |
| 4a0778a3d5 |
| 1270949909 |
| 180396b5df |
| 504b17b474 |
| dd81f59d9a |
| 99478f5d25 |
| b3ad12f31a |
| 926d6a6525 |
| e84642b4a1 |
| ef3c0cfb38 |
| 9a4439e731 |
| c481919a49 |
| 58f0e42bd6 |
| 70af2dd66b |
| 40dbe70854 |
| a7368cec43 |
| 636153d92c |
| ae9377e0e4 |
| c586fa9821 |
| cb4d55b47a |
| e398a0ac5e |
| bc6df9cab8 |
| efb4fb6fd0 |
| f2be4de544 |
| 73f16b2bee |
| b394766075 |
| df11a7fd3d |
| 1ae4ed4922 |
| 8a72a4d39d |
| 127495b53d |
| 017d367749 |
.dput.cf (6 changes)
@@ -1,6 +0,0 @@
-[mini_dinstall]
-fqdn = localhost
-method = local
-incoming = FIXME/deb-repo/mini-dinstall/incoming
-run_dinstall = 0
-post_upload_command = mini-dinstall -b -v
@@ -1,20 +0,0 @@
-root = true
-
-[*]
-end_of_line = lf
-insert_final_newline = true
-
-[Makefile]
-indent_style = tab
-
-[**.py]
-indent_style = space
-indent_size = 4
-
-[**.{js,less,html}]
-indent_style = space
-indent_size = 4
-
-[**.{json}]
-indent_style = space
-indent_size = 2
.github/ISSUE_TEMPLATE/bug_report.md (vendored, 6 changes)
@@ -7,12 +7,6 @@ about: Create a report to help us improve
 ##### ISSUE TYPE
 - Bug Report
 
-##### COMPONENT NAME
-<!-- Pick the area of AWX for this issue, you can have multiple, delete the rest: -->
-- API
-- UI
-- Installer
-
 ##### SUMMARY
 <!-- Briefly describe the problem. -->
 
.github/ISSUE_TEMPLATE/feature_request.md (vendored, 11 changes)
@@ -7,16 +7,5 @@ about: Suggest an idea for this project
 ##### ISSUE TYPE
 - Feature Idea
 
-##### COMPONENT NAME
-<!-- Pick the area of AWX for this issue, you can have multiple, delete the rest: -->
-- API
-- UI
-- Installer
-
 ##### SUMMARY
 <!-- Briefly describe the problem or desired enhancement. -->
-
-##### ADDITIONAL INFORMATION
-
-<!-- Include any links to sosreport, database dumps, screenshots or other
-information. -->
.gitignore (vendored, 2 changes)
@@ -8,6 +8,7 @@ reference-schema.json
 .tags1
 
 # Tower
+awx-dev
 awx/settings/local_*.py*
 awx/*.sqlite3
 awx/*.sqlite3_*
@@ -29,6 +30,7 @@ awx/ui/templates/ui/index.html
 awx/ui/templates/ui/installing.html
 /tower-license
 /tower-license/**
+tools/prometheus/data
 
 # Tower setup playbook testing
 setup/test/roles/postgresql
@@ -1,16 +0,0 @@
-[DEFAULT]
-archivedir = FIXME/deb-repo
-mail_to =
-verify_sigs = false
-architectures = all, amd64
-archive_style = flat
-generate_release = true
-mail_on_success = false
-release_codename = ansible-tower
-release_description = Ansible Tower
-release_label = ansible-tower
-release_origin = ansible-tower
-
-[trusty]
-
-[precise]
@@ -1,6 +0,0 @@
-[MASTER]
-
-# Add files or directories to the blacklist. They should be base names, not
-# paths.
-ignore=site-packages,ui,migrations,data
-
INSTALL.md (18 changes)
@@ -59,10 +59,13 @@ Before you can run a deployment, you'll need the following installed in your loc
 
 - [Ansible](http://docs.ansible.com/ansible/latest/intro_installation.html) Requires Version 2.4+
 - [Docker](https://docs.docker.com/engine/installation/)
-- [docker-py](https://github.com/docker/docker-py) Python module
+  + A recent version
+- [docker](https://pypi.org/project/docker/) Python module
+  + This is incompatible with `docker-py`. If you have previously installed `docker-py`, please uninstall it.
+  + We use this module instead of `docker-py` because it is what the `docker-compose` Python module requires.
 - [GNU Make](https://www.gnu.org/software/make/)
 - [Git](https://git-scm.com/) Requires Version 1.8.4+
-- [Node 8.x LTS version](https://nodejs.org/en/download/)
+- [Node 10.x LTS version](https://nodejs.org/en/download/)
 - [NPM 6.x LTS](https://docs.npmjs.com/)
 
 ### System Requirements
@@ -81,7 +84,7 @@ The system that runs the AWX service will need to satisfy the following requirem
 
 ### Choose a deployment platform
 
-We currently support running AWX as a containerized application using Docker images deployed to either an OpenShift cluster or docker-compose. The remainder of this document will walk you through the process of building the images, and deploying them to either platform.
+We currently support running AWX as a containerized application using Docker images deployed to either an OpenShift cluster, a Kubernetes cluster, or docker-compose. The remainder of this document will walk you through the process of building the images, and deploying them to either platform.
 
 The [installer](./installer) directory contains an [inventory](./installer/inventory) file, and a playbook, [install.yml](./installer/install.yml). You'll begin by setting variables in the inventory file according to the platform you wish to use, and then you'll start the image build and deployment process by running the playbook.
 
@@ -396,7 +399,8 @@ Unlike Openshift's `Route` the Kubernetes `Ingress` doesn't yet handle SSL termi
 ### Prerequisites
 
 - [Docker](https://docs.docker.com/engine/installation/) on the host where AWX will be deployed. After installing Docker, the Docker service must be started (depending on your OS, you may have to add the local user that uses Docker to the ``docker`` group, refer to the documentation for details)
-- [docker-py](https://github.com/docker/docker-py) Python module.
+- [docker-compose](https://pypi.org/project/docker-compose/) Python module.
+  + This also installs the `docker` Python module, which is incompatible with `docker-py`. If you have previously installed `docker-py`, please uninstall it.
 - [Docker Compose](https://docs.docker.com/compose/install/).
 
 ### Pre-build steps
@@ -440,13 +444,17 @@ Before starting the build process, review the [inventory](./installer/inventory)
 
 > Provide a port number that can be mapped from the Docker daemon host to the web server running inside the AWX container. Defaults to *80*.
 
 *host_port_ssl*
 
 > Provide a port number that can be mapped from the Docker daemon host to the web server running inside the AWX container for SSL support. Defaults to *443*, only works if you also set `ssl_certificate` (see below).
 
 *ssl_certificate*
 
 > Optionally, provide the path to a file that contains a certificate and its private key.
 
 *docker_compose_dir*
 
-When using docker-compose, the `docker-compose.yml` file will be created there (default `/tmp/awxcompose`).
+> When using docker-compose, the `docker-compose.yml` file will be created there (default `/tmp/awxcompose`).
 
 *ca_trust_dir*
 
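As a quick sanity check for the docker-py vs. docker note above: both packages install a top-level module named `docker`, so the version number is the simplest discriminator (docker-py's releases stop at 1.x, while the replacement `docker` package starts at 2.0). A minimal sketch, assuming one of the two packages is installed:

```python
# Minimal sketch: check which Docker SDK is present by version number.
# docker-py ends at 1.x; the `docker` package starts at 2.0.
import docker

ver = str(getattr(docker, '__version__', getattr(docker, 'version', '0')))
if int(ver.split('.')[0]) >= 2:
    print('modern docker SDK detected:', ver)
else:
    print('legacy docker-py detected (%s); uninstall it first' % ver)
```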
Makefile (60 changes)
@@ -60,7 +60,7 @@ I18N_FLAG_FILE = .i18n_built
 
 .PHONY: awx-link clean clean-tmp clean-venv requirements requirements_dev \
 	develop refresh adduser migrate dbchange dbshell runserver \
-	receiver test test_unit test_ansible test_coverage coverage_html \
+	receiver test test_unit test_coverage coverage_html \
 	dev_build release_build release_clean sdist \
 	ui-docker-machine ui-docker ui-release ui-devel \
 	ui-test ui-deps ui-test-ci VERSION
@@ -218,7 +218,7 @@ init:
 	if [ "$(AWX_GROUP_QUEUES)" == "tower,thepentagon" ]; then \
 		$(MANAGEMENT_COMMAND) provision_instance --hostname=isolated; \
 		$(MANAGEMENT_COMMAND) register_queue --queuename='thepentagon' --hostnames=isolated --controller=tower; \
-		$(MANAGEMENT_COMMAND) generate_isolated_key > /awx_devel/awx/main/expect/authorized_keys; \
+		$(MANAGEMENT_COMMAND) generate_isolated_key > /awx_devel/awx/main/isolated/authorized_keys; \
 	fi;
 
 # Refresh development environment after pulling new code.
@@ -269,15 +269,7 @@ supervisor:
 	@if [ "$(VENV_BASE)" ]; then \
 		. $(VENV_BASE)/awx/bin/activate; \
 	fi; \
-	supervisord --pidfile=/tmp/supervisor_pid
-
-# Alternate approach to tmux to run all development tasks specified in
-# Procfile.
-honcho:
-	@if [ "$(VENV_BASE)" ]; then \
-		. $(VENV_BASE)/awx/bin/activate; \
-	fi; \
-	honcho start -f tools/docker-compose/Procfile
+	supervisord --pidfile=/tmp/supervisor_pid -n
 
 collectstatic:
 	@if [ "$(VENV_BASE)" ]; then \
@@ -289,7 +281,7 @@ uwsgi: collectstatic
 	@if [ "$(VENV_BASE)" ]; then \
 		. $(VENV_BASE)/awx/bin/activate; \
 	fi; \
-	uwsgi -b 32768 --socket 127.0.0.1:8050 --module=awx.wsgi:application --home=/venv/awx --chdir=/awx_devel/ --vacuum --processes=5 --harakiri=120 --master --no-orphans --py-autoreload 1 --max-requests=1000 --stats /tmp/stats.socket --lazy-apps --logformat "%(addr) %(method) %(uri) - %(proto) %(status)" --hook-accepting1-once="exec:awx-manage run_dispatcher --reload"
+	uwsgi -b 32768 --socket 127.0.0.1:8050 --module=awx.wsgi:application --home=/venv/awx --chdir=/awx_devel/ --vacuum --processes=5 --harakiri=120 --master --no-orphans --py-autoreload 1 --max-requests=1000 --stats /tmp/stats.socket --lazy-apps --logformat "%(addr) %(method) %(uri) - %(proto) %(status)" --hook-accepting1="exec:supervisorctl restart tower-processes:awx-dispatcher tower-processes:awx-receiver"
 
 daphne:
 	@if [ "$(VENV_BASE)" ]; then \
@@ -353,7 +345,8 @@ pylint: reports
 	@(set -o pipefail && $@ | reports/$@.report)
 
 genschema: reports
-	$(MAKE) swagger PYTEST_ARGS="--genschema"
+	$(MAKE) swagger PYTEST_ARGS="--genschema --create-db "
 	mv swagger.json schema.json
 
 swagger: reports
 	@if [ "$(VENV_BASE)" ]; then \
@@ -378,20 +371,12 @@ test:
 	PYTHONDONTWRITEBYTECODE=1 py.test -p no:cacheprovider -n auto $(TEST_DIRS)
 	awx-manage check_migrations --dry-run --check -n 'vNNN_missing_migration_file'
 
-test_combined: test_ansible test
-
 test_unit:
 	@if [ "$(VENV_BASE)" ]; then \
 		. $(VENV_BASE)/awx/bin/activate; \
 	fi; \
 	py.test awx/main/tests/unit awx/conf/tests/unit awx/sso/tests/unit
 
-test_ansible:
-	@if [ "$(VENV_BASE)" ]; then \
-		. $(VENV_BASE)/ansible/bin/activate; \
-	fi; \
-	py.test awx/lib/tests -c awx/lib/tests/pytest.ini
-
 # Run all API unit tests with coverage enabled.
 test_coverage:
 	@if [ "$(VENV_BASE)" ]; then \
@@ -512,6 +497,10 @@ ui-devel: $(UI_DEPS_FLAG_FILE)
 ui-test: $(UI_DEPS_FLAG_FILE)
 	$(NPM_BIN) --prefix awx/ui run test
 
+ui-lint: $(UI_DEPS_FLAG_FILE)
+	$(NPM_BIN) run --prefix awx/ui jshint
+	$(NPM_BIN) run --prefix awx/ui lint
+
 # A standard go-to target for API developers to use building the frontend
 ui: clean-ui ui-devel
 
@@ -562,21 +551,22 @@ docker-auth:
 	fi;
 
 # Docker isolated rampart
-docker-isolated:
-	TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose -f tools/docker-compose.yml -f tools/docker-isolated-override.yml create
-	docker start tools_awx_1
-	docker start tools_isolated_1
+docker-compose-isolated:
+	CURRENT_UID=$(shell id -u) TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose -f tools/docker-compose.yml -f tools/docker-isolated-override.yml up
 
 # Docker Compose Development environment
 docker-compose: docker-auth
-	CURRENT_UID=$(shell id -u) TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose -f tools/docker-compose.yml up --no-recreate awx
+	CURRENT_UID=$(shell id -u) OS="$(shell docker info | grep 'Operating System')" TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose -f tools/docker-compose.yml up --no-recreate awx
 
 docker-compose-cluster: docker-auth
	CURRENT_UID=$(shell id -u) TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose -f tools/docker-compose-cluster.yml up
 
 docker-compose-credential-plugins: docker-auth
 	echo -e "\033[0;31mTo generate a CyberArk Conjur API key: docker exec -it tools_conjur_1 conjurctl account create quick-start\033[0m"
 	CURRENT_UID=$(shell id -u) TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose -f tools/docker-compose.yml -f tools/docker-credential-plugins-override.yml up --no-recreate awx
 
 docker-compose-test: docker-auth
-	cd tools && CURRENT_UID=$(shell id -u) TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose run --rm --service-ports awx /bin/bash
+	cd tools && CURRENT_UID=$(shell id -u) OS="$(shell docker info | grep 'Operating System')" TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose run --rm --service-ports awx /bin/bash
 
 docker-compose-runtest:
 	cd tools && CURRENT_UID=$(shell id -u) TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose run --rm --service-ports awx /start_tests.sh
@@ -584,12 +574,7 @@ docker-compose-runtest:
 docker-compose-build-swagger:
 	cd tools && CURRENT_UID=$(shell id -u) TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose run --rm --service-ports awx /start_tests.sh swagger
 
-docker-compose-genschema:
-	cd tools && CURRENT_UID=$(shell id -u) TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose run --rm --service-ports awx /start_tests.sh genschema
-	mv swagger.json schema.json
-
-docker-compose-detect-schema-change:
-	$(MAKE) docker-compose-genschema
+detect-schema-change: genschema
 	curl https://s3.amazonaws.com/awx-public-ci-files/schema.json -o reference-schema.json
 	# Ignore differences in whitespace with -b
 	diff -u -b reference-schema.json schema.json
@@ -602,12 +587,14 @@ docker-compose-build: awx-devel-build
 
 # Base development image build
 awx-devel-build:
-	docker build -t ansible/awx_devel -f tools/docker-compose/Dockerfile .
+	docker build -t ansible/awx_devel -f tools/docker-compose/Dockerfile \
+		--cache-from=$(DEV_DOCKER_TAG_BASE)/awx_devel:devel \
+		--cache-from=$(DEV_DOCKER_TAG_BASE)/awx_devel:$(COMPOSE_TAG) .
 	docker tag ansible/awx_devel $(DEV_DOCKER_TAG_BASE)/awx_devel:$(COMPOSE_TAG)
 	#docker push $(DEV_DOCKER_TAG_BASE)/awx_devel:$(COMPOSE_TAG)
 
 # For use when developing on "isolated" AWX deployments
-awx-isolated-build:
+docker-compose-isolated-build: awx-devel-build
 	docker build -t ansible/awx_isolated -f tools/docker-isolated/Dockerfile .
 	docker tag ansible/awx_isolated $(DEV_DOCKER_TAG_BASE)/awx_isolated:$(COMPOSE_TAG)
 	#docker push $(DEV_DOCKER_TAG_BASE)/awx_isolated:$(COMPOSE_TAG)
@@ -627,6 +614,9 @@ docker-compose-elk: docker-auth
 docker-compose-cluster-elk: docker-auth
 	TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose -f tools/docker-compose-cluster.yml -f tools/elastic/docker-compose.logstash-link-cluster.yml -f tools/elastic/docker-compose.elastic-override.yml up --no-recreate
 
+prometheus:
+	docker run -u0 --net=tools_default --link=`docker ps | egrep -o "tools_awx(_run)?_([^ ]+)?"`:awxweb --volume `pwd`/tools/prometheus:/prometheus --name prometheus -d -p 0.0.0.0:9090:9090 prom/prometheus --web.enable-lifecycle --config.file=/prometheus/prometheus.yml
+
 minishift-dev:
 	ansible-playbook -i localhost, -e devtree_directory=$(CURDIR) tools/clusterdevel/start_minishift_dev.yml
@@ -37,9 +37,9 @@ if HAS_DJANGO is True:
     # This line exists to make sure we don't regress on FIPS support if we
     # upgrade Django; if you're upgrading Django and see this error,
     # update the version check below, and confirm that FIPS still works.
-    if django.__version__ != '1.11.16':
-        raise RuntimeError("Django version other than 1.11.16 detected {}. \
-Subclassing BaseDatabaseSchemaEditor is known to work for Django 1.11.16 \
+    if django.__version__ != '1.11.20':
+        raise RuntimeError("Django version other than 1.11.20 detected {}. \
+Subclassing BaseDatabaseSchemaEditor is known to work for Django 1.11.20 \
 and may not work in newer Django versions.".format(django.__version__))
 
 
@@ -24,20 +24,6 @@ from rest_framework.filters import BaseFilterBackend
 # AWX
 from awx.main.utils import get_type_for_model, to_python_boolean
 from awx.main.utils.db import get_all_field_names
-from awx.main.models.credential import CredentialType
-
-
-class V1CredentialFilterBackend(BaseFilterBackend):
-    '''
-    For /api/v1/ requests, filter out v2 (custom) credentials
-    '''
-
-    def filter_queryset(self, request, queryset, view):
-        # TODO: remove in 3.3
-        from awx.api.versioning import get_request_version
-        if get_request_version(request) == 1:
-            queryset = queryset.filter(credential_type__managed_by_tower=True)
-        return queryset
 
 
 class TypeFilterBackend(BaseFilterBackend):
@@ -223,7 +209,7 @@ class FieldLookupBackend(BaseFilterBackend):
                 raise ValueError('%s is not searchable' % new_lookup[:-8])
             new_lookups = []
             for rm_field in related_model._meta.fields:
-                if rm_field.name in ('username', 'first_name', 'last_name', 'email', 'name', 'description'):
+                if rm_field.name in ('username', 'first_name', 'last_name', 'email', 'name', 'description', 'playbook'):
                     new_lookups.append('{}__{}__icontains'.format(new_lookup[:-8], rm_field.name))
             return value, new_lookups
         else:
@@ -292,39 +278,6 @@ class FieldLookupBackend(BaseFilterBackend):
             key = key[5:]
             q_not = True
 
-        # Make legacy v1 Job/Template fields work for backwards compatability
-        # TODO: remove after API v1 deprecation period
-        if queryset.model._meta.object_name in ('JobTemplate', 'Job') and key in (
-            'credential', 'vault_credential', 'cloud_credential', 'network_credential'
-        ) or queryset.model._meta.object_name in ('InventorySource', 'InventoryUpdate') and key == 'credential':
-            key = 'credentials'
-
-        # Make legacy v1 Credential fields work for backwards compatability
-        # TODO: remove after API v1 deprecation period
-        #
-        # convert v1 `Credential.kind` queries to `Credential.credential_type__pk`
-        if queryset.model._meta.object_name == 'Credential' and key == 'kind':
-            key = key.replace('kind', 'credential_type')
-
-            if 'ssh' in values:
-                # In 3.2, SSH and Vault became separate credential types, but in the v1 API,
-                # they're both still "kind=ssh"
-                # under the hood, convert `/api/v1/credentials/?kind=ssh` to
-                # `/api/v1/credentials/?or__credential_type=<ssh_pk>&or__credential_type=<vault_pk>`
-                values = set(values)
-                values.add('vault')
-                values = list(values)
-                q_or = True
-
-            for i, kind in enumerate(values):
-                if kind == 'vault':
-                    type_ = CredentialType.objects.get(kind=kind)
-                else:
-                    type_ = CredentialType.from_v1_kind(kind)
-                if type_ is None:
-                    raise ParseError(_('cannot filter on kind %s') % kind)
-                values[i] = type_.pk
-
         # Convert value(s) to python and add to the appropriate list.
         for value in values:
             if q_int:
@@ -402,6 +355,8 @@ class OrderByBackend(BaseFilterBackend):
             order_by = value.split(',')
         else:
             order_by = (value,)
+        if order_by is None:
+            order_by = self.get_default_ordering(view)
         if order_by:
             order_by = self._validate_ordering_fields(queryset.model, order_by)
 
@@ -428,6 +383,12 @@ class OrderByBackend(BaseFilterBackend):
             # Return a 400 for invalid field names.
             raise ParseError(*e.args)
 
+    def get_default_ordering(self, view):
+        ordering = getattr(view, 'ordering', None)
+        if isinstance(ordering, str):
+            return (ordering,)
+        return ordering
+
     def _validate_ordering_fields(self, model, order_by):
         for field_name in order_by:
             # strip off the negation prefix `-` if it exists
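To make the related-field search change above concrete, here is a small sketch of the lookup fan-out that FieldLookupBackend performs for `*__search` parameters; the `job_template__search` key is a hypothetical example, not taken from this diff:

```python
# Sketch of the *__search fan-out: strip the 8-character '__search' suffix,
# then build one icontains lookup per whitelisted related column (the diff
# above adds 'playbook' to that whitelist).
fields = ('username', 'first_name', 'last_name', 'email',
          'name', 'description', 'playbook')
new_lookup = 'job_template__search'  # hypothetical query parameter
lookups = ['{}__{}__icontains'.format(new_lookup[:-8], f) for f in fields]
print(lookups[:2])  # ['job_template__username__icontains', 'job_template__first_name__icontains']
```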
@@ -34,7 +34,7 @@ from rest_framework.negotiation import DefaultContentNegotiation
 # AWX
 from awx.api.filters import FieldLookupBackend
 from awx.main.models import (
-    UnifiedJob, UnifiedJobTemplate, User, Role
+    UnifiedJob, UnifiedJobTemplate, User, Role, Credential
 )
 from awx.main.access import access_registry
 from awx.main.utils import (
@@ -46,7 +46,7 @@ from awx.main.utils import (
 )
 from awx.main.utils.db import get_all_field_names
 from awx.api.serializers import ResourceAccessListElementSerializer, CopySerializer, UserSerializer
-from awx.api.versioning import URLPathVersioning, get_request_version
+from awx.api.versioning import URLPathVersioning
 from awx.api.metadata import SublistAttachDetatchMetadata, Metadata
 
 __all__ = ['APIView', 'GenericAPIView', 'ListAPIView', 'SimpleListAPIView',
@@ -119,39 +119,12 @@ class LoggedLogoutView(auth_views.LogoutView):
         return ret
 
 
-def get_view_name(cls, suffix=None):
-    '''
-    Wrapper around REST framework get_view_name() to support get_name() method
-    and view_name property on a view class.
-    '''
-    name = ''
-    if hasattr(cls, 'get_name') and callable(cls.get_name):
-        name = cls().get_name()
-    elif hasattr(cls, 'view_name'):
-        if callable(cls.view_name):
-            name = cls.view_name()
-        else:
-            name = cls.view_name
-    if name:
-        return ('%s %s' % (name, suffix)) if suffix else name
-    return views.get_view_name(cls, suffix=None)
-
-
-def get_view_description(cls, request, html=False):
-    '''
-    Wrapper around REST framework get_view_description() to support
-    get_description() method and view_description property on a view class.
-    '''
-    if hasattr(cls, 'get_description') and callable(cls.get_description):
-        desc = cls().get_description(request, html=html)
-        cls = type(cls.__name__, (object,), {'__doc__': desc})
-    elif hasattr(cls, 'view_description'):
-        if callable(cls.view_description):
-            view_desc = cls.view_description()
-        else:
-            view_desc = cls.view_description
-        cls = type(cls.__name__, (object,), {'__doc__': view_desc})
-    desc = views.get_view_description(cls, html=html)
+def get_view_description(view, html=False):
+    '''Wrapper around REST framework get_view_description() to continue
+    to support our historical div.
+    '''
+    desc = views.get_view_description(view, html=html)
     if html:
         desc = '<div class="description">%s</div>' % desc
     return mark_safe(desc)
@@ -264,14 +237,6 @@ class APIView(views.APIView):
         # `curl https://user:pass@tower.example.org/api/v2/job_templates/N/launch/`
         return 'Bearer realm=api authorization_url=/api/o/authorize/'
 
-    def get_view_description(self, html=False):
-        """
-        Return some descriptive text for the view, as used in OPTIONS responses
-        and in the browsable API.
-        """
-        func = self.settings.VIEW_DESCRIPTION_FUNCTION
-        return func(self.__class__, getattr(self, '_request', None), html)
-
     def get_description_context(self):
         return {
             'view': self,
@@ -280,20 +245,14 @@ class APIView(views.APIView):
             'swagger_method': getattr(self.request, 'swagger_method', None),
         }
 
-    def get_description(self, request, html=False):
-        self.request = request
+    @property
+    def description(self):
         template_list = []
         for klass in inspect.getmro(type(self)):
             template_basename = camelcase_to_underscore(klass.__name__)
             template_list.append('api/%s.md' % template_basename)
         context = self.get_description_context()
 
-        # "v2" -> 2
-        default_version = int(settings.REST_FRAMEWORK['DEFAULT_VERSION'].lstrip('v'))
-        request_version = get_request_version(self.request)
-        if request_version is not None and request_version < default_version:
-            context['deprecated'] = True
-
         description = render_to_string(template_list, context)
         if context.get('deprecated') and context.get('swagger_method') is None:
             # render deprecation messages at the very top
@@ -389,12 +348,14 @@ class GenericAPIView(generics.GenericAPIView, APIView):
             'model_verbose_name_plural': smart_text(self.model._meta.verbose_name_plural),
         })
         serializer = self.get_serializer()
+        metadata = self.metadata_class()
+        metadata.request = self.request
         for method, key in [
             ('GET', 'serializer_fields'),
             ('POST', 'serializer_create_fields'),
             ('PUT', 'serializer_update_fields')
         ]:
-            d[key] = self.metadata_class().get_serializer_info(serializer, method=method)
+            d[key] = metadata.get_serializer_info(serializer, method=method)
         d['settings'] = settings
         return d
 
@@ -815,6 +776,7 @@ class RetrieveUpdateDestroyAPIView(RetrieveUpdateAPIView, DestroyAPIView):
 class ResourceAccessList(ParentMixin, ListAPIView):
 
     serializer_class = ResourceAccessListElementSerializer
+    ordering = ('username',)
 
     def get_queryset(self):
         obj = self.get_parent_object()
@@ -841,10 +803,6 @@ class CopyAPIView(GenericAPIView):
     new_in_330 = True
     new_in_api_v2 = True
 
-    def v1_not_allowed(self):
-        return Response({'detail': 'Action only possible starting with v2 API.'},
-                        status=status.HTTP_404_NOT_FOUND)
-
     def _get_copy_return_serializer(self, *args, **kwargs):
         if not self.copy_return_serializer_class:
             return self.get_serializer(*args, **kwargs)
@@ -858,15 +816,15 @@ class CopyAPIView(GenericAPIView):
     def _decrypt_model_field_if_needed(obj, field_name, field_val):
         if field_name in getattr(type(obj), 'REENCRYPTION_BLACKLIST_AT_COPY', []):
             return field_val
-        if isinstance(field_val, dict):
+        if isinstance(obj, Credential) and field_name == 'inputs':
+            for secret in obj.credential_type.secret_fields:
+                if secret in field_val:
+                    field_val[secret] = decrypt_field(obj, secret)
+        elif isinstance(field_val, dict):
             for sub_field in field_val:
                 if isinstance(sub_field, str) \
                         and isinstance(field_val[sub_field], str):
-                    try:
-                        field_val[sub_field] = decrypt_field(obj, field_name, sub_field)
-                    except AttributeError:
-                        # Catching the corner case with v1 credential fields
-                        field_val[sub_field] = decrypt_field(obj, sub_field)
+                    field_val[sub_field] = decrypt_field(obj, field_name, sub_field)
         elif isinstance(field_val, str):
             try:
                 field_val = decrypt_field(obj, field_name)
@@ -951,21 +909,20 @@ class CopyAPIView(GenericAPIView):
         return ret
 
     def get(self, request, *args, **kwargs):
-        if get_request_version(request) < 2:
-            return self.v1_not_allowed()
         obj = self.get_object()
         if not request.user.can_access(obj.__class__, 'read', obj):
             raise PermissionDenied()
         create_kwargs = self._build_create_dict(obj)
         for key in create_kwargs:
             create_kwargs[key] = getattr(create_kwargs[key], 'pk', None) or create_kwargs[key]
-        can_copy = request.user.can_access(self.model, 'add', create_kwargs) and \
-            request.user.can_access(self.model, 'copy_related', obj)
+        try:
+            can_copy = request.user.can_access(self.model, 'add', create_kwargs) and \
+                request.user.can_access(self.model, 'copy_related', obj)
+        except PermissionDenied:
+            return Response({'can_copy': False})
         return Response({'can_copy': can_copy})
 
     def post(self, request, *args, **kwargs):
-        if get_request_version(request) < 2:
-            return self.v1_not_allowed()
         obj = self.get_object()
         create_kwargs = self._build_create_dict(obj)
         create_kwargs_check = {}
@@ -982,7 +939,7 @@ class CopyAPIView(GenericAPIView):
             None, None, self.model, obj, request.user, create_kwargs=create_kwargs,
             copy_name=serializer.validated_data.get('name', '')
         )
-        if hasattr(new_obj, 'admin_role') and request.user not in new_obj.admin_role:
+        if hasattr(new_obj, 'admin_role') and request.user not in new_obj.admin_role.members.all():
            new_obj.admin_role.members.add(request.user)
         if sub_objs:
             permission_check_func = None
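The `can_copy` change above swaps a bare permission expression for a try/except so that a denied check degrades to a negative answer instead of an error response. A framework-free sketch of that pattern (all names here are illustrative, not AWX's real API):

```python
# Sketch: permission failures during a capability probe should yield
# {'can_copy': False} rather than propagate as an exception.
class PermissionDenied(Exception):
    pass

def probe_can_copy(check):
    try:
        can_copy = check()          # may raise PermissionDenied internally
    except PermissionDenied:
        return {'can_copy': False}  # denied is a valid, non-error answer
    return {'can_copy': can_copy}

def denied_check():
    raise PermissionDenied()

print(probe_can_copy(denied_check))  # {'can_copy': False}
```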
@@ -232,19 +232,6 @@ class RoleMetadata(Metadata):
         return metadata
 
 
-# TODO: Tower 3.3 remove class and all uses in views.py when API v1 is removed
-class JobTypeMetadata(Metadata):
-    def get_field_info(self, field):
-        res = super(JobTypeMetadata, self).get_field_info(field)
-
-        if field.field_name == 'job_type':
-            res['choices'] = [
-                choice for choice in res['choices']
-                if choice[0] != 'scan'
-            ]
-        return res
-
-
 class SublistAttachDetatchMetadata(Metadata):
 
     def determine_actions(self, request, view):
awx/api/metrics.py (new file, 15 lines)
@@ -0,0 +1,15 @@
+# Copyright (c) 2017 Ansible, Inc.
+# All Rights Reserved.
+
+from django.conf.urls import url
+
+from awx.api.views import (
+    MetricsView
+)
+
+
+urls = [
+    url(r'^$', MetricsView.as_view(), name='metrics_view'),
+]
+
+__all__ = ['urls']
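For reference, exercising the metrics route wired up above would look roughly like this; the host, credentials, and the `/api/v2/metrics/` mount point are assumptions about how these urls are included, not shown in this diff:

```python
# Illustrative request against the metrics endpoint; values are placeholders.
import requests

r = requests.get('http://localhost:8013/api/v2/metrics/',
                 auth=('admin', 'password'))
print(r.status_code)
print(r.text[:200])  # Prometheus-style exposition text, if enabled
```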
@@ -18,7 +18,7 @@ class Pagination(pagination.PageNumberPagination):
         url = self.request and self.request.get_full_path() or ''
         url = url.encode('utf-8')
         page_number = self.page.next_page_number()
-        return replace_query_param(url, self.page_query_param, page_number)
+        return replace_query_param(self.cap_page_size(url), self.page_query_param, page_number)
 
     def get_previous_link(self):
         if not self.page.has_previous():
@@ -26,4 +26,16 @@ class Pagination(pagination.PageNumberPagination):
         url = self.request and self.request.get_full_path() or ''
         url = url.encode('utf-8')
         page_number = self.page.previous_page_number()
-        return replace_query_param(url, self.page_query_param, page_number)
+        return replace_query_param(self.cap_page_size(url), self.page_query_param, page_number)
+
+    def cap_page_size(self, url):
+        if int(self.request.query_params.get(self.page_size_query_param, 0)) > self.max_page_size:
+            url = replace_query_param(url, self.page_size_query_param, self.max_page_size)
+        return url
+
+    def get_html_context(self):
+        context = super().get_html_context()
+        context['page_links'] = [pl._replace(url=self.cap_page_size(pl.url))
+                                 for pl in context['page_links']]
+
+        return context
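The effect of `cap_page_size` above is that an oversized `page_size` query parameter gets rewritten down to `max_page_size` in the pagination links. A standalone sketch of the same behaviour, assuming a hypothetical cap of 200 (DRF's `replace_query_param` is the real helper used above):

```python
# Sketch of the page-size cap: rewrite page_size in a URL when the client
# requests more rows than the configured maximum allows.
from rest_framework.utils.urls import replace_query_param

MAX_PAGE_SIZE = 200  # hypothetical cap standing in for Pagination.max_page_size

def cap_page_size(url, requested):
    if requested > MAX_PAGE_SIZE:
        url = replace_query_param(url, 'page_size', MAX_PAGE_SIZE)
    return url

print(cap_page_size('/api/v2/jobs/?page_size=10000', 10000))
# -> /api/v2/jobs/?page_size=200
```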
@@ -15,7 +15,7 @@ from awx.main.utils import get_object_or_400
 logger = logging.getLogger('awx.api.permissions')
 
-__all__ = ['ModelAccessPermission', 'JobTemplateCallbackPermission',
+__all__ = ['ModelAccessPermission', 'JobTemplateCallbackPermission', 'VariableDataPermission',
            'TaskPermission', 'ProjectUpdatePermission', 'InventoryInventorySourcesUpdatePermission',
            'UserPermission', 'IsSuperUser', 'InstanceGroupTowerPermission',]
 
@@ -74,12 +74,8 @@ class ModelAccessPermission(permissions.BasePermission):
         # FIXME: For some reason this needs to return True
         # because it is first called with obj=None?
             return True
-        if getattr(view, 'is_variable_data', False):
-            return check_user_access(request.user, view.model, 'change', obj,
-                                     dict(variables=request.data))
-        else:
-            return check_user_access(request.user, view.model, 'change', obj,
-                                     request.data)
+        return check_user_access(request.user, view.model, 'change', obj,
+                                 request.data)
 
     def check_patch_permissions(self, request, view, obj=None):
         return self.check_put_permissions(request, view, obj)
@@ -103,8 +99,7 @@ class ModelAccessPermission(permissions.BasePermission):
             return False
 
         # Always allow superusers
-        if getattr(view, 'always_allow_superuser', True) and request.user.is_superuser \
-                and not hasattr(request.user, 'oauth_scopes'):
+        if getattr(view, 'always_allow_superuser', True) and request.user.is_superuser:
             return True
 
         # Check if view supports the request method before checking permission
@@ -164,6 +159,15 @@ class JobTemplateCallbackPermission(ModelAccessPermission):
         return True
 
 
+class VariableDataPermission(ModelAccessPermission):
+
+    def check_put_permissions(self, request, view, obj=None):
+        if not obj:
+            return True
+        return check_user_access(request.user, view.model, 'change', obj,
+                                 dict(variables=request.data))
+
+
 class TaskPermission(ModelAccessPermission):
     '''
     Permission checks used for API callbacks from running a task.
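The refactor above replaces a per-view `is_variable_data` flag checked inside one permission class with a dedicated `VariableDataPermission` subclass. A framework-free sketch of that design move (names illustrative):

```python
# Sketch: behaviour variants as subclasses instead of flag checks.
class ModelAccessPermission:
    def check_put(self, model, data):
        return self.check_access(model, data)

    def check_access(self, model, payload):
        print('checking change on %s with %r' % (model, payload))
        return True

class VariableDataPermission(ModelAccessPermission):
    # variable_data endpoints wrap the request body as {'variables': ...}
    def check_put(self, model, data):
        return self.check_access(model, {'variables': data})

VariableDataPermission().check_put('Host', {'ansible_host': '10.0.0.1'})
```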
File diff suppressed because it is too large.
@@ -53,7 +53,6 @@ class AutoSchema(DRFAuthSchema):
         return link
 
     def get_description(self, path, method):
-        self.view._request = self.view.request
         setattr(self.view.request, 'swagger_method', method)
         description = super(AutoSchema, self).get_description(path, method)
         return description
@@ -5,7 +5,7 @@ The following lists the expected format and details of our rrules:
 * INTERVAL is required
 * SECONDLY is not supported
 * TZID is not supported
-* RRULE must preceed the rule statements
+* RRULE must precede the rule statements
 * BYDAY is supported but not BYDAY with a numerical prefix
 * BYYEARDAY and BYWEEKNO are not supported
 * Only one rrule statement per schedule is supported
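For orientation, here is an rrule that satisfies every constraint in the list above; the values are illustrative, and the DTSTART-plus-RRULE layout is an assumption about the schedule format these rules describe:

```python
# One RRULE statement, FREQ and INTERVAL present, a UTC DTSTART instead of
# TZID, and plain BYDAY values with no numeric prefix.
rrule = 'DTSTART:20200101T000000Z RRULE:FREQ=WEEKLY;INTERVAL=1;BYDAY=MO,WE,FR'
```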
@@ -29,17 +29,6 @@ to the redirect_uri specified in the application. The client application will th
 AWX will respond with the `access_token`, `token_type`, `refresh_token`, and `expires_in`. For more
 information on testing this flow, refer to [django-oauth-toolkit](http://django-oauth-toolkit.readthedocs.io/en/latest/tutorial/tutorial_01.html#test-your-authorization-server).
 
-## Create Token for an Application using Implicit grant type
-Suppose we have an application "admin's app" of grant type `implicit`.
-In API browser, first make sure the user is logged in via session auth, then visit authorization
-endpoint with given parameters:
-```text
-http://localhost:8013/api/o/authorize/?response_type=token&client_id=L0uQQWW8pKX51hoqIRQGsuqmIdPi2AcXZ9EJRGmj&scope=read
-```
-Here the value of `client_id` should be the same as that of `client_id` field of underlying application.
-On success, an authorization page should be displayed asking the logged in user to grant/deny the access token.
-Once the user clicks on 'grant', the API browser will try POSTing to the same endpoint with the same parameters
-in POST body, on success a 302 redirect will be returned.
 
 ## Create Token for an Application using Password grant type
 
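A password-grant token request for the section above typically looks like the following; the host, user, and client credentials are placeholders, and `/api/o/token/` is django-oauth-toolkit's default token endpoint (an assumption, since the diff does not show it):

```python
# Illustrative password-grant token request (placeholder values).
import requests

resp = requests.post(
    'http://localhost:8013/api/o/token/',
    auth=('CLIENT_ID', 'CLIENT_SECRET'),
    data={'grant_type': 'password', 'username': 'joe',
          'password': 'secret', 'scope': 'read'},
)
print(resp.json())  # expect access_token, token_type, refresh_token, expires_in
```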
@@ -8,15 +8,15 @@ job template.
 For example, using curl:
 
-    curl -H "Content-Type: application/json" -d '{"host_config_key": "HOST_CONFIG_KEY"}' http://server/api/v1/job_templates/N/callback/
+    curl -H "Content-Type: application/json" -d '{"host_config_key": "HOST_CONFIG_KEY"}' http://server/api/v2/job_templates/N/callback/
 
 Or using wget:
 
-    wget -O /dev/null --post-data='{"host_config_key": "HOST_CONFIG_KEY"}' --header=Content-Type:application/json http://server/api/v1/job_templates/N/callback/
+    wget -O /dev/null --post-data='{"host_config_key": "HOST_CONFIG_KEY"}' --header=Content-Type:application/json http://server/api/v2/job_templates/N/callback/
 
 You may also pass `extra_vars` to the callback:
 
-    curl -H "Content-Type: application/json" -d '{"host_config_key": "HOST_CONFIG_KEY", "extra_vars": {"key": "value"}}' http://server/api/v1/job_templates/N/callback/
+    curl -H "Content-Type: application/json" -d '{"host_config_key": "HOST_CONFIG_KEY", "extra_vars": {"key": "value"}}' http://server/api/v2/job_templates/N/callback/
 
 The response will return status 202 if the request is valid, 403 for an
 invalid host config key, or 400 if the host cannot be determined from the
@@ -30,7 +30,7 @@ A GET request may be used to verify that the correct host will be selected.
 This request must authenticate as a valid user with permission to edit the
 job template. For example:
 
-    curl http://user:password@server/api/v1/job_templates/N/callback/
+    curl http://user:password@server/api/v2/job_templates/N/callback/
 
 The response will include the host config key as well as the host name(s)
 that would match the request:
@@ -3,7 +3,7 @@ Launch a Job Template:
 Make a POST request to this resource to launch the system job template.
 
 Variables specified inside of the parameter `extra_vars` are passed to the
-system job task as command line parameters. These tasks can be ran manually
+system job task as command line parameters. These tasks can be run manually
 on the host system via the `awx-manage` command.
 
 For example on `cleanup_jobs` and `cleanup_activitystream`:
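To illustrate the extra_vars passthrough described above: for the cleanup tasks, a launch payload like the following maps onto awx-manage flags (the `days` variable and the exact flag spelling are assumptions, not shown in this diff):

```python
# Illustrative system-job launch payload; extra_vars become CLI parameters.
import json

payload = {'extra_vars': {'days': 30}}
print(json.dumps(payload))
# POSTed to a system job template's launch endpoint, this runs roughly:
#   awx-manage cleanup_jobs --days=30
```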
@@ -6,4 +6,4 @@ One result should be returned containing the following fields:
 {% include "api/_result_fields_common.md" %}
 
-Use the primary URL for the user (/api/v1/users/N/) to modify the user.
+Use the primary URL for the user (/api/v2/users/N/) to modify the user.
@@ -4,4 +4,7 @@
 from __future__ import absolute_import, unicode_literals
 from .urls import urlpatterns
 
-__all__ = ['urlpatterns']
+__all__ = ['urlpatterns', 'app_name']
+
+app_name = 'api'
@@ -12,6 +12,8 @@ from awx.api.views import (
     CredentialOwnerUsersList,
     CredentialOwnerTeamsList,
     CredentialCopy,
+    CredentialInputSourceSubList,
+    CredentialExternalTest,
 )
 
 
@@ -24,6 +26,8 @@ urls = [
     url(r'^(?P<pk>[0-9]+)/owner_users/$', CredentialOwnerUsersList.as_view(), name='credential_owner_users_list'),
     url(r'^(?P<pk>[0-9]+)/owner_teams/$', CredentialOwnerTeamsList.as_view(), name='credential_owner_teams_list'),
     url(r'^(?P<pk>[0-9]+)/copy/$', CredentialCopy.as_view(), name='credential_copy'),
+    url(r'^(?P<pk>[0-9]+)/input_sources/$', CredentialInputSourceSubList.as_view(), name='credential_input_source_sublist'),
+    url(r'^(?P<pk>[0-9]+)/test/$', CredentialExternalTest.as_view(), name='credential_external_test'),
 ]
 
 __all__ = ['urls']
17
awx/api/urls/credential_input_source.py
Normal file
17
awx/api/urls/credential_input_source.py
Normal file
@@ -0,0 +1,17 @@
# Copyright (c) 2019 Ansible, Inc.
# All Rights Reserved.

from django.conf.urls import url

from awx.api.views import (
    CredentialInputSourceDetail,
    CredentialInputSourceList,
)


urls = [
    url(r'^$', CredentialInputSourceList.as_view(), name='credential_input_source_list'),
    url(r'^(?P<pk>[0-9]+)/$', CredentialInputSourceDetail.as_view(), name='credential_input_source_detail'),
]

__all__ = ['urls']
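A usage sketch for the new routes, assuming the `api` namespace established by `app_name` above and the `version` kwarg required by the enclosing urlpatterns:

    # Sketch: resolving the new credential input source routes by name.
    from django.urls import reverse

    reverse("api:credential_input_source_list", kwargs={"version": "v2"})
    # -> /api/v2/credential_input_sources/
    reverse("api:credential_input_source_detail",
            kwargs={"version": "v2", "pk": 1})
    # -> /api/v2/credential_input_sources/1/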
@@ -8,6 +8,7 @@ from awx.api.views import (
    CredentialTypeDetail,
    CredentialTypeCredentialList,
    CredentialTypeActivityStreamList,
    CredentialTypeExternalTest,
)


@@ -16,6 +17,7 @@ urls = [
    url(r'^(?P<pk>[0-9]+)/$', CredentialTypeDetail.as_view(), name='credential_type_detail'),
    url(r'^(?P<pk>[0-9]+)/credentials/$', CredentialTypeCredentialList.as_view(), name='credential_type_credential_list'),
    url(r'^(?P<pk>[0-9]+)/activity_stream/$', CredentialTypeActivityStreamList.as_view(), name='credential_type_activity_stream_list'),
    url(r'^(?P<pk>[0-9]+)/test/$', CredentialTypeExternalTest.as_view(), name='credential_type_external_test'),
]

__all__ = ['urls']

@@ -16,8 +16,6 @@ from awx.api.views import (
    HostSmartInventoriesList,
    HostAdHocCommandsList,
    HostAdHocCommandEventsList,
    HostFactVersionsList,
    HostFactCompareView,
    HostInsights,
)

@@ -35,8 +33,6 @@ urls = [
    url(r'^(?P<pk>[0-9]+)/smart_inventories/$', HostSmartInventoriesList.as_view(), name='host_smart_inventories_list'),
    url(r'^(?P<pk>[0-9]+)/ad_hoc_commands/$', HostAdHocCommandsList.as_view(), name='host_ad_hoc_commands_list'),
    url(r'^(?P<pk>[0-9]+)/ad_hoc_command_events/$', HostAdHocCommandEventsList.as_view(), name='host_ad_hoc_command_events_list'),
    url(r'^(?P<pk>[0-9]+)/fact_versions/$', HostFactVersionsList.as_view(), name='host_fact_versions_list'),
    url(r'^(?P<pk>[0-9]+)/fact_view/$', HostFactCompareView.as_view(), name='host_fact_compare_view'),
    url(r'^(?P<pk>[0-9]+)/insights/$', HostInsights.as_view(), name='host_insights'),
]

@@ -13,8 +13,8 @@ from awx.api.views import (
    InventorySourceCredentialsList,
    InventorySourceGroupsList,
    InventorySourceHostsList,
    InventorySourceNotificationTemplatesAnyList,
    InventorySourceNotificationTemplatesErrorList,
    InventorySourceNotificationTemplatesStartedList,
    InventorySourceNotificationTemplatesSuccessList,
)

@@ -29,8 +29,8 @@ urls = [
    url(r'^(?P<pk>[0-9]+)/credentials/$', InventorySourceCredentialsList.as_view(), name='inventory_source_credentials_list'),
    url(r'^(?P<pk>[0-9]+)/groups/$', InventorySourceGroupsList.as_view(), name='inventory_source_groups_list'),
    url(r'^(?P<pk>[0-9]+)/hosts/$', InventorySourceHostsList.as_view(), name='inventory_source_hosts_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_any/$', InventorySourceNotificationTemplatesAnyList.as_view(),
        name='inventory_source_notification_templates_any_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_started/$', InventorySourceNotificationTemplatesStartedList.as_view(),
        name='inventory_source_notification_templates_started_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', InventorySourceNotificationTemplatesErrorList.as_view(),
        name='inventory_source_notification_templates_error_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', InventorySourceNotificationTemplatesSuccessList.as_view(),

@@ -6,7 +6,6 @@ from django.conf.urls import url
from awx.api.views import (
    JobList,
    JobDetail,
    JobStart,
    JobCancel,
    JobRelaunch,
    JobCreateSchedule,
@@ -23,7 +22,6 @@ from awx.api.views import (
urls = [
    url(r'^$', JobList.as_view(), name='job_list'),
    url(r'^(?P<pk>[0-9]+)/$', JobDetail.as_view(), name='job_detail'),
    url(r'^(?P<pk>[0-9]+)/start/$', JobStart.as_view(), name='job_start'), # Todo: Remove In 3.3
    url(r'^(?P<pk>[0-9]+)/cancel/$', JobCancel.as_view(), name='job_cancel'),
    url(r'^(?P<pk>[0-9]+)/relaunch/$', JobRelaunch.as_view(), name='job_relaunch'),
    url(r'^(?P<pk>[0-9]+)/create_schedule/$', JobCreateSchedule.as_view(), name='job_create_schedule'),

@@ -13,8 +13,8 @@ from awx.api.views import (
    JobTemplateSchedulesList,
    JobTemplateSurveySpec,
    JobTemplateActivityStreamList,
    JobTemplateNotificationTemplatesAnyList,
    JobTemplateNotificationTemplatesErrorList,
    JobTemplateNotificationTemplatesStartedList,
    JobTemplateNotificationTemplatesSuccessList,
    JobTemplateInstanceGroupsList,
    JobTemplateAccessList,
@@ -34,8 +34,8 @@ urls = [
    url(r'^(?P<pk>[0-9]+)/schedules/$', JobTemplateSchedulesList.as_view(), name='job_template_schedules_list'),
    url(r'^(?P<pk>[0-9]+)/survey_spec/$', JobTemplateSurveySpec.as_view(), name='job_template_survey_spec'),
    url(r'^(?P<pk>[0-9]+)/activity_stream/$', JobTemplateActivityStreamList.as_view(), name='job_template_activity_stream_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_any/$', JobTemplateNotificationTemplatesAnyList.as_view(),
        name='job_template_notification_templates_any_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_started/$', JobTemplateNotificationTemplatesStartedList.as_view(),
        name='job_template_notification_templates_started_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', JobTemplateNotificationTemplatesErrorList.as_view(),
        name='job_template_notification_templates_error_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', JobTemplateNotificationTemplatesSuccessList.as_view(),

@@ -15,8 +15,8 @@ from awx.api.views import (
    OrganizationCredentialList,
    OrganizationActivityStreamList,
    OrganizationNotificationTemplatesList,
    OrganizationNotificationTemplatesAnyList,
    OrganizationNotificationTemplatesErrorList,
    OrganizationNotificationTemplatesStartedList,
    OrganizationNotificationTemplatesSuccessList,
    OrganizationInstanceGroupsList,
    OrganizationObjectRolesList,
@@ -25,7 +25,7 @@ from awx.api.views import (
)


urls = [
    url(r'^$', OrganizationList.as_view(), name='organization_list'),
    url(r'^(?P<pk>[0-9]+)/$', OrganizationDetail.as_view(), name='organization_detail'),
    url(r'^(?P<pk>[0-9]+)/users/$', OrganizationUsersList.as_view(), name='organization_users_list'),
@@ -37,8 +37,8 @@ urls = [
    url(r'^(?P<pk>[0-9]+)/credentials/$', OrganizationCredentialList.as_view(), name='organization_credential_list'),
    url(r'^(?P<pk>[0-9]+)/activity_stream/$', OrganizationActivityStreamList.as_view(), name='organization_activity_stream_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates/$', OrganizationNotificationTemplatesList.as_view(), name='organization_notification_templates_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_any/$', OrganizationNotificationTemplatesAnyList.as_view(),
        name='organization_notification_templates_any_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_started/$', OrganizationNotificationTemplatesStartedList.as_view(),
        name='organization_notification_templates_started_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', OrganizationNotificationTemplatesErrorList.as_view(),
        name='organization_notification_templates_error_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', OrganizationNotificationTemplatesSuccessList.as_view(),

@@ -14,8 +14,8 @@ from awx.api.views import (
    ProjectUpdatesList,
    ProjectActivityStreamList,
    ProjectSchedulesList,
    ProjectNotificationTemplatesAnyList,
    ProjectNotificationTemplatesErrorList,
    ProjectNotificationTemplatesStartedList,
    ProjectNotificationTemplatesSuccessList,
    ProjectObjectRolesList,
    ProjectAccessList,
@@ -34,10 +34,11 @@ urls = [
    url(r'^(?P<pk>[0-9]+)/project_updates/$', ProjectUpdatesList.as_view(), name='project_updates_list'),
    url(r'^(?P<pk>[0-9]+)/activity_stream/$', ProjectActivityStreamList.as_view(), name='project_activity_stream_list'),
    url(r'^(?P<pk>[0-9]+)/schedules/$', ProjectSchedulesList.as_view(), name='project_schedules_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_any/$', ProjectNotificationTemplatesAnyList.as_view(), name='project_notification_templates_any_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', ProjectNotificationTemplatesErrorList.as_view(), name='project_notification_templates_error_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', ProjectNotificationTemplatesSuccessList.as_view(),
        name='project_notification_templates_success_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_started/$', ProjectNotificationTemplatesStartedList.as_view(),
        name='project_notification_templates_started_list'),
    url(r'^(?P<pk>[0-9]+)/object_roles/$', ProjectObjectRolesList.as_view(), name='project_object_roles_list'),
    url(r'^(?P<pk>[0-9]+)/access_list/$', ProjectAccessList.as_view(), name='project_access_list'),
    url(r'^(?P<pk>[0-9]+)/copy/$', ProjectCopy.as_view(), name='project_copy'),

@@ -9,8 +9,8 @@ from awx.api.views import (
    SystemJobTemplateLaunch,
    SystemJobTemplateJobsList,
    SystemJobTemplateSchedulesList,
    SystemJobTemplateNotificationTemplatesAnyList,
    SystemJobTemplateNotificationTemplatesErrorList,
    SystemJobTemplateNotificationTemplatesStartedList,
    SystemJobTemplateNotificationTemplatesSuccessList,
)

@@ -21,8 +21,8 @@ urls = [
    url(r'^(?P<pk>[0-9]+)/launch/$', SystemJobTemplateLaunch.as_view(), name='system_job_template_launch'),
    url(r'^(?P<pk>[0-9]+)/jobs/$', SystemJobTemplateJobsList.as_view(), name='system_job_template_jobs_list'),
    url(r'^(?P<pk>[0-9]+)/schedules/$', SystemJobTemplateSchedulesList.as_view(), name='system_job_template_schedules_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_any/$', SystemJobTemplateNotificationTemplatesAnyList.as_view(),
        name='system_job_template_notification_templates_any_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_started/$', SystemJobTemplateNotificationTemplatesStartedList.as_view(),
        name='system_job_template_notification_templates_started_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', SystemJobTemplateNotificationTemplatesErrorList.as_view(),
        name='system_job_template_notification_templates_error_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', SystemJobTemplateNotificationTemplatesSuccessList.as_view(),

@@ -11,10 +11,9 @@ from awx.api.generics (
)
from awx.api.views import (
    ApiRootView,
    ApiV1RootView,
    ApiV2RootView,
    ApiV1PingView,
    ApiV1ConfigView,
    ApiV2PingView,
    ApiV2ConfigView,
    AuthView,
    UserMeList,
    DashboardView,
@@ -34,6 +33,8 @@ from awx.api.views import (
    OAuth2ApplicationDetail,
)

from awx.api.views.metrics import MetricsView

from .organization import urls as organization_urls
from .user import urls as user_urls
from .project import urls as project_urls
@@ -47,6 +48,7 @@ from .inventory_update import urls as inventory_update_urls
from .inventory_script import urls as inventory_script_urls
from .credential_type import urls as credential_type_urls
from .credential import urls as credential_urls
from .credential_input_source import urls as credential_input_source_urls
from .role import urls as role_urls
from .job_template import urls as job_template_urls
from .job import urls as job_urls
@@ -71,10 +73,25 @@ from .oauth2 import urls as oauth2_urls
from .oauth2_root import urls as oauth2_root_urls


v1_urls = [
    url(r'^$', ApiV1RootView.as_view(), name='api_v1_root_view'),
    url(r'^ping/$', ApiV1PingView.as_view(), name='api_v1_ping_view'),
    url(r'^config/$', ApiV1ConfigView.as_view(), name='api_v1_config_view'),
v2_urls = [
    url(r'^$', ApiV2RootView.as_view(), name='api_v2_root_view'),
    url(r'^credential_types/', include(credential_type_urls)),
    url(r'^credential_input_sources/', include(credential_input_source_urls)),
    url(r'^hosts/(?P<pk>[0-9]+)/ansible_facts/$', HostAnsibleFactsDetail.as_view(), name='host_ansible_facts_detail'),
    url(r'^jobs/(?P<pk>[0-9]+)/extra_credentials/$', JobExtraCredentialsList.as_view(), name='job_extra_credentials_list'),
    url(r'^jobs/(?P<pk>[0-9]+)/credentials/$', JobCredentialsList.as_view(), name='job_credentials_list'),
    url(r'^job_templates/(?P<pk>[0-9]+)/extra_credentials/$', JobTemplateExtraCredentialsList.as_view(), name='job_template_extra_credentials_list'),
    url(r'^job_templates/(?P<pk>[0-9]+)/credentials/$', JobTemplateCredentialsList.as_view(), name='job_template_credentials_list'),
    url(r'^schedules/preview/$', SchedulePreview.as_view(), name='schedule_rrule'),
    url(r'^schedules/zoneinfo/$', ScheduleZoneInfo.as_view(), name='schedule_zoneinfo'),
    url(r'^applications/$', OAuth2ApplicationList.as_view(), name='o_auth2_application_list'),
    url(r'^applications/(?P<pk>[0-9]+)/$', OAuth2ApplicationDetail.as_view(), name='o_auth2_application_detail'),
    url(r'^applications/(?P<pk>[0-9]+)/tokens/$', ApplicationOAuth2TokenList.as_view(), name='application_o_auth2_token_list'),
    url(r'^tokens/$', OAuth2TokenList.as_view(), name='o_auth2_token_list'),
    url(r'^', include(oauth2_urls)),
    url(r'^metrics/$', MetricsView.as_view(), name='metrics_view'),
    url(r'^ping/$', ApiV2PingView.as_view(), name='api_v2_ping_view'),
    url(r'^config/$', ApiV2ConfigView.as_view(), name='api_v2_config_view'),
    url(r'^auth/$', AuthView.as_view()),
    url(r'^me/$', UserMeList.as_view(), name='user_me_list'),
    url(r'^dashboard/$', DashboardView.as_view(), name='dashboard_view'),
@@ -116,28 +133,10 @@ v1_urls = [
    url(r'^activity_stream/', include(activity_stream_urls)),
]

v2_urls = [
    url(r'^$', ApiV2RootView.as_view(), name='api_v2_root_view'),
    url(r'^credential_types/', include(credential_type_urls)),
    url(r'^hosts/(?P<pk>[0-9]+)/ansible_facts/$', HostAnsibleFactsDetail.as_view(), name='host_ansible_facts_detail'),
    url(r'^jobs/(?P<pk>[0-9]+)/extra_credentials/$', JobExtraCredentialsList.as_view(), name='job_extra_credentials_list'),
    url(r'^jobs/(?P<pk>[0-9]+)/credentials/$', JobCredentialsList.as_view(), name='job_credentials_list'),
    url(r'^job_templates/(?P<pk>[0-9]+)/extra_credentials/$', JobTemplateExtraCredentialsList.as_view(), name='job_template_extra_credentials_list'),
    url(r'^job_templates/(?P<pk>[0-9]+)/credentials/$', JobTemplateCredentialsList.as_view(), name='job_template_credentials_list'),
    url(r'^schedules/preview/$', SchedulePreview.as_view(), name='schedule_rrule'),
    url(r'^schedules/zoneinfo/$', ScheduleZoneInfo.as_view(), name='schedule_zoneinfo'),
    url(r'^applications/$', OAuth2ApplicationList.as_view(), name='o_auth2_application_list'),
    url(r'^applications/(?P<pk>[0-9]+)/$', OAuth2ApplicationDetail.as_view(), name='o_auth2_application_detail'),
    url(r'^applications/(?P<pk>[0-9]+)/tokens/$', ApplicationOAuth2TokenList.as_view(), name='application_o_auth2_token_list'),
    url(r'^tokens/$', OAuth2TokenList.as_view(), name='o_auth2_token_list'),
    url(r'^', include(oauth2_urls)),
]

app_name = 'api'
urlpatterns = [
    url(r'^$', ApiRootView.as_view(), name='api_root_view'),
    url(r'^(?P<version>(v2))/', include(v2_urls)),
    url(r'^(?P<version>(v1|v2))/', include(v1_urls)),
    url(r'^login/$', LoggedLoginView.as_view(
        template_name='rest_framework/login.html',
        extra_context={'inside_login_context': True}

@@ -13,8 +13,8 @@ from awx.api.views import (
    WorkflowJobTemplateSurveySpec,
    WorkflowJobTemplateWorkflowNodesList,
    WorkflowJobTemplateActivityStreamList,
    WorkflowJobTemplateNotificationTemplatesAnyList,
    WorkflowJobTemplateNotificationTemplatesErrorList,
    WorkflowJobTemplateNotificationTemplatesStartedList,
    WorkflowJobTemplateNotificationTemplatesSuccessList,
    WorkflowJobTemplateAccessList,
    WorkflowJobTemplateObjectRolesList,
@@ -32,8 +32,8 @@ urls = [
    url(r'^(?P<pk>[0-9]+)/survey_spec/$', WorkflowJobTemplateSurveySpec.as_view(), name='workflow_job_template_survey_spec'),
    url(r'^(?P<pk>[0-9]+)/workflow_nodes/$', WorkflowJobTemplateWorkflowNodesList.as_view(), name='workflow_job_template_workflow_nodes_list'),
    url(r'^(?P<pk>[0-9]+)/activity_stream/$', WorkflowJobTemplateActivityStreamList.as_view(), name='workflow_job_template_activity_stream_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_any/$', WorkflowJobTemplateNotificationTemplatesAnyList.as_view(),
        name='workflow_job_template_notification_templates_any_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_started/$', WorkflowJobTemplateNotificationTemplatesStartedList.as_view(),
        name='workflow_job_template_notification_templates_started_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', WorkflowJobTemplateNotificationTemplatesErrorList.as_view(),
        name='workflow_job_template_notification_templates_error_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', WorkflowJobTemplateNotificationTemplatesSuccessList.as_view(),

@@ -2,7 +2,7 @@
# All Rights Reserved.

from django.conf import settings
from django.core.urlresolvers import NoReverseMatch
from django.urls import NoReverseMatch

from rest_framework.reverse import _reverse
from rest_framework.versioning import URLPathVersioning as BaseVersioning
@@ -27,19 +27,6 @@ def drf_reverse(viewname, args=None, kwargs=None, request=None, format=None, **e
    return url


def get_request_version(request):
    """
    The API version of a request as an integer i.e., 1 or 2
    """
    version = settings.REST_FRAMEWORK['DEFAULT_VERSION']
    if request and hasattr(request, 'version'):
        version = request.version
    if version is None:
        # For requests to /api/
        return None
    return int(version.lstrip('v'))


def reverse(viewname, args=None, kwargs=None, request=None, format=None, **extra):
    if request is None or getattr(request, 'version', None) is None:
        # We need the "current request" to determine the correct version to

File diff suppressed because it is too large
@@ -44,11 +44,9 @@ from awx.api.serializers import (
    InstanceGroupSerializer,
    InventoryUpdateEventSerializer,
    CustomInventoryScriptSerializer,
    InventoryDetailSerializer,
    JobTemplateSerializer,
)
from awx.api.views.mixin import (
    ActivityStreamEnforcementMixin,
    RelatedJobsPreventDeleteMixin,
    ControlledByScmMixin,
)
@@ -62,7 +60,7 @@ class InventoryUpdateEventsList(SubListAPIView):
    serializer_class = InventoryUpdateEventSerializer
    parent_model = InventoryUpdate
    relationship = 'inventory_update_events'
    view_name = _('Inventory Update Events List')
    name = _('Inventory Update Events List')
    search_fields = ('stdout',)

    def finalize_response(self, request, response, *args, **kwargs):
@@ -116,17 +114,11 @@ class InventoryList(ListCreateAPIView):
    model = Inventory
    serializer_class = InventorySerializer

    def get_queryset(self):
        qs = Inventory.accessible_objects(self.request.user, 'read_role')
        qs = qs.select_related('admin_role', 'read_role', 'update_role', 'use_role', 'adhoc_role')
        qs = qs.prefetch_related('created_by', 'modified_by', 'organization')
        return qs


class InventoryDetail(RelatedJobsPreventDeleteMixin, ControlledByScmMixin, RetrieveUpdateDestroyAPIView):

    model = Inventory
    serializer_class = InventoryDetailSerializer
    serializer_class = InventorySerializer

    def update(self, request, *args, **kwargs):
        obj = self.get_object()
@@ -149,7 +141,7 @@ class InventoryDetail(RelatedJobsPreventDeleteMixin, ControlledByScmMixin, Retri
        return Response(dict(error=_("{0}".format(e))), status=status.HTTP_400_BAD_REQUEST)


class InventoryActivityStreamList(ActivityStreamEnforcementMixin, SubListAPIView):
class InventoryActivityStreamList(SubListAPIView):

    model = ActivityStream
    serializer_class = ActivityStreamSerializer

40  awx/api/views/metrics.py  Normal file
@@ -0,0 +1,40 @@
# Copyright (c) 2018 Red Hat, Inc.
# All Rights Reserved.

# Python
import logging

# Django
from django.utils.translation import ugettext_lazy as _

# Django REST Framework
from rest_framework.response import Response
from rest_framework.exceptions import PermissionDenied


# AWX
# from awx.main.analytics import collectors
from awx.main.analytics.metrics import metrics
from awx.api import renderers

from awx.api.generics import (
    APIView,
)


logger = logging.getLogger('awx.main.analytics')


class MetricsView(APIView):

    name = _('Metrics')
    swagger_topic = 'Metrics'

    renderer_classes = [renderers.PlainTextRenderer,
                        renderers.BrowsableAPIRenderer,]

    def get(self, request, format='txt'):
        ''' Show Metrics Details '''
        if (request.user.is_superuser or request.user.is_system_auditor):
            return Response(metrics().decode('UTF-8'))
        raise PermissionDenied()
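A client-side sketch of the new endpoint (the URL comes from the `metrics/` route registered in `v2_urls` above; credentials are placeholders):

    # Sketch: only superusers and system auditors get the plain-text
    # metrics payload; everyone else hits the PermissionDenied branch.
    import requests

    resp = requests.get("http://server/api/v2/metrics/",
                        auth=("admin", "password"))
    print(resp.status_code)   # 403 unless superuser or system auditor
    print(resp.text[:200])    # plain-text output from metrics()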
@@ -31,48 +31,11 @@ from awx.main.models.organization import Team
from awx.main.models.projects import Project
from awx.main.models.inventory import Inventory
from awx.main.models.jobs import JobTemplate
from awx.conf.license import (
    feature_enabled,
    LicenseForbids,
)
from awx.api.exceptions import ActiveJobConflict

logger = logging.getLogger('awx.api.views.mixin')


class ActivityStreamEnforcementMixin(object):
    '''
    Mixin to check that license supports activity streams.
    '''
    def check_permissions(self, request):
        ret = super(ActivityStreamEnforcementMixin, self).check_permissions(request)
        if not feature_enabled('activity_streams'):
            raise LicenseForbids(_('Your license does not allow use of the activity stream.'))
        return ret


class SystemTrackingEnforcementMixin(object):
    '''
    Mixin to check that license supports system tracking.
    '''
    def check_permissions(self, request):
        ret = super(SystemTrackingEnforcementMixin, self).check_permissions(request)
        if not feature_enabled('system_tracking'):
            raise LicenseForbids(_('Your license does not permit use of system tracking.'))
        return ret


class WorkflowsEnforcementMixin(object):
    '''
    Mixin to check that license supports workflows.
    '''
    def check_permissions(self, request):
        ret = super(WorkflowsEnforcementMixin, self).check_permissions(request)
        if not feature_enabled('workflows') and request.method not in ('GET', 'OPTIONS', 'DELETE'):
            raise LicenseForbids(_('Your license does not allow use of workflows.'))
        return ret


class UnifiedJobDeletionMixin(object):
    '''
    Special handling when deleting a running unified job object.

@@ -7,13 +7,8 @@ import logging
# Django
from django.db.models import Count
from django.contrib.contenttypes.models import ContentType
from django.utils.translation import ugettext_lazy as _

# AWX
from awx.conf.license import (
    feature_enabled,
    LicenseForbids,
)
from awx.main.models import (
    ActivityStream,
    Inventory,
@@ -50,7 +45,6 @@ from awx.api.serializers import (
    InstanceGroupSerializer,
)
from awx.api.views.mixin import (
    ActivityStreamEnforcementMixin,
    RelatedJobsPreventDeleteMixin,
    OrganizationCountsMixin,
)
@@ -69,24 +63,6 @@ class OrganizationList(OrganizationCountsMixin, ListCreateAPIView):
        qs = qs.prefetch_related('created_by', 'modified_by')
        return qs

    def create(self, request, *args, **kwargs):
        """Create a new organzation.

        If there is already an organization and the license of this
        instance does not permit multiple organizations, then raise
        LicenseForbids.
        """
        # Sanity check: If the multiple organizations feature is disallowed
        # by the license, then we are only willing to create this organization
        # if no organizations exist in the system.
        if (not feature_enabled('multiple_organizations') and
                self.model.objects.exists()):
            raise LicenseForbids(_('Your license only permits a single '
                                   'organization to exist.'))

        # Okay, create the organization as usual.
        return super(OrganizationList, self).create(request, *args, **kwargs)


class OrganizationDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView):

@@ -140,6 +116,7 @@ class OrganizationUsersList(BaseUsersList):
    serializer_class = UserSerializer
    parent_model = Organization
    relationship = 'member_role.members'
    ordering = ('username',)


class OrganizationAdminsList(BaseUsersList):
@@ -148,6 +125,7 @@ class OrganizationAdminsList(BaseUsersList):
    serializer_class = UserSerializer
    parent_model = Organization
    relationship = 'admin_role.members'
    ordering = ('username',)


class OrganizationProjectsList(SubListCreateAttachDetachAPIView):
@@ -177,7 +155,7 @@ class OrganizationTeamsList(SubListCreateAttachDetachAPIView):
    parent_key = 'organization'


class OrganizationActivityStreamList(ActivityStreamEnforcementMixin, SubListAPIView):
class OrganizationActivityStreamList(SubListAPIView):

    model = ActivityStream
    serializer_class = ActivityStreamSerializer
@@ -200,22 +178,20 @@ class OrganizationNotificationTemplatesAnyList(SubListCreateAttachDetachAPIView)
    model = NotificationTemplate
    serializer_class = NotificationTemplateSerializer
    parent_model = Organization
    relationship = 'notification_templates_any'


class OrganizationNotificationTemplatesErrorList(SubListCreateAttachDetachAPIView):
class OrganizationNotificationTemplatesStartedList(OrganizationNotificationTemplatesAnyList):

    relationship = 'notification_templates_started'


class OrganizationNotificationTemplatesErrorList(OrganizationNotificationTemplatesAnyList):

    model = NotificationTemplate
    serializer_class = NotificationTemplateSerializer
    parent_model = Organization
    relationship = 'notification_templates_error'


class OrganizationNotificationTemplatesSuccessList(SubListCreateAttachDetachAPIView):
class OrganizationNotificationTemplatesSuccessList(OrganizationNotificationTemplatesAnyList):

    model = NotificationTemplate
    serializer_class = NotificationTemplateSerializer
    parent_model = Organization
    relationship = 'notification_templates_success'


@@ -244,4 +220,3 @@ class OrganizationObjectRolesList(SubListAPIView):
        po = self.get_parent_object()
        content_type = ContentType.objects.get_for_model(self.parent_model)
        return Role.objects.filter(content_type=content_type, object_id=po.pk)

@@ -25,8 +25,8 @@ from awx.main.utils import (
    get_custom_venv_choices,
    to_python_boolean,
)
from awx.api.versioning import reverse, get_request_version, drf_reverse
from awx.conf.license import get_license, feature_enabled
from awx.api.versioning import reverse, drf_reverse
from awx.conf.license import get_license
from awx.main.constants import PRIVILEGE_ESCALATION_METHODS
from awx.main.models import (
    Project,
@@ -42,7 +42,7 @@ logger = logging.getLogger('awx.api.views.root')
class ApiRootView(APIView):

    permission_classes = (AllowAny,)
    view_name = _('REST API')
    name = _('REST API')
    versioning_class = None
    swagger_topic = 'Versioning'

@@ -50,23 +50,21 @@ class ApiRootView(APIView):
    def get(self, request, format=None):
        ''' List supported API versions '''

        v1 = reverse('api:api_v1_root_view', kwargs={'version': 'v1'})
        v2 = reverse('api:api_v2_root_view', kwargs={'version': 'v2'})
        data = OrderedDict()
        data['description'] = _('AWX REST API')
        data['current_version'] = v2
        data['available_versions'] = dict(v1 = v1, v2 = v2)
        data['available_versions'] = dict(v2 = v2)
        data['oauth2'] = drf_reverse('api:oauth_authorization_root_view')
        if feature_enabled('rebranding'):
            data['custom_logo'] = settings.CUSTOM_LOGO
            data['custom_login_info'] = settings.CUSTOM_LOGIN_INFO
        data['custom_logo'] = settings.CUSTOM_LOGO
        data['custom_login_info'] = settings.CUSTOM_LOGIN_INFO
        return Response(data)


class ApiOAuthAuthorizationRootView(APIView):

    permission_classes = (AllowAny,)
    view_name = _("API OAuth 2 Authorization Root")
    name = _("API OAuth 2 Authorization Root")
    versioning_class = None
    swagger_topic = 'Authentication'

@@ -86,10 +84,10 @@ class ApiVersionRootView(APIView):
    def get(self, request, format=None):
        ''' List top level resources '''
        data = OrderedDict()
        data['ping'] = reverse('api:api_v1_ping_view', request=request)
        data['ping'] = reverse('api:api_v2_ping_view', request=request)
        data['instances'] = reverse('api:instance_list', request=request)
        data['instance_groups'] = reverse('api:instance_group_list', request=request)
        data['config'] = reverse('api:api_v1_config_view', request=request)
        data['config'] = reverse('api:api_v2_config_view', request=request)
        data['settings'] = reverse('api:setting_category_list', request=request)
        data['me'] = reverse('api:user_me_list', request=request)
        data['dashboard'] = reverse('api:dashboard_view', request=request)
@@ -99,10 +97,11 @@ class ApiVersionRootView(APIView):
        data['project_updates'] = reverse('api:project_update_list', request=request)
        data['teams'] = reverse('api:team_list', request=request)
        data['credentials'] = reverse('api:credential_list', request=request)
        if get_request_version(request) > 1:
            data['credential_types'] = reverse('api:credential_type_list', request=request)
            data['applications'] = reverse('api:o_auth2_application_list', request=request)
            data['tokens'] = reverse('api:o_auth2_token_list', request=request)
        data['credential_types'] = reverse('api:credential_type_list', request=request)
        data['credential_input_sources'] = reverse('api:credential_input_source_list', request=request)
        data['applications'] = reverse('api:o_auth2_application_list', request=request)
        data['tokens'] = reverse('api:o_auth2_token_list', request=request)
        data['metrics'] = reverse('api:metrics_view', request=request)
        data['inventory'] = reverse('api:inventory_list', request=request)
        data['inventory_scripts'] = reverse('api:inventory_script_list', request=request)
        data['inventory_sources'] = reverse('api:inventory_source_list', request=request)
@@ -130,21 +129,17 @@ class ApiVersionRootView(APIView):
        return Response(data)


class ApiV1RootView(ApiVersionRootView):
    view_name = _('Version 1')


class ApiV2RootView(ApiVersionRootView):
    view_name = _('Version 2')
    name = _('Version 2')


class ApiV1PingView(APIView):
class ApiV2PingView(APIView):
    """A simple view that reports very basic information about this
    instance, which is acceptable to be public information.
    """
    permission_classes = (AllowAny,)
    authentication_classes = ()
    view_name = _('Ping')
    name = _('Ping')
    swagger_topic = 'System Configuration'

    def get(self, request, format=None):
@@ -157,29 +152,30 @@ class ApiV1PingView(APIView):
            'ha': is_ha_environment(),
            'version': get_awx_version(),
            'active_node': settings.CLUSTER_HOST_ID,
            'install_uuid': settings.INSTALL_UUID,
        }

        response['instances'] = []
        for instance in Instance.objects.all():
            response['instances'].append(dict(node=instance.hostname, heartbeat=instance.modified,
            response['instances'].append(dict(node=instance.hostname, uuid=instance.uuid, heartbeat=instance.modified,
                                              capacity=instance.capacity, version=instance.version))
        sorted(response['instances'], key=operator.itemgetter('node'))
        response['instance_groups'] = []
        for instance_group in InstanceGroup.objects.all():
        for instance_group in InstanceGroup.objects.prefetch_related('instances'):
            response['instance_groups'].append(dict(name=instance_group.name,
                                                    capacity=instance_group.capacity,
                                                    instances=[x.hostname for x in instance_group.instances.all()]))
        return Response(response)


class ApiV1ConfigView(APIView):
class ApiV2ConfigView(APIView):

    permission_classes = (IsAuthenticated,)
    view_name = _('Configuration')
    name = _('Configuration')
    swagger_topic = 'System Configuration'

    def check_permissions(self, request):
        super(ApiV1ConfigView, self).check_permissions(request)
        super(ApiV2ConfigView, self).check_permissions(request)
        if not request.user.is_superuser and request.method.lower() not in {'options', 'head', 'get'}:
            self.permission_denied(request) # Raises PermissionDenied exception.

@@ -211,7 +207,7 @@ class ApiV1ConfigView(APIView):
        # If LDAP is enabled, user_ldap_fields will return a list of field
        # names that are managed by LDAP and should be read-only for users with
        # a non-empty ldap_dn attribute.
        if getattr(settings, 'AUTH_LDAP_SERVER_URI', None) and feature_enabled('ldap'):
        if getattr(settings, 'AUTH_LDAP_SERVER_URI', None):
            user_ldap_fields = ['username', 'password']
            user_ldap_fields.extend(getattr(settings, 'AUTH_LDAP_USER_ATTR_MAP', {}).keys())
            user_ldap_fields.extend(getattr(settings, 'AUTH_LDAP_USER_FLAGS_BY_GROUP', {}).keys())
@@ -220,7 +216,8 @@ class ApiV1ConfigView(APIView):
        if request.user.is_superuser \
                or request.user.is_system_auditor \
                or Organization.accessible_objects(request.user, 'admin_role').exists() \
                or Organization.accessible_objects(request.user, 'auditor_role').exists():
                or Organization.accessible_objects(request.user, 'auditor_role').exists() \
                or Organization.accessible_objects(request.user, 'project_admin_role').exists():
            data.update(dict(
                project_base_dir = settings.PROJECTS_ROOT,
                project_local_paths = Project.get_local_path_choices(),
@@ -276,6 +273,3 @@ class ApiV1ConfigView(APIView):
        except Exception:
            # FIX: Log
            return Response({"error": _("Failed to remove license.")}, status=status.HTTP_400_BAD_REQUEST)

@@ -78,9 +78,6 @@ register(
    # the other settings change, the cached value for this setting will be
    # cleared to require it to be recomputed.
    depends_on=['ANSIBLE_COW_SELECTION'],
    # Optional; licensed feature required to be able to view or modify this
    # setting.
    feature_required='rebranding',
    # Optional; field is stored encrypted in the database and only $encrypted$
    # is returned via the API.
    encrypted=True,

@@ -1,64 +1,19 @@
# Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.

# Django
from django.core.signals import setting_changed
from django.dispatch import receiver
from django.utils.translation import ugettext_lazy as _

# Django REST Framework
from rest_framework.exceptions import APIException

# Tower
from awx.main.utils.common import get_licenser
from awx.main.utils import memoize, memoize_delete

__all__ = ['LicenseForbids', 'get_license', 'get_licensed_features',
           'feature_enabled', 'feature_exists']


class LicenseForbids(APIException):
    status_code = 402
    default_detail = _('Your Tower license does not allow that.')
__all__ = ['get_license']


def _get_validated_license_data():
    return get_licenser().validate()


@receiver(setting_changed)
def _on_setting_changed(sender, **kwargs):
    # Clear cached result above when license changes.
    if kwargs.get('setting', None) == 'LICENSE':
        memoize_delete('feature_enabled')


def get_license(show_key=False):
    """Return a dictionary representing the active license on this Tower instance."""
    license_data = _get_validated_license_data()
    if not show_key:
        license_data.pop('license_key', None)
    return license_data


def get_licensed_features():
    """Return a set of all features enabled by the active license."""
    features = set()
    for feature, enabled in _get_validated_license_data().get('features', {}).items():
        if enabled:
            features.add(feature)
    return features


@memoize(track_function=True)
def feature_enabled(name):
    """Return True if the requested feature is enabled, False otherwise."""
    validated_license_data = _get_validated_license_data()
    if validated_license_data.get('license_type', 'UNLICENSED') == 'open':
        return True
    return validated_license_data.get('features', {}).get(name, False)


def feature_exists(name):
    """Return True if the requested feature name exists, False otherwise."""
    return bool(name in _get_validated_license_data().get('features', {}))
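After this rewrite the module's public surface is just `get_license`; a hedged sketch of a caller:

    # Sketch: feature gating helpers are gone, so callers inspect the
    # validated license dict directly.
    from awx.conf.license import get_license

    license_info = get_license()   # license_key omitted unless show_key=True
    print(license_info.get("license_type", "UNLICENSED"))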
@@ -22,7 +22,7 @@ def fill_ldap_group_type_params(apps, schema_editor):
        modified=now())

    init_attrs = set(inspect.getargspec(group_type.__init__).args[1:])
    for k in group_type_params.keys():
    for k in list(group_type_params.keys()):
        if k not in init_attrs:
            del group_type_params[k]

@@ -68,7 +68,7 @@ class SettingsRegistry(object):
    def get_dependent_settings(self, setting):
        return self._dependent_settings.get(setting, set())

    def get_registered_categories(self, features_enabled=None):
    def get_registered_categories(self):
        categories = {
            'all': _('All'),
            'changed': _('Changed'),
@@ -77,10 +77,6 @@ class SettingsRegistry(object):
            category_slug = kwargs.get('category_slug', None)
            if category_slug is None or category_slug in categories:
                continue
            if features_enabled is not None:
                feature_required = kwargs.get('feature_required', None)
                if feature_required and feature_required not in features_enabled:
                    continue
            if category_slug == 'user':
                categories['user'] = _('User')
                categories['user-defaults'] = _('User-Defaults')
@@ -88,7 +84,7 @@ class SettingsRegistry(object):
            categories[category_slug] = kwargs.get('category', None) or category_slug
        return categories

    def get_registered_settings(self, category_slug=None, read_only=None, features_enabled=None, slugs_to_ignore=set()):
    def get_registered_settings(self, category_slug=None, read_only=None, slugs_to_ignore=set()):
        setting_names = []
        if category_slug == 'user-defaults':
            category_slug = 'user'
@@ -100,14 +96,10 @@ class SettingsRegistry(object):
            if kwargs.get('category_slug', None) in slugs_to_ignore:
                continue
            if (read_only in {True, False} and kwargs.get('read_only', False) != read_only and
                    setting not in ('AWX_ISOLATED_PRIVATE_KEY', 'AWX_ISOLATED_PUBLIC_KEY')):
                    setting not in ('INSTALL_UUID', 'AWX_ISOLATED_PRIVATE_KEY', 'AWX_ISOLATED_PUBLIC_KEY')):
                # Note: Doesn't catch fields that set read_only via __init__;
                # read-only field kwargs should always include read_only=True.
                continue
            if features_enabled is not None:
                feature_required = kwargs.get('feature_required', None)
                if feature_required and feature_required not in features_enabled:
                    continue
            setting_names.append(setting)
        return setting_names
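A sketch of the registry API after the parameter removal (the category slug is illustrative):

    # Sketch: with features_enabled gone, callers filter only by category
    # and read-only status.
    from awx.conf import settings_registry

    categories = settings_registry.get_registered_categories()
    names = settings_registry.get_registered_settings(
        category_slug='system', read_only=False)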
@@ -135,7 +127,6 @@ class SettingsRegistry(object):
        category = field_kwargs.pop('category', None)
        depends_on = frozenset(field_kwargs.pop('depends_on', None) or [])
        placeholder = field_kwargs.pop('placeholder', empty)
        feature_required = field_kwargs.pop('feature_required', empty)
        encrypted = bool(field_kwargs.pop('encrypted', False))
        defined_in_file = bool(field_kwargs.pop('defined_in_file', False))
        if getattr(field_kwargs.get('child', None), 'source', None) is not None:
@@ -146,8 +137,6 @@ class SettingsRegistry(object):
        field_instance.depends_on = depends_on
        if placeholder is not empty:
            field_instance.placeholder = placeholder
        if feature_required is not empty:
            field_instance.feature_required = feature_required
        field_instance.defined_in_file = defined_in_file
        if field_instance.defined_in_file:
            field_instance.help_text = (

@@ -88,7 +88,7 @@ class SettingSingletonSerializer(serializers.Serializer):
                continue
            extra_kwargs = {}
            # Make LICENSE and AWX_ISOLATED_KEY_GENERATION read-only here;
            # LICENSE is only updated via /api/v1/config/
            # LICENSE is only updated via /api/v2/config/
            # AWX_ISOLATED_KEY_GENERATION is only set/unset via the setup playbook
            if key in ('LICENSE', 'AWX_ISOLATED_KEY_GENERATION'):
                extra_kwargs['read_only'] = True

@@ -24,7 +24,6 @@ from rest_framework.fields import empty, SkipField

# Tower
from awx.main.utils import encrypt_field, decrypt_field
from awx.main.utils.db import get_tower_migration_version
from awx.conf import settings_registry
from awx.conf.models import Setting
from awx.conf.migrations._reencrypt import decrypt_field as old_decrypt_field
@@ -90,45 +89,42 @@ def _ctit_db_wrapper(trans_safe=False):
            transaction.set_rollback(False)
        yield
    except DBError:
        if 'migrate' in sys.argv and get_tower_migration_version() < '310':
            logger.info('Using default settings until version 3.1 migration.')
        else:
            # We want the _full_ traceback with the context
            # First we get the current call stack, which constitutes the "top",
            # it has the context up to the point where the context manager is used
            top_stack = StringIO()
            traceback.print_stack(file=top_stack)
            top_lines = top_stack.getvalue().strip('\n').split('\n')
            top_stack.close()
            # Get "bottom" stack from the local error that happened
            # inside of the "with" block this wraps
            exc_type, exc_value, exc_traceback = sys.exc_info()
            bottom_stack = StringIO()
            traceback.print_tb(exc_traceback, file=bottom_stack)
            bottom_lines = bottom_stack.getvalue().strip('\n').split('\n')
            # Glue together top and bottom where overlap is found
            bottom_cutoff = 0
            for i, line in enumerate(bottom_lines):
                if line in top_lines:
                    # start of overlapping section, take overlap from bottom
                    top_lines = top_lines[:top_lines.index(line)]
                    bottom_cutoff = i
                    break
            bottom_lines = bottom_lines[bottom_cutoff:]
            tb_lines = top_lines + bottom_lines
        # We want the _full_ traceback with the context
        # First we get the current call stack, which constitutes the "top",
        # it has the context up to the point where the context manager is used
        top_stack = StringIO()
        traceback.print_stack(file=top_stack)
        top_lines = top_stack.getvalue().strip('\n').split('\n')
        top_stack.close()
        # Get "bottom" stack from the local error that happened
        # inside of the "with" block this wraps
        exc_type, exc_value, exc_traceback = sys.exc_info()
        bottom_stack = StringIO()
        traceback.print_tb(exc_traceback, file=bottom_stack)
        bottom_lines = bottom_stack.getvalue().strip('\n').split('\n')
        # Glue together top and bottom where overlap is found
        bottom_cutoff = 0
        for i, line in enumerate(bottom_lines):
            if line in top_lines:
                # start of overlapping section, take overlap from bottom
                top_lines = top_lines[:top_lines.index(line)]
                bottom_cutoff = i
                break
        bottom_lines = bottom_lines[bottom_cutoff:]
        tb_lines = top_lines + bottom_lines

            tb_string = '\n'.join(
                ['Traceback (most recent call last):'] +
                tb_lines +
                ['{}: {}'.format(exc_type.__name__, str(exc_value))]
            )
            bottom_stack.close()
            # Log the combined stack
            if trans_safe:
                if 'check_migrations' not in sys.argv:
                    logger.warning('Database settings are not available, using defaults, error:\n{}'.format(tb_string))
            else:
                logger.error('Error modifying something related to database settings.\n{}'.format(tb_string))
        tb_string = '\n'.join(
            ['Traceback (most recent call last):'] +
            tb_lines +
            ['{}: {}'.format(exc_type.__name__, str(exc_value))]
        )
        bottom_stack.close()
        # Log the combined stack
        if trans_safe:
            if 'check_migrations' not in sys.argv:
                logger.debug('Database settings are not available, using defaults, error:\n{}'.format(tb_string))
        else:
            logger.debug('Error modifying something related to database settings.\n{}'.format(tb_string))
    finally:
        if trans_safe and is_atomic and rollback_set:
            transaction.set_rollback(rollback_set)
@@ -381,8 +377,9 @@ class SettingsWrapper(UserSettingsHolder):
        setting = None
        setting_id = None
        if not field.read_only or name in (
            # these two values are read-only - however - we *do* want
            # these values are read-only - however - we *do* want
            # to fetch their value from the database
            'INSTALL_UUID',
            'AWX_ISOLATED_PRIVATE_KEY',
            'AWX_ISOLATED_PUBLIC_KEY',
        ):

@@ -2,7 +2,7 @@ import urllib.parse

import pytest

from django.core.urlresolvers import resolve
from django.urls import resolve
from django.contrib.auth.models import User

from rest_framework.test import (

@@ -65,41 +65,6 @@ def test_non_admin_user_does_not_see_categories(api_request, dummy_setting, norm
    assert not response.data['results']


@pytest.mark.django_db
@mock.patch(
    'awx.conf.views.VERSION_SPECIFIC_CATEGORIES_TO_EXCLUDE',
    {
        1: set([]),
        2: set(['foobar']),
    }
)
def test_version_specific_category_slug_to_exclude_does_not_show_up(api_request, dummy_setting):
    with dummy_setting(
        'FOO_BAR',
        field_class=fields.IntegerField,
        category='FooBar',
        category_slug='foobar'
    ):
        response = api_request(
            'get',
            reverse('api:setting_category_list',
                    kwargs={'version': 'v2'})
        )
        for item in response.data['results']:
            assert item['slug'] != 'foobar'
        response = api_request(
            'get',
            reverse('api:setting_category_list',
                    kwargs={'version': 'v1'})
        )
        contains = False
        for item in response.data['results']:
            if item['slug'] != 'foobar':
                contains = True
                break
        assert contains


@pytest.mark.django_db
def test_setting_singleton_detail_retrieve(api_request, dummy_setting):
    with dummy_setting(

@@ -119,20 +119,6 @@ def test_get_registered_read_only_settings(reg):
    ]


def test_get_registered_settings_with_required_features(reg):
    reg.register(
        'AWX_SOME_SETTING_ENABLED',
        field_class=fields.BooleanField,
        category=_('System'),
        category_slug='system',
        feature_required='superpowers',
    )
    assert reg.get_registered_settings(features_enabled=[]) == []
    assert reg.get_registered_settings(features_enabled=['superpowers']) == [
        'AWX_SOME_SETTING_ENABLED'
    ]


def test_get_dependent_settings(reg):
    reg.register(
        'AWX_SOME_SETTING_ENABLED',
@@ -173,45 +159,6 @@ def test_get_registered_categories(reg):
    }


def test_get_registered_categories_with_required_features(reg):
    reg.register(
        'AWX_SOME_SETTING_ENABLED',
        field_class=fields.BooleanField,
        category=_('System'),
        category_slug='system',
        feature_required='superpowers'
    )
    reg.register(
        'AWX_SOME_OTHER_SETTING_ENABLED',
        field_class=fields.BooleanField,
        category=_('OtherSystem'),
        category_slug='other-system',
        feature_required='sortapowers'
    )
    assert reg.get_registered_categories(features_enabled=[]) == {
        'all': _('All'),
        'changed': _('Changed'),
    }
    assert reg.get_registered_categories(features_enabled=['superpowers']) == {
        'all': _('All'),
        'changed': _('Changed'),
        'system': _('System'),
    }
    assert reg.get_registered_categories(features_enabled=['sortapowers']) == {
        'all': _('All'),
        'changed': _('Changed'),
        'other-system': _('OtherSystem'),
    }
    assert reg.get_registered_categories(
        features_enabled=['superpowers', 'sortapowers']
    ) == {
        'all': _('All'),
        'changed': _('Changed'),
        'system': _('System'),
        'other-system': _('OtherSystem'),
    }

def test_is_setting_encrypted(reg):
    reg.register(
        'AWX_SOME_SETTING_ENABLED',
@@ -237,7 +184,6 @@ def test_simple_field(reg):
        category=_('System'),
        category_slug='system',
        placeholder='Example Value',
        feature_required='superpowers'
    )

    field = reg.get_setting_field('AWX_SOME_SETTING')
@@ -246,7 +192,6 @@ def test_simple_field(reg):
    assert field.category_slug == 'system'
    assert field.default is empty
    assert field.placeholder == 'Example Value'
    assert field.feature_required == 'superpowers'


def test_field_with_custom_attribute(reg):

@@ -24,11 +24,10 @@ from awx.api.generics import (
    RetrieveUpdateDestroyAPIView,
)
from awx.api.permissions import IsSuperUser
from awx.api.versioning import reverse, get_request_version
from awx.api.versioning import reverse
from awx.main.utils import camelcase_to_underscore
from awx.main.utils.handlers import AWXProxyHandler, LoggingConnectivityException
from awx.main.tasks import handle_setting_changes
from awx.conf.license import get_licensed_features
from awx.conf.models import Setting
from awx.conf.serializers import SettingCategorySerializer, SettingSingletonSerializer
from awx.conf import settings_registry
@@ -36,24 +35,17 @@ from awx.conf import settings_registry

SettingCategory = collections.namedtuple('SettingCategory', ('url', 'slug', 'name'))

VERSION_SPECIFIC_CATEGORIES_TO_EXCLUDE = {
    1: set([
        'named-url',
    ]),
    2: set([]),
}


class SettingCategoryList(ListAPIView):

    model = Setting # Not exactly, but needed for the view.
    serializer_class = SettingCategorySerializer
    filter_backends = []
    view_name = _('Setting Categories')
    name = _('Setting Categories')

    def get_queryset(self):
        setting_categories = []
        categories = settings_registry.get_registered_categories(features_enabled=get_licensed_features())
        categories = settings_registry.get_registered_categories()
        if self.request.user.is_superuser or self.request.user.is_system_auditor:
            pass # categories = categories
        elif 'user' in categories:
@@ -61,8 +53,6 @@ class SettingCategoryList(ListAPIView):
        else:
            categories = {}
        for category_slug in sorted(categories.keys()):
            if category_slug in VERSION_SPECIFIC_CATEGORIES_TO_EXCLUDE[get_request_version(self.request)]:
                continue
            url = reverse('api:setting_singleton_detail', kwargs={'category_slug': category_slug}, request=self.request)
            setting_categories.append(SettingCategory(url, category_slug, categories[category_slug]))
        return setting_categories
@@ -73,13 +63,11 @@ class SettingSingletonDetail(RetrieveUpdateDestroyAPIView):

    model = Setting # Not exactly, but needed for the view.
    serializer_class = SettingSingletonSerializer
    filter_backends = []
    view_name = _('Setting Detail')
    name = _('Setting Detail')

    def get_queryset(self):
        self.category_slug = self.kwargs.get('category_slug', 'all')
        all_category_slugs = list(settings_registry.get_registered_categories(features_enabled=get_licensed_features()).keys())
        for slug_to_delete in VERSION_SPECIFIC_CATEGORIES_TO_EXCLUDE[get_request_version(self.request)]:
            all_category_slugs.remove(slug_to_delete)
        all_category_slugs = list(settings_registry.get_registered_categories().keys())
        if self.request.user.is_superuser or getattr(self.request.user, 'is_system_auditor', False):
            category_slugs = all_category_slugs
        else:
@@ -90,8 +78,7 @@ class SettingSingletonDetail(RetrieveUpdateDestroyAPIView):
            raise PermissionDenied()

        registered_settings = settings_registry.get_registered_settings(
            category_slug=self.category_slug, read_only=False, features_enabled=get_licensed_features(),
            slugs_to_ignore=VERSION_SPECIFIC_CATEGORIES_TO_EXCLUDE[get_request_version(self.request)]
            category_slug=self.category_slug, read_only=False,
        )
        if self.category_slug == 'user':
            return Setting.objects.filter(key__in=registered_settings, user=self.request.user)
@@ -101,8 +88,7 @@ class SettingSingletonDetail(RetrieveUpdateDestroyAPIView):
    def get_object(self):
        settings_qs = self.get_queryset()
        registered_settings = settings_registry.get_registered_settings(
            category_slug=self.category_slug, features_enabled=get_licensed_features(),
            slugs_to_ignore=VERSION_SPECIFIC_CATEGORIES_TO_EXCLUDE[get_request_version(self.request)]
            category_slug=self.category_slug,
        )
        all_settings = {}
        for setting in settings_qs:
@@ -168,7 +154,7 @@ class SettingSingletonDetail(RetrieveUpdateDestroyAPIView):

class SettingLoggingTest(GenericAPIView):

    view_name = _('Logging Connectivity Test')
    name = _('Logging Connectivity Test')
    model = Setting
    serializer_class = SettingSingletonSerializer
    permission_classes = (IsSuperUser,)

@@ -1,30 +0,0 @@
# Copyright (c) 2017 Ansible by Red Hat
#
# This file is part of Ansible Tower, but depends on code imported from Ansible.
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.

from __future__ import (absolute_import, division, print_function)

# Python
import os
import sys

# Add awx/lib to sys.path.
awx_lib_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
if awx_lib_path not in sys.path:
    sys.path.insert(0, awx_lib_path)

# Tower Display Callback
from awx_display_callback import AWXDefaultCallbackModule as CallbackModule  # noqa
@@ -1,30 +0,0 @@
# Copyright (c) 2017 Ansible by Red Hat
#
# This file is part of Ansible Tower, but depends on code imported from Ansible.
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.

from __future__ import (absolute_import, division, print_function)

# Python
import os
import sys

# Add awx/lib to sys.path.
awx_lib_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
if awx_lib_path not in sys.path:
    sys.path.insert(0, awx_lib_path)

# Tower Display Callback
from awx_display_callback import AWXMinimalCallbackModule as CallbackModule  # noqa
@@ -1,26 +0,0 @@
# Python
import os
import sys

# Based on http://stackoverflow.com/a/6879344/131141 -- Initialize awx display
# callback as early as possible to wrap ansible.display.Display methods.


def argv_ready(argv):
    if argv and os.path.basename(argv[0]) in {'ansible', 'ansible-playbook'}:
        import awx_display_callback  # noqa


class argv_placeholder(object):

    def __del__(self):
        try:
            argv_ready(sys.argv)
        except Exception:
            pass


if hasattr(sys, 'argv'):
    argv_ready(sys.argv)
else:
    sys.argv = argv_placeholder()
@@ -1,2 +0,0 @@
[pytest]
addopts = -v
@@ -30,8 +30,8 @@ from awx.main.utils import (
)
from awx.main.models import (
    ActivityStream, AdHocCommand, AdHocCommandEvent, Credential, CredentialType,
    CustomInventoryScript, Group, Host, Instance, InstanceGroup, Inventory,
    InventorySource, InventoryUpdate, InventoryUpdateEvent, Job, JobEvent,
    CredentialInputSource, CustomInventoryScript, Group, Host, Instance, InstanceGroup,
    Inventory, InventorySource, InventoryUpdate, InventoryUpdateEvent, Job, JobEvent,
    JobHostSummary, JobLaunchConfig, JobTemplate, Label, Notification,
    NotificationTemplate, Organization, Project, ProjectUpdate,
    ProjectUpdateEvent, Role, Schedule, SystemJob, SystemJobEvent,
@@ -41,8 +41,6 @@ from awx.main.models import (
)
from awx.main.models.mixins import ResourceMixin

from awx.conf.license import LicenseForbids, feature_enabled

__all__ = ['get_user_queryset', 'check_user_access', 'check_user_access_with_errors',
           'user_accessible_objects', 'consumer_access',]

@@ -84,6 +82,17 @@ def get_object_from_data(field, Model, data, obj=None):
        raise ParseError(_("Bad data found in related field %s." % field))


def vars_are_encrypted(vars):
    '''Returns True if any of the values in the dictionary vars contains
    content which is encrypted by the AWX encryption algorithm
    '''
    for value in vars.values():
        if isinstance(value, str):
            if value.startswith('$encrypted$'):
                return True
    return False
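# A minimal usage sketch (not part of the diff; the dicts below are made up):
# the relaunch checks in JobAccess and WorkflowJobAccess further down in this
# diff call vars_are_encrypted() on config.extra_data to decide whether
# another user's saved prompts contain secret values.
#
#   vars_are_encrypted({'limit': 'webservers'})                       # False
#   vars_are_encrypted({'vault_password': '$encrypted$AESCBC$...'})   # True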
def register_access(model_class, access_class):
    access_registry[model_class] = access_class


@@ -324,12 +333,6 @@ class BaseAccess(object):
        elif not add_host_name and free_instances < 0:
            raise PermissionDenied(_("Host count exceeds available instances."))

        if feature is not None:
            if "features" in validation_info and not validation_info["features"].get(feature, False):
                raise LicenseForbids(_("Feature %s is not enabled in the active license.") % feature)
            elif "features" not in validation_info:
                raise LicenseForbids(_("Features not found in active license."))

    def check_org_host_limit(self, data, add_host_name=None):
        validation_info = get_licenser().validate()
        if validation_info.get('license_type', 'UNLICENSED') == 'open':
@@ -383,9 +386,6 @@ class BaseAccess(object):
            if obj.validation_errors:
                user_capabilities[display_method] = False
                continue
            elif isinstance(obj, (WorkflowJobTemplate, WorkflowJob)) and (not feature_enabled('workflows')):
                user_capabilities[display_method] = (display_method == 'delete')
                continue
            elif display_method == 'copy' and isinstance(obj, WorkflowJobTemplate) and obj.organization_id is None:
                user_capabilities[display_method] = self.user.is_superuser
                continue
@@ -426,7 +426,7 @@ class BaseAccess(object):
            if display_method == 'schedule':
                user_capabilities['schedule'] = user_capabilities['start']
                continue
            elif display_method == 'delete' and not isinstance(obj, (User, UnifiedJob, CustomInventoryScript)):
            elif display_method == 'delete' and not isinstance(obj, (User, UnifiedJob, CustomInventoryScript, CredentialInputSource)):
                user_capabilities['delete'] = user_capabilities['edit']
                continue
            elif display_method == 'copy' and isinstance(obj, (Group, Host)):
@@ -460,6 +460,42 @@ class BaseAccess(object):
        return False


class NotificationAttachMixin(BaseAccess):
    '''For models that can have notifications attached

    I can attach a notification template when
    - I have notification_admin_role to organization of the NT
    - I can read the object I am attaching it to

    I can unattach when those same criteria are met
    '''
    notification_attach_roles = None

    def _can_attach(self, notification_template, resource_obj):
        if not NotificationTemplateAccess(self.user).can_change(notification_template, {}):
            return False
        if self.notification_attach_roles is None:
            return self.can_read(resource_obj)
        return any(self.user in getattr(resource_obj, role) for role in self.notification_attach_roles)

    @check_superuser
    def can_attach(self, obj, sub_obj, relationship, data, skip_sub_obj_read_check=False):
        if isinstance(sub_obj, NotificationTemplate):
            # reverse obj and sub_obj
            return self._can_attach(notification_template=sub_obj, resource_obj=obj)
        return super(NotificationAttachMixin, self).can_attach(
            obj, sub_obj, relationship, data, skip_sub_obj_read_check=skip_sub_obj_read_check)

    @check_superuser
    def can_unattach(self, obj, sub_obj, relationship, data=None):
        if isinstance(sub_obj, NotificationTemplate):
            # due to this special case, we use symmetrical logic with attach permission
            return self._can_attach(notification_template=sub_obj, resource_obj=obj)
        return super(NotificationAttachMixin, self).can_unattach(
            obj, sub_obj, relationship, relationship, data=data
        )
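# A minimal sketch (not part of the diff) of how an access class opts in to
# the mixin above; OrganizationAccess below does exactly this. The class name
# here is hypothetical.
#
#   class ExampleAccess(NotificationAttachMixin, BaseAccess):
#       model = Organization
#       # check these roles instead of falling back to can_read()
#       notification_attach_roles = ['admin_role', 'auditor_role']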
class InstanceAccess(BaseAccess):

    model = Instance
@@ -620,23 +656,22 @@ class UserAccess(BaseAccess):
        return False

    def can_attach(self, obj, sub_obj, relationship, *args, **kwargs):
        if not settings.MANAGE_ORGANIZATION_AUTH and not self.user.is_superuser:
            return False

        # Reverse obj and sub_obj, defer to RoleAccess if this is a role assignment.
        # The only thing that a User should ever have attached is a Role
        if relationship == 'roles':
            role_access = RoleAccess(self.user)
            return role_access.can_attach(sub_obj, obj, 'members', *args, **kwargs)
        return super(UserAccess, self).can_attach(obj, sub_obj, relationship, *args, **kwargs)

        logger.error('Unexpected attempt to associate {} with a user.'.format(sub_obj))
        return False

    def can_unattach(self, obj, sub_obj, relationship, *args, **kwargs):
        if not settings.MANAGE_ORGANIZATION_AUTH and not self.user.is_superuser:
            return False

        # The only thing that a User should ever have to be unattached is a Role
        if relationship == 'roles':
            role_access = RoleAccess(self.user)
            return role_access.can_unattach(sub_obj, obj, 'members', *args, **kwargs)
        return super(UserAccess, self).can_unattach(obj, sub_obj, relationship, *args, **kwargs)

        logger.error('Unexpected attempt to de-associate {} from a user.'.format(sub_obj))
        return False


class OAuth2ApplicationAccess(BaseAccess):
@@ -652,6 +687,7 @@ class OAuth2ApplicationAccess(BaseAccess):

    model = OAuth2Application
    select_related = ('user',)
    prefetch_related = ('organization', 'oauth2accesstoken_set')

    def filtered_queryset(self):
        org_access_qs = Organization.accessible_objects(self.user, 'member_role')
@@ -690,6 +726,7 @@ class OAuth2TokenAccess(BaseAccess):
    model = OAuth2AccessToken

    select_related = ('user', 'application')
    prefetch_related = ('refresh_token',)

    def filtered_queryset(self):
        org_access_qs = Organization.objects.filter(
@@ -715,7 +752,7 @@ class OAuth2TokenAccess(BaseAccess):
        return True
class OrganizationAccess(BaseAccess):
class OrganizationAccess(NotificationAttachMixin, BaseAccess):
    '''
    I can see organizations when:
    - I am a superuser.
@@ -729,6 +766,8 @@ class OrganizationAccess(BaseAccess):

    model = Organization
    prefetch_related = ('created_by', 'modified_by',)
    # organization admin_role is not a parent of organization auditor_role
    notification_attach_roles = ['admin_role', 'auditor_role']

    def filtered_queryset(self):
        return self.model.accessible_objects(self.user, 'read_role')
@@ -738,13 +777,18 @@ class OrganizationAccess(BaseAccess):
        return self.user in obj.admin_role

    def can_delete(self, obj):
        self.check_license(feature='multiple_organizations', check_expiration=False)
        self.check_license(check_expiration=False)
        is_change_possible = self.can_change(obj, None)
        if not is_change_possible:
            return False
        return True

    def can_attach(self, obj, sub_obj, relationship, *args, **kwargs):
        # If the request is updating the membership, check the membership role permissions instead
        if relationship in ('member_role.members', 'admin_role.members'):
            rel_role = getattr(obj, relationship.split('.')[0])
            return RoleAccess(self.user).can_attach(rel_role, sub_obj, 'members', *args, **kwargs)

        if relationship == "instance_groups":
            if self.user.is_superuser:
                return True
@@ -752,6 +796,11 @@ class OrganizationAccess(BaseAccess):
        return super(OrganizationAccess, self).can_attach(obj, sub_obj, relationship, *args, **kwargs)

    def can_unattach(self, obj, sub_obj, relationship, *args, **kwargs):
        # If the request is updating the membership, check the membership role permissions instead
        if relationship in ('member_role.members', 'admin_role.members'):
            rel_role = getattr(obj, relationship.split('.')[0])
            return RoleAccess(self.user).can_unattach(rel_role, sub_obj, 'members', *args, **kwargs)

        if relationship == "instance_groups":
            return self.can_attach(obj, sub_obj, relationship, *args, **kwargs)
        return super(OrganizationAccess, self).can_attach(obj, sub_obj, relationship, *args, **kwargs)
@@ -779,7 +828,7 @@ class InventoryAccess(BaseAccess):
    '''

    model = Inventory
    select_related = ('created_by', 'modified_by', 'organization',)
    prefetch_related = ('created_by', 'modified_by', 'organization')

    def filtered_queryset(self, allowed=None, ad_hoc=None):
        return self.model.accessible_objects(self.user, 'read_role')
@@ -952,21 +1001,8 @@ class GroupAccess(BaseAccess):
    def can_delete(self, obj):
        return bool(obj and self.user in obj.inventory.admin_role)

    def can_start(self, obj, validate_license=True):
        # TODO: Delete for 3.3, only used by v1 serializer
        # Used as another alias to inventory_source start access for user_capabilities
        if obj:
            try:
                return self.user.can_access(
                    InventorySource, 'start', obj.deprecated_inventory_source,
                    validate_license=validate_license)
                obj.deprecated_inventory_source
            except Group.deprecated_inventory_source.RelatedObjectDoesNotExist:
                return False
        return False
class InventorySourceAccess(BaseAccess):
class InventorySourceAccess(NotificationAttachMixin, BaseAccess):
    '''
    I can see inventory sources whenever I can see their inventory.
    I can change inventory sources whenever I can change their inventory.
@@ -1043,8 +1079,8 @@ class InventoryUpdateAccess(BaseAccess):
    '''

    model = InventoryUpdate
    select_related = ('created_by', 'modified_by', 'inventory_source__inventory',)
    prefetch_related = ('unified_job_template', 'instance_group', 'credentials',)
    select_related = ('created_by', 'modified_by', 'inventory_source',)
    prefetch_related = ('unified_job_template', 'instance_group', 'credentials__credential_type', 'inventory', 'source_script')

    def filtered_queryset(self):
        return self.model.objects.filter(inventory_source__inventory__in=Inventory.accessible_pk_qs(self.user, 'read_role'))
@@ -1058,11 +1094,7 @@ class InventoryUpdateAccess(BaseAccess):
        return self.user in obj.inventory_source.inventory.admin_role

    def can_start(self, obj, validate_license=True):
        # For relaunching
        if obj and obj.inventory_source:
            access = InventorySourceAccess(self.user)
            return access.can_start(obj.inventory_source, validate_license=validate_license)
        return False
        return InventorySourceAccess(self.user).can_start(obj, validate_license=validate_license)

    @check_superuser
    def can_delete(self, obj):
@@ -1080,6 +1112,7 @@ class CredentialTypeAccess(BaseAccess):
    '''

    model = CredentialType
    prefetch_related = ('created_by', 'modified_by',)

    def can_read(self, obj):
        return True
@@ -1162,6 +1195,55 @@ class CredentialAccess(BaseAccess):
        # return True
        return self.can_change(obj, None)

    def get_user_capabilities(self, obj, **kwargs):
        user_capabilities = super(CredentialAccess, self).get_user_capabilities(obj, **kwargs)
        user_capabilities['use'] = self.can_use(obj)
        return user_capabilities


class CredentialInputSourceAccess(BaseAccess):
    '''
    I can see a CredentialInputSource when:
    - I can see the associated target_credential
    I can create/change a CredentialInputSource when:
    - I'm an admin of the associated target_credential
    - I have use access to the associated source credential
    I can delete a CredentialInputSource when:
    - I'm an admin of the associated target_credential
    '''

    model = CredentialInputSource
    select_related = ('target_credential', 'source_credential')

    def filtered_queryset(self):
        return CredentialInputSource.objects.filter(
            target_credential__in=Credential.accessible_pk_qs(self.user, 'read_role'))

    @check_superuser
    def can_read(self, obj):
        return self.user in obj.target_credential.read_role

    @check_superuser
    def can_add(self, data):
        return (
            self.check_related('target_credential', Credential, data, role_field='admin_role') and
            self.check_related('source_credential', Credential, data, role_field='use_role')
        )

    @check_superuser
    def can_change(self, obj, data):
        if self.can_add(data) is False:
            return False

        return (
            self.user in obj.target_credential.admin_role and
            self.user in obj.source_credential.use_role
        )

    @check_superuser
    def can_delete(self, obj):
        return self.user in obj.target_credential.admin_role
class TeamAccess(BaseAccess):
    '''
@@ -1169,6 +1251,7 @@ class TeamAccess(BaseAccess):
    - I'm a superuser.
    - I'm an admin of the team
    - I'm a member of that team.
    - I'm a member of the team's organization
    I can create/change a team when:
    - I'm a superuser.
    - I'm an admin for the team
@@ -1181,7 +1264,10 @@ class TeamAccess(BaseAccess):
        if settings.ORG_ADMINS_CAN_SEE_ALL_USERS and \
                (self.user.admin_of_organizations.exists() or self.user.auditor_of_organizations.exists()):
            return self.model.objects.all()
        return self.model.accessible_objects(self.user, 'read_role')
        return self.model.objects.filter(
            Q(organization__in=Organization.accessible_pk_qs(self.user, 'member_role')) |
            Q(pk__in=self.model.accessible_pk_qs(self.user, 'read_role'))
        )

    @check_superuser
    def can_add(self, data):
@@ -1219,6 +1305,12 @@ class TeamAccess(BaseAccess):
                                *args, **kwargs)
        if self.user.is_superuser:
            return True

        # If the request is updating the membership, check the membership role permissions instead
        if relationship in ('member_role.members', 'admin_role.members'):
            rel_role = getattr(obj, relationship.split('.')[0])
            return RoleAccess(self.user).can_attach(rel_role, sub_obj, 'members', *args, **kwargs)

        return super(TeamAccess, self).can_attach(obj, sub_obj, relationship,
                                                  *args, **kwargs)

@@ -1229,11 +1321,17 @@ class TeamAccess(BaseAccess):
            role_access = RoleAccess(self.user)
            return role_access.can_unattach(sub_obj, obj, 'member_role.parents',
                                            *args, **kwargs)

        # If the request is updating the membership, check the membership role permissions instead
        if relationship in ('member_role.members', 'admin_role.members'):
            rel_role = getattr(obj, relationship.split('.')[0])
            return RoleAccess(self.user).can_unattach(rel_role, sub_obj, 'members', *args, **kwargs)

        return super(TeamAccess, self).can_unattach(obj, sub_obj, relationship,
                                                    *args, **kwargs)


class ProjectAccess(BaseAccess):
class ProjectAccess(NotificationAttachMixin, BaseAccess):
    '''
    I can see projects when:
    - I am a superuser.
@@ -1251,7 +1349,9 @@ class ProjectAccess(BaseAccess):
    '''

    model = Project
    select_related = ('modified_by', 'credential', 'current_job', 'last_job',)
    select_related = ('credential',)
    prefetch_related = ('modified_by', 'created_by', 'organization', 'last_job', 'current_job')
    notification_attach_roles = ['admin_role']

    def filtered_queryset(self):
        return self.model.accessible_objects(self.user, 'read_role')
@@ -1314,7 +1414,7 @@ class ProjectUpdateAccess(BaseAccess):
        return obj and self.user in obj.project.admin_role
class JobTemplateAccess(BaseAccess):
class JobTemplateAccess(NotificationAttachMixin, BaseAccess):
    '''
    I can see job templates when:
    - I have read role for the job template.
@@ -1394,7 +1494,10 @@ class JobTemplateAccess(BaseAccess):

        # obj.credentials.all() is accessible ONLY when object is saved (has valid id)
        credential_manager = getattr(obj, 'credentials', None) if getattr(obj, 'id', False) else Credential.objects.none()
        return reduce(lambda prev, cred: prev and self.user in cred.use_role, credential_manager.all(), True)
        user_can_copy = reduce(lambda prev, cred: prev and self.user in cred.use_role, credential_manager.all(), True)
        if not user_can_copy:
            raise PermissionDenied(_('Insufficient access to Job Template credentials.'))
        return user_can_copy

    def can_start(self, obj, validate_license=True):
        # Check license.
@@ -1404,11 +1507,6 @@ class JobTemplateAccess(BaseAccess):
        # Check the per-org limit
        self.check_org_host_limit({'inventory': obj.inventory})

        if obj.survey_enabled:
            self.check_license(feature='surveys')
        if Instance.objects.active_count() > 1:
            self.check_license(feature='ha')

        # Super users can start any job
        if self.user.is_superuser:
            return True
@@ -1465,8 +1563,6 @@ class JobTemplateAccess(BaseAccess):

    @check_superuser
    def can_attach(self, obj, sub_obj, relationship, data, skip_sub_obj_read_check=False):
        if isinstance(sub_obj, NotificationTemplate):
            return self.check_related('organization', Organization, {}, obj=sub_obj, mandatory=True)
        if relationship == "instance_groups":
            if not obj.project.organization:
                return False
@@ -1582,10 +1678,10 @@ class JobAccess(BaseAccess):
            prompts_access = False
        elif not config.has_user_prompts(obj.job_template):
            prompts_access = True
        elif obj.created_by_id != self.user.pk:
        elif obj.created_by_id != self.user.pk and vars_are_encrypted(config.extra_data):
            prompts_access = False
            if self.save_messages:
                self.messages['detail'] = _('Job was launched with prompts provided by another user.')
                self.messages['detail'] = _('Job was launched with secret prompts provided by another user.')
        else:
            prompts_access = (
                JobLaunchConfigAccess(self.user).can_add({'reference_obj': config}) and
@@ -1747,7 +1843,7 @@ class WorkflowJobTemplateNodeAccess(BaseAccess):
    '''
    model = WorkflowJobTemplateNode
    prefetch_related = ('success_nodes', 'failure_nodes', 'always_nodes',
                        'unified_job_template', 'credentials',)
                        'unified_job_template', 'credentials', 'workflow_job_template')

    def filtered_queryset(self):
        return self.model.objects.filter(
@@ -1839,9 +1935,8 @@ class WorkflowJobNodeAccess(BaseAccess):
    Deletion must happen as a cascade delete from the workflow job.
    '''
    model = WorkflowJobNode
    select_related = ('unified_job_template', 'job',)
    prefetch_related = ('success_nodes', 'failure_nodes', 'always_nodes',
                        'credentials',)
    prefetch_related = ('unified_job_template', 'job', 'workflow_job', 'credentials',
                        'success_nodes', 'failure_nodes', 'always_nodes',)

    def filtered_queryset(self):
        return self.model.objects.filter(
@@ -1864,7 +1959,7 @@ class WorkflowJobNodeAccess(BaseAccess):


# TODO: notification attachments?
class WorkflowJobTemplateAccess(BaseAccess):
class WorkflowJobTemplateAccess(NotificationAttachMixin, BaseAccess):
    '''
    I can only see/manage Workflow Job Templates if I'm a super user
    '''
@@ -1935,10 +2030,6 @@ class WorkflowJobTemplateAccess(BaseAccess):
        # Check the per-org limit
        self.check_org_host_limit({'inventory': obj.inventory})

        # if surveys are added to WFJTs, check license here
        if obj.survey_enabled:
            self.check_license(feature='surveys')

        # Super users can start any job
        if self.user.is_superuser:
            return True
@@ -1946,11 +2037,6 @@ class WorkflowJobTemplateAccess(BaseAccess):
        return self.user in obj.execute_role

    def can_change(self, obj, data):
        # Check survey license if surveys are added to WFJTs
        if (data and 'survey_enabled' in data and
                obj.survey_enabled != data['survey_enabled'] and data['survey_enabled']):
            self.check_license(feature='surveys')

        if self.user.is_superuser:
            return True

@@ -2030,9 +2116,9 @@ class WorkflowJobAccess(BaseAccess):

        # Check if access to prompts to prevent relaunch
        if config.prompts_dict():
            if obj.created_by_id != self.user.pk:
            if obj.created_by_id != self.user.pk and vars_are_encrypted(config.extra_data):
                if self.save_messages:
                    self.messages['detail'] = _('Job was launched with prompts provided by another user.')
                    self.messages['detail'] = _('Job was launched with secret prompts provided by another user.')
                return False
            if not JobLaunchConfigAccess(self.user).can_add({'reference_obj': config}):
                if self.save_messages:
@@ -2178,7 +2264,7 @@ class JobEventAccess(BaseAccess):
    '''

    model = JobEvent
    prefetch_related = ('hosts', 'children', 'job__job_template', 'host',)
    prefetch_related = ('hosts', 'job__job_template', 'host',)

    def filtered_queryset(self):
        return self.model.objects.filter(
@@ -2288,11 +2374,6 @@ class UnifiedJobTemplateAccess(BaseAccess):
            Q(inventorysource__inventory__id__in=Inventory._accessible_pk_qs(
                Inventory, self.user, 'read_role')))

    def get_queryset(self):
        # TODO: remove after the deprecation of v1 API
        qs = super(UnifiedJobTemplateAccess, self).get_queryset()
        return qs.exclude(inventorysource__source="")

    def can_start(self, obj, validate_license=True):
        access_class = access_registry[obj.__class__]
        access_instance = access_class(self.user)
@@ -2397,6 +2478,7 @@ class NotificationTemplateAccess(BaseAccess):
    I can see/use a notification_template if I have permission to
    '''
    model = NotificationTemplate
    prefetch_related = ('created_by', 'modified_by', 'organization')

    def filtered_queryset(self):
        return self.model.objects.filter(
@@ -2583,6 +2665,7 @@ class ActivityStreamAccess(BaseAccess):
class CustomInventoryScriptAccess(BaseAccess):

    model = CustomInventoryScript
    prefetch_related = ('created_by', 'modified_by', 'organization')

    def filtered_queryset(self):
        return self.model.accessible_objects(self.user, 'read_role').all()
@@ -2616,6 +2699,17 @@ class RoleAccess(BaseAccess):
    '''

    model = Role
    prefetch_related = ('content_type',)

    def filtered_queryset(self):
        result = Role.visible_roles(self.user)
        # Sanity check: is the requesting user an orphaned non-admin/auditor?
        # if yes, make system admin/auditor mandatorily visible.
        if not self.user.is_superuser and not self.user.is_system_auditor and not self.user.organizations.exists():
            mandatories = ('system_administrator', 'system_auditor')
            super_qs = Role.objects.filter(singleton_name__in=mandatories)
            result = result | super_qs
        return result

    def can_read(self, obj):
        if not obj:
@@ -2635,10 +2729,6 @@ class RoleAccess(BaseAccess):

    @check_superuser
    def can_unattach(self, obj, sub_obj, relationship, data=None, skip_sub_obj_read_check=False):
        if isinstance(obj.content_object, Team):
            if not settings.MANAGE_ORGANIZATION_AUTH and not self.user.is_superuser:
                return False

        if not skip_sub_obj_read_check and relationship in ['members', 'member_role.parents', 'parents']:
            # If we are unattaching a team Role, check the Team read access
            if relationship == 'parents':
@@ -2650,18 +2740,22 @@ class RoleAccess(BaseAccess):

        # Being a user in the member_role or admin_role of an organization grants
        # administrators of that Organization the ability to edit that user. To prevent
        # unwanted escalations lets ensure that the Organization administartor has the abilty
        # unwanted escalations let's ensure that the Organization administrator has the ability
        # to admin the user being added to the role.
        if (isinstance(obj.content_object, Organization) and
                obj.role_field in (Organization.member_role.field.parent_role + ['member_role'])):
        if isinstance(obj.content_object, Organization) and obj.role_field in ['admin_role', 'member_role']:
            if not isinstance(sub_obj, User):
                logger.error('Unexpected attempt to associate {} with organization role.'.format(sub_obj))
                return False
            if not settings.MANAGE_ORGANIZATION_AUTH and not self.user.is_superuser:
                return False
            if not UserAccess(self.user).can_admin(sub_obj, None, allow_orphans=True):
                return False

        if isinstance(obj.content_object, ResourceMixin) and \
                self.user in obj.content_object.admin_role:
        if isinstance(obj.content_object, Team) and obj.role_field in ['admin_role', 'member_role']:
            if not settings.MANAGE_ORGANIZATION_AUTH and not self.user.is_superuser:
                return False

        if isinstance(obj.content_object, ResourceMixin) and self.user in obj.content_object.admin_role:
            return True
        return False
1 awx/main/analytics/__init__.py Normal file
@@ -0,0 +1 @@
from .core import register, gather, ship  # noqa
282 awx/main/analytics/collectors.py Normal file
@@ -0,0 +1,282 @@
import os
import os.path
import platform

from django.db import connection
from django.db.models import Count
from django.conf import settings
from django.utils.timezone import now

from awx.conf.license import get_license
from awx.main.utils import (get_awx_version, get_ansible_version,
                            get_custom_venv_choices, camelcase_to_underscore)
from awx.main import models
from django.contrib.sessions.models import Session
from awx.main.analytics import register

'''
This module is used to define metrics collected by awx.main.analytics.gather()
Each function is decorated with a key name, and should return a data
structure that can be serialized to JSON

@register('something')
def something(since):
    # the generated archive will contain a `something.json` w/ this JSON
    return {'some': 'json'}

All functions - when called - will be passed a datetime.datetime object,
`since`, which represents the last time analytics were gathered (some metrics
functions - like those that return metadata about playbook runs, may return
data _since_ the last report date - i.e., new data in the last 24 hours)
'''


@register('config')
def config(since):
    license_info = get_license(show_key=False)
    install_type = 'traditional'
    if os.environ.get('container') == 'oci':
        install_type = 'openshift'
    elif 'KUBERNETES_SERVICE_PORT' in os.environ:
        install_type = 'k8s'
    return {
        'platform': {
            'system': platform.system(),
            'dist': platform.dist(),
            'release': platform.release(),
            'type': install_type,
        },
        'install_uuid': settings.INSTALL_UUID,
        'instance_uuid': settings.SYSTEM_UUID,
        'tower_url_base': settings.TOWER_URL_BASE,
        'tower_version': get_awx_version(),
        'ansible_version': get_ansible_version(),
        'license_type': license_info.get('license_type', 'UNLICENSED'),
        'free_instances': license_info.get('free_instances', 0),
        'license_expiry': license_info.get('time_remaining', 0),
        'pendo_tracking': settings.PENDO_TRACKING_STATE,
        'authentication_backends': settings.AUTHENTICATION_BACKENDS,
        'logging_aggregators': settings.LOG_AGGREGATOR_LOGGERS,
        'external_logger_enabled': settings.LOG_AGGREGATOR_ENABLED,
        'external_logger_type': getattr(settings, 'LOG_AGGREGATOR_TYPE', None),
    }


@register('counts')
def counts(since):
    counts = {}
    for cls in (models.Organization, models.Team, models.User,
                models.Inventory, models.Credential, models.Project,
                models.JobTemplate, models.WorkflowJobTemplate,
                models.Host, models.Schedule, models.CustomInventoryScript,
                models.NotificationTemplate):
        counts[camelcase_to_underscore(cls.__name__)] = cls.objects.count()

    venvs = get_custom_venv_choices()
    counts['custom_virtualenvs'] = len([
        v for v in venvs
        if os.path.basename(v.rstrip('/')) != 'ansible'
    ])

    inv_counts = dict(models.Inventory.objects.order_by().values_list('kind').annotate(Count('kind')))
    inv_counts['normal'] = inv_counts.get('', 0)
    inv_counts.pop('', None)
    inv_counts['smart'] = inv_counts.get('smart', 0)
    counts['inventories'] = inv_counts

    counts['unified_job'] = models.UnifiedJob.objects.exclude(launch_type='sync').count()  # excludes implicit project_updates
    counts['active_host_count'] = models.Host.objects.active_count()
    active_sessions = Session.objects.filter(expire_date__gte=now()).count()
    active_user_sessions = models.UserSessionMembership.objects.select_related('session').filter(session__expire_date__gte=now()).count()
    active_anonymous_sessions = active_sessions - active_user_sessions
    counts['active_sessions'] = active_sessions
    counts['active_user_sessions'] = active_user_sessions
    counts['active_anonymous_sessions'] = active_anonymous_sessions
    counts['running_jobs'] = models.UnifiedJob.objects.exclude(launch_type='sync').filter(status__in=('running', 'waiting',)).count()
    return counts


@register('org_counts')
def org_counts(since):
    counts = {}
    for org in models.Organization.objects.annotate(num_users=Count('member_role__members', distinct=True),
                                                    num_teams=Count('teams', distinct=True)).values('name', 'id', 'num_users', 'num_teams'):
        counts[org['id']] = {'name': org['name'],
                             'users': org['num_users'],
                             'teams': org['num_teams']
                             }
    return counts


@register('cred_type_counts')
def cred_type_counts(since):
    counts = {}
    for cred_type in models.CredentialType.objects.annotate(num_credentials=Count(
            'credentials', distinct=True)).values('name', 'id', 'managed_by_tower', 'num_credentials'):
        counts[cred_type['id']] = {'name': cred_type['name'],
                                   'credential_count': cred_type['num_credentials'],
                                   'managed_by_tower': cred_type['managed_by_tower']
                                   }
    return counts


@register('inventory_counts')
def inventory_counts(since):
    counts = {}
    for inv in models.Inventory.objects.filter(kind='').annotate(num_sources=Count('inventory_sources', distinct=True),
                                                                 num_hosts=Count('hosts', distinct=True)).only('id', 'name', 'kind'):
        counts[inv.id] = {'name': inv.name,
                          'kind': inv.kind,
                          'hosts': inv.num_hosts,
                          'sources': inv.num_sources
                          }

    for smart_inv in models.Inventory.objects.filter(kind='smart'):
        counts[smart_inv.id] = {'name': smart_inv.name,
                                'kind': smart_inv.kind,
                                'num_hosts': smart_inv.hosts.count(),
                                'num_sources': smart_inv.inventory_sources.count()
                                }
    return counts


@register('projects_by_scm_type')
def projects_by_scm_type(since):
    counts = dict(
        (t[0] or 'manual', 0)
        for t in models.Project.SCM_TYPE_CHOICES
    )
    for result in models.Project.objects.values('scm_type').annotate(
        count=Count('scm_type')
    ).order_by('scm_type'):
        counts[result['scm_type'] or 'manual'] = result['count']
    return counts


def _get_isolated_datetime(last_check):
    if last_check:
        return last_check.isoformat()
    return last_check


@register('instance_info')
def instance_info(since):
    info = {}
    instances = models.Instance.objects.values_list('hostname').values(
        'uuid', 'version', 'capacity', 'cpu', 'memory', 'managed_by_policy', 'hostname', 'last_isolated_check', 'enabled')
    for instance in instances:
        instance_info = {
            'uuid': instance['uuid'],
            'version': instance['version'],
            'capacity': instance['capacity'],
            'cpu': instance['cpu'],
            'memory': instance['memory'],
            'managed_by_policy': instance['managed_by_policy'],
            'last_isolated_check': _get_isolated_datetime(instance['last_isolated_check']),
            'enabled': instance['enabled']
        }
        info[instance['uuid']] = instance_info
    return info


@register('job_counts')
def job_counts(since):
    counts = {}
    counts['total_jobs'] = models.UnifiedJob.objects.exclude(launch_type='sync').count()
    counts['status'] = dict(models.UnifiedJob.objects.exclude(launch_type='sync').values_list('status').annotate(Count('status')).order_by())
    counts['launch_type'] = dict(models.UnifiedJob.objects.exclude(launch_type='sync').values_list(
        'launch_type').annotate(Count('launch_type')).order_by())
    return counts


@register('job_instance_counts')
def job_instance_counts(since):
    counts = {}
    job_types = models.UnifiedJob.objects.exclude(launch_type='sync').values_list(
        'execution_node', 'launch_type').annotate(job_launch_type=Count('launch_type')).order_by()
    for job in job_types:
        counts.setdefault(job[0], {}).setdefault('launch_type', {})[job[1]] = job[2]

    job_statuses = models.UnifiedJob.objects.exclude(launch_type='sync').values_list(
        'execution_node', 'status').annotate(job_status=Count('status')).order_by()
    for job in job_statuses:
        counts.setdefault(job[0], {}).setdefault('status', {})[job[1]] = job[2]
    return counts


# Copies Job Events from db to a .csv to be shipped
def copy_tables(since, full_path):
    def _copy_table(table, query, path):
        file_path = os.path.join(path, table + '_table.csv')
        file = open(file_path, 'w', encoding='utf-8')
        with connection.cursor() as cursor:
            cursor.copy_expert(query, file)
        file.close()
        return file_path

    events_query = '''COPY (SELECT main_jobevent.id,
                             main_jobevent.created,
                             main_jobevent.uuid,
                             main_jobevent.parent_uuid,
                             main_jobevent.event,
                             main_jobevent.event_data::json->'task_action' AS task_action,
                             main_jobevent.failed,
                             main_jobevent.changed,
                             main_jobevent.playbook,
                             main_jobevent.play,
                             main_jobevent.task,
                             main_jobevent.role,
                             main_jobevent.job_id,
                             main_jobevent.host_id,
                             main_jobevent.host_name
                      FROM main_jobevent
                      WHERE main_jobevent.created > {}
                      ORDER BY main_jobevent.id ASC) TO STDOUT WITH CSV HEADER'''.format(since.strftime("'%Y-%m-%d %H:%M:%S'"))
    _copy_table(table='events', query=events_query, path=full_path)

    unified_job_query = '''COPY (SELECT main_unifiedjob.id,
                             main_unifiedjob.polymorphic_ctype_id,
                             django_content_type.model,
                             main_unifiedjob.created,
                             main_unifiedjob.name,
                             main_unifiedjob.unified_job_template_id,
                             main_unifiedjob.launch_type,
                             main_unifiedjob.schedule_id,
                             main_unifiedjob.execution_node,
                             main_unifiedjob.controller_node,
                             main_unifiedjob.cancel_flag,
                             main_unifiedjob.status,
                             main_unifiedjob.failed,
                             main_unifiedjob.started,
                             main_unifiedjob.finished,
                             main_unifiedjob.elapsed,
                             main_unifiedjob.job_explanation,
                             main_unifiedjob.instance_group_id
                      FROM main_unifiedjob, django_content_type
                      WHERE main_unifiedjob.created > {} AND
                            main_unifiedjob.polymorphic_ctype_id = django_content_type.id AND
                            main_unifiedjob.launch_type != 'sync'
                      ORDER BY main_unifiedjob.id ASC) TO STDOUT WITH CSV HEADER'''.format(since.strftime("'%Y-%m-%d %H:%M:%S'"))
    _copy_table(table='unified_jobs', query=unified_job_query, path=full_path)

    unified_job_template_query = '''COPY (SELECT main_unifiedjobtemplate.id,
                             main_unifiedjobtemplate.polymorphic_ctype_id,
                             django_content_type.model,
                             main_unifiedjobtemplate.created,
                             main_unifiedjobtemplate.modified,
                             main_unifiedjobtemplate.created_by_id,
                             main_unifiedjobtemplate.modified_by_id,
                             main_unifiedjobtemplate.name,
                             main_unifiedjobtemplate.current_job_id,
                             main_unifiedjobtemplate.last_job_id,
                             main_unifiedjobtemplate.last_job_failed,
                             main_unifiedjobtemplate.last_job_run,
                             main_unifiedjobtemplate.next_job_run,
                             main_unifiedjobtemplate.next_schedule_id,
                             main_unifiedjobtemplate.status
                      FROM main_unifiedjobtemplate, django_content_type
                      WHERE main_unifiedjobtemplate.polymorphic_ctype_id = django_content_type.id
                      ORDER BY main_unifiedjobtemplate.id ASC) TO STDOUT WITH CSV HEADER'''.format(since.strftime("'%Y-%m-%d %H:%M:%S'"))
    _copy_table(table='unified_job_template', query=unified_job_template_query, path=full_path)
    return
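# A minimal sketch (an assumption, not part of the diff) of how a new
# collector would be added alongside those above; gather() in core.py finds
# it via the __awx_analytics_key__ attribute that @register sets. The
# 'orphaned_jobs' key is hypothetical.
#
#   @register('orphaned_jobs')
#   def orphaned_jobs(since):
#       # lands in orphaned_jobs.json inside the gathered tarball
#       return {'count': models.UnifiedJob.objects.filter(unified_job_template=None).count()}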
146 awx/main/analytics/core.py Normal file
@@ -0,0 +1,146 @@
import inspect
import json
import logging
import os
import os.path
import tempfile
import shutil
import subprocess

from django.conf import settings
from django.utils.encoding import smart_str
from django.utils.timezone import now, timedelta
from rest_framework.exceptions import PermissionDenied

from awx.conf.license import get_license
from awx.main.models import Job
from awx.main.access import access_registry
from awx.main.models.ha import TowerAnalyticsState


__all__ = ['register', 'gather', 'ship']


logger = logging.getLogger('awx.main.analytics')


def _valid_license():
    try:
        if get_license(show_key=False).get('license_type', 'UNLICENSED') == 'open':
            return False
        access_registry[Job](None).check_license()
    except PermissionDenied:
        logger.exception("A valid license was not found:")
        return False
    return True


def register(key):
    """
    A decorator used to register a function as a metric collector.

    Decorated functions should return JSON-serializable objects.

    @register('projects_by_scm_type')
    def projects_by_scm_type():
        return {'git': 5, 'svn': 1, 'hg': 0}
    """

    def decorate(f):
        f.__awx_analytics_key__ = key
        return f

    return decorate


def gather(dest=None, module=None):
    """
    Gather all defined metrics and write them as JSON files in a .tgz

    :param dest: the (optional) absolute path to write a compressed tarball
    :param module: the module to search for registered analytic collector
                   functions; defaults to awx.main.analytics.collectors
    """

    run_now = now()
    state = TowerAnalyticsState.get_solo()
    last_run = state.last_run
    logger.debug("Last analytics run was: {}".format(last_run))

    max_interval = now() - timedelta(days=7)
    if last_run < max_interval or not last_run:
        last_run = max_interval


    if _valid_license() is False:
        logger.exception("Invalid License provided, or No License Provided")
        return "Error: Invalid License provided, or No License Provided"

    if not settings.INSIGHTS_TRACKING_STATE:
        logger.error("Insights analytics not enabled")
        return

    if module is None:
        from awx.main.analytics import collectors
        module = collectors

    dest = dest or tempfile.mkdtemp(prefix='awx_analytics')
    for name, func in inspect.getmembers(module):
        if inspect.isfunction(func) and hasattr(func, '__awx_analytics_key__'):
            key = func.__awx_analytics_key__
            path = '{}.json'.format(os.path.join(dest, key))
            with open(path, 'w', encoding='utf-8') as f:
                try:
                    json.dump(func(last_run), f)
                except Exception:
                    logger.exception("Could not generate metric {}.json".format(key))
                    f.close()
                    os.remove(f.name)
    try:
        collectors.copy_tables(since=last_run, full_path=dest)
    except Exception:
        logger.exception("Could not copy tables")

    # can't use isoformat() since it has colons, which GNU tar doesn't like
    tarname = '_'.join([
        settings.SYSTEM_UUID,
        run_now.strftime('%Y-%m-%d-%H%M%S%z')
    ])
    tgz = shutil.make_archive(
        os.path.join(os.path.dirname(dest), tarname),
        'gztar',
        dest
    )
    shutil.rmtree(dest)
    return tgz
def ship(path):
    """
    Ship gathered metrics via the Insights agent
    """
    try:
        agent = 'insights-client'
        if shutil.which(agent) is None:
            logger.error('could not find {} on PATH'.format(agent))
            return
        logger.debug('shipping analytics file: {}'.format(path))
        try:
            cmd = [
                agent, '--payload', path, '--content-type', settings.INSIGHTS_AGENT_MIME
            ]
            output = smart_str(subprocess.check_output(cmd, timeout=60 * 5))
            logger.debug(output)
            # reset the `last_run` when data is shipped
            run_now = now()
            state = TowerAnalyticsState.get_solo()
            state.last_run = run_now
            state.save()

        except subprocess.CalledProcessError:
            logger.exception('{} failure:'.format(cmd))
        except subprocess.TimeoutExpired:
            logger.exception('{} timeout:'.format(cmd))
    finally:
        # cleanup tar.gz
        os.remove(path)
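# A usage sketch (an assumption, not part of the diff): gather() returns the
# path of the tarball it built, which ship() then uploads via the Insights
# agent and removes.
#
#   from awx.main.analytics import gather, ship
#
#   tarball = gather()
#   if tarball and os.path.exists(tarball):
#       ship(tarball)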
121 awx/main/analytics/metrics.py Normal file
@@ -0,0 +1,121 @@
from django.conf import settings
from prometheus_client import (
    REGISTRY,
    PROCESS_COLLECTOR,
    PLATFORM_COLLECTOR,
    GC_COLLECTOR,
    Gauge,
    Info,
    generate_latest
)

from awx.conf.license import get_license
from awx.main.utils import (get_awx_version, get_ansible_version)
from awx.main.analytics.collectors import (
    counts,
    instance_info,
    job_instance_counts,
)


REGISTRY.unregister(PROCESS_COLLECTOR)
REGISTRY.unregister(PLATFORM_COLLECTOR)
REGISTRY.unregister(GC_COLLECTOR)

SYSTEM_INFO = Info('awx_system', 'AWX System Information')
ORG_COUNT = Gauge('awx_organizations_total', 'Number of organizations')
USER_COUNT = Gauge('awx_users_total', 'Number of users')
TEAM_COUNT = Gauge('awx_teams_total', 'Number of teams')
INV_COUNT = Gauge('awx_inventories_total', 'Number of inventories')
PROJ_COUNT = Gauge('awx_projects_total', 'Number of projects')
JT_COUNT = Gauge('awx_job_templates_total', 'Number of job templates')
WFJT_COUNT = Gauge('awx_workflow_job_templates_total', 'Number of workflow job templates')
HOST_COUNT = Gauge('awx_hosts_total', 'Number of hosts', ['type',])
SCHEDULE_COUNT = Gauge('awx_schedules_total', 'Number of schedules')
INV_SCRIPT_COUNT = Gauge('awx_inventory_scripts_total', 'Number of inventory scripts')
USER_SESSIONS = Gauge('awx_sessions_total', 'Number of sessions', ['type',])
CUSTOM_VENVS = Gauge('awx_custom_virtualenvs_total', 'Number of virtualenvs')
RUNNING_JOBS = Gauge('awx_running_jobs_total', 'Number of running jobs on the Tower system')

INSTANCE_CAPACITY = Gauge('awx_instance_capacity', 'Capacity of each node in a Tower system', ['instance_uuid',])
INSTANCE_CPU = Gauge('awx_instance_cpu', 'CPU cores on each node in a Tower system', ['instance_uuid',])
INSTANCE_MEMORY = Gauge('awx_instance_memory', 'RAM (Kb) on each node in a Tower system', ['instance_uuid',])
INSTANCE_INFO = Info('awx_instance', 'Info about each node in a Tower system', ['instance_uuid',])
INSTANCE_LAUNCH_TYPE = Gauge('awx_instance_launch_type_total', 'Type of Job launched', ['node', 'launch_type',])
INSTANCE_STATUS = Gauge('awx_instance_status_total', 'Status of Job launched', ['node', 'status',])

LICENSE_INSTANCE_TOTAL = Gauge('awx_license_instance_total', 'Total number of managed hosts provided by your license')
LICENSE_INSTANCE_FREE = Gauge('awx_license_instance_free', 'Number of remaining managed hosts provided by your license')


def metrics():
    license_info = get_license(show_key=False)
    SYSTEM_INFO.info({
        'install_uuid': settings.INSTALL_UUID,
        'insights_analytics': str(settings.INSIGHTS_TRACKING_STATE),
        'tower_url_base': settings.TOWER_URL_BASE,
        'tower_version': get_awx_version(),
        'ansible_version': get_ansible_version(),
        'license_type': license_info.get('license_type', 'UNLICENSED'),
        'license_expiry': str(license_info.get('time_remaining', 0)),
        'pendo_tracking': settings.PENDO_TRACKING_STATE,
        'external_logger_enabled': str(settings.LOG_AGGREGATOR_ENABLED),
        'external_logger_type': getattr(settings, 'LOG_AGGREGATOR_TYPE', 'None')
    })

    LICENSE_INSTANCE_TOTAL.set(str(license_info.get('available_instances', 0)))
    LICENSE_INSTANCE_FREE.set(str(license_info.get('free_instances', 0)))

    current_counts = counts(None)

    ORG_COUNT.set(current_counts['organization'])
    USER_COUNT.set(current_counts['user'])
    TEAM_COUNT.set(current_counts['team'])
    INV_COUNT.set(current_counts['inventory'])
    PROJ_COUNT.set(current_counts['project'])
    JT_COUNT.set(current_counts['job_template'])
    WFJT_COUNT.set(current_counts['workflow_job_template'])

    HOST_COUNT.labels(type='all').set(current_counts['host'])
    HOST_COUNT.labels(type='active').set(current_counts['active_host_count'])

    SCHEDULE_COUNT.set(current_counts['schedule'])
    INV_SCRIPT_COUNT.set(current_counts['custom_inventory_script'])
    CUSTOM_VENVS.set(current_counts['custom_virtualenvs'])

    USER_SESSIONS.labels(type='all').set(current_counts['active_sessions'])
    USER_SESSIONS.labels(type='user').set(current_counts['active_user_sessions'])
    USER_SESSIONS.labels(type='anonymous').set(current_counts['active_anonymous_sessions'])

    RUNNING_JOBS.set(current_counts['running_jobs'])


    instance_data = instance_info(None)
    for uuid in instance_data:
        INSTANCE_CAPACITY.labels(instance_uuid=uuid).set(instance_data[uuid]['capacity'])
        INSTANCE_CPU.labels(instance_uuid=uuid).set(instance_data[uuid]['cpu'])
        INSTANCE_MEMORY.labels(instance_uuid=uuid).set(instance_data[uuid]['memory'])
        INSTANCE_INFO.labels(instance_uuid=uuid).info({
            'enabled': str(instance_data[uuid]['enabled']),
            'last_isolated_check': getattr(instance_data[uuid], 'last_isolated_check', 'None'),
            'managed_by_policy': str(instance_data[uuid]['managed_by_policy']),
            'version': instance_data[uuid]['version']
        })

    instance_data = job_instance_counts(None)
    for node in instance_data:
        # skipping internal execution node (for system jobs)
        if node == '':
            continue
        types = instance_data[node].get('launch_type', {})
        for launch_type, value in types.items():
            INSTANCE_LAUNCH_TYPE.labels(node=node, launch_type=launch_type).set(value)
        statuses = instance_data[node].get('status', {})
        for status, value in statuses.items():
            INSTANCE_STATUS.labels(node=node, status=status).set(value)


    return generate_latest()


__all__ = ['metrics']
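# A sketch of how metrics() could be served over HTTP (hypothetical wiring;
# the actual AWX URL and view for this are not shown in this diff).
# generate_latest() emits Prometheus' plain-text exposition format.
#
#   from django.http import HttpResponse
#   from awx.main.analytics.metrics import metrics
#
#   def metrics_view(request):
#       return HttpResponse(metrics(), content_type='text/plain; version=0.0.4')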
@@ -21,7 +21,6 @@ register(
    help_text=_('Enable capturing activity for the activity stream.'),
    category=_('System'),
    category_slug='system',
    feature_required='activity_streams',
)

register(
@@ -31,7 +30,6 @@ register(
    help_text=_('Enable capturing activity for the activity stream when running inventory sync.'),
    category=_('System'),
    category_slug='system',
    feature_required='activity_streams',
)

register(
@@ -120,12 +118,21 @@ register(
    default=_load_default_license_from_file,
    label=_('License'),
    help_text=_('The license controls which features and functionality are '
                'enabled. Use /api/v1/config/ to update or change '
                'enabled. Use /api/v2/config/ to update or change '
                'the license.'),
    category=_('System'),
    category_slug='system',
)

register(
    'INSTALL_UUID',
    field_class=fields.CharField,
    label=_('Unique identifier for an AWX/Tower installation'),
    category=_('System'),
    category_slug='system',
    read_only=True,
)

register(
    'CUSTOM_VENV_PATHS',
    field_class=fields.StringListPathField,
@@ -301,6 +308,16 @@ register(
    placeholder={'HTTP_PROXY': 'myproxy.local:8080'},
)

register(
    'INSIGHTS_TRACKING_STATE',
    field_class=fields.BooleanField,
    default=False,
    label=_('Gather data for Automation Insights'),
    help_text=_('Enables Tower to gather data on automation and send it to Red Hat Insights.'),
    category=_('System'),
    category_slug='system',
)

register(
    'AWX_ROLES_ENABLED',
    field_class=fields.BooleanField,
@@ -553,6 +570,16 @@ register(
)


register(
    'BROKER_DURABILITY',
    field_class=fields.BooleanField,
    label=_('Message Durability'),
    help_text=_('When set (the default), underlying queues will be persisted to disk. Disable this to enable higher message bus throughput.'),
    category=_('System'),
    category_slug='system',
)


def logging_validate(serializer, attrs):
    if not serializer.instance or \
            not hasattr(serializer.instance, 'LOG_AGGREGATOR_HOST') or \
@@ -37,6 +37,17 @@ ENV_BLACKLIST = frozenset((
    'INVENTORY_ID', 'INVENTORY_SOURCE_ID', 'INVENTORY_UPDATE_ID',
    'AD_HOC_COMMAND_ID', 'REST_API_URL', 'REST_API_TOKEN', 'MAX_EVENT_RES',
    'CALLBACK_QUEUE', 'CALLBACK_CONNECTION', 'CACHE',
    'JOB_CALLBACK_DEBUG', 'INVENTORY_HOSTVARS', 'FACT_QUEUE',
    'JOB_CALLBACK_DEBUG', 'INVENTORY_HOSTVARS',
    'AWX_HOST', 'PROJECT_REVISION'
))

# loggers that may be called in process of emitting a log
LOGGER_BLACKLIST = (
    'awx.main.utils.handlers',
    'awx.main.utils.formatters',
    'awx.main.utils.filters',
    'awx.main.utils.encryption',
    'awx.main.utils.log',
    # loggers that may be called getting logging settings
    'awx.conf'
)
awx/main/credential_plugins/aim.py (new file, 125 lines)
@@ -0,0 +1,125 @@
from .plugin import CredentialPlugin

import os
import stat
import tempfile
import threading
from urllib.parse import quote, urlencode, urljoin

from django.utils.translation import ugettext_lazy as _
import requests


aim_inputs = {
    'fields': [{
        'id': 'url',
        'label': _('CyberArk AIM URL'),
        'type': 'string',
        'format': 'url',
    }, {
        'id': 'app_id',
        'label': _('Application ID'),
        'type': 'string',
        'secret': True,
    }, {
        'id': 'client_key',
        'label': _('Client Key'),
        'type': 'string',
        'secret': True,
        'multiline': True,
    }, {
        'id': 'client_cert',
        'label': _('Client Certificate'),
        'type': 'string',
        'secret': True,
        'multiline': True,
    }, {
        'id': 'verify',
        'label': _('Verify SSL Certificates'),
        'type': 'boolean',
        'default': True,
    }],
    'metadata': [{
        'id': 'object_query',
        'label': _('Object Query'),
        'type': 'string',
        'help_text': _('Lookup query for the object. Ex: "Safe=TestSafe;Object=testAccountName123"'),
    }, {
        'id': 'object_query_format',
        'label': _('Object Query Format'),
        'type': 'string',
        'default': 'Exact',
        'choices': ['Exact', 'Regexp']
    }, {
        'id': 'reason',
        'label': _('Reason'),
        'type': 'string',
        'help_text': _('Object request reason. This is only needed if it is required by the object\'s policy.')
    }],
    'required': ['url', 'app_id', 'object_query'],
}


def create_temporary_fifo(data):
    """Open fifo named pipe in a new thread using a temporary file path. The
    thread blocks until data is read from the pipe.

    Returns the path to the fifo.

    :param data(bytes): Data to write to the pipe.
    """
    path = os.path.join(tempfile.mkdtemp(), next(tempfile._get_candidate_names()))
    os.mkfifo(path, stat.S_IRUSR | stat.S_IWUSR)

    threading.Thread(
        target=lambda p, d: open(p, 'wb').write(d),
        args=(path, data)
    ).start()
    return path


def aim_backend(**kwargs):
    url = kwargs['url']
    client_cert = kwargs.get('client_cert', None)
    client_key = kwargs.get('client_key', None)
    verify = kwargs['verify']
    app_id = kwargs['app_id']
    object_query = kwargs['object_query']
    object_query_format = kwargs['object_query_format']
    reason = kwargs.get('reason', None)

    query_params = {
        'AppId': app_id,
        'Query': object_query,
        'QueryFormat': object_query_format,
    }
    if reason:
        query_params['reason'] = reason

    request_qs = '?' + urlencode(query_params, quote_via=quote)
    request_url = urljoin(url, '/'.join(['AIMWebService', 'api', 'Accounts']))

    cert = None
    if client_cert and client_key:
        cert = (
            create_temporary_fifo(client_cert.encode()),
            create_temporary_fifo(client_key.encode())
        )
    elif client_cert:
        cert = create_temporary_fifo(client_cert.encode())

    res = requests.get(
        request_url + request_qs,
        timeout=30,
        cert=cert,
        verify=verify,
    )
    res.raise_for_status()
    return res.json()['Content']


aim_plugin = CredentialPlugin(
    'CyberArk AIM Secret Lookup',
    inputs=aim_inputs,
    backend=aim_backend
)
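create_temporary_fifo is the trick that lets requests read a client certificate from "a file" without the secret ever touching disk: open() on a fifo blocks until the other end opens it, so the writer thread only writes once a reader shows up. A standalone sketch of the same mechanism, stdlib only (the filename is a placeholder):

# Standalone sketch of the named-pipe handoff used by create_temporary_fifo
import os
import stat
import tempfile
import threading

def fifo_with(data: bytes) -> str:
    path = os.path.join(tempfile.mkdtemp(), 'secret.pem')  # placeholder name
    os.mkfifo(path, stat.S_IRUSR | stat.S_IWUSR)
    # the write blocks until a reader (e.g. requests/OpenSSL) opens the pipe
    threading.Thread(target=lambda: open(path, 'wb').write(data)).start()
    return path

path = fifo_with(b'-----BEGIN CERTIFICATE-----\n...')
with open(path, 'rb') as f:   # the reader side unblocks the writer thread
    print(f.read()[:27])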
awx/main/credential_plugins/azure_kv.py (new file, 65 lines)
@@ -0,0 +1,65 @@
from .plugin import CredentialPlugin

from django.utils.translation import ugettext_lazy as _
from azure.keyvault import KeyVaultClient, KeyVaultAuthentication
from azure.common.credentials import ServicePrincipalCredentials


azure_keyvault_inputs = {
    'fields': [{
        'id': 'url',
        'label': _('Vault URL (DNS Name)'),
        'type': 'string',
        'format': 'url',
    }, {
        'id': 'client',
        'label': _('Client ID'),
        'type': 'string'
    }, {
        'id': 'secret',
        'label': _('Client Secret'),
        'type': 'string',
        'secret': True,
    }, {
        'id': 'tenant',
        'label': _('Tenant ID'),
        'type': 'string'
    }],
    'metadata': [{
        'id': 'secret_field',
        'label': _('Secret Name'),
        'type': 'string',
        'help_text': _('The name of the secret to look up.'),
    }, {
        'id': 'secret_version',
        'label': _('Secret Version'),
        'type': 'string',
        'help_text': _('Used to specify a specific secret version (if left empty, the latest version will be used).'),
    }],
    'required': ['url', 'client', 'secret', 'tenant', 'secret_field'],
}


def azure_keyvault_backend(**kwargs):
    url = kwargs['url']

    def auth_callback(server, resource, scope):
        credentials = ServicePrincipalCredentials(
            url=url,
            client_id=kwargs['client'],
            secret=kwargs['secret'],
            tenant=kwargs['tenant'],
            resource="https://vault.azure.net",
        )
        token = credentials.token
        return token['token_type'], token['access_token']

    kv = KeyVaultClient(KeyVaultAuthentication(auth_callback))
    return kv.get_secret(url, kwargs['secret_field'], kwargs.get('secret_version', '')).value


azure_keyvault_plugin = CredentialPlugin(
    'Microsoft Azure Key Vault',
    inputs=azure_keyvault_inputs,
    backend=azure_keyvault_backend
)
awx/main/credential_plugins/conjur.py (new file, 121 lines)
@@ -0,0 +1,121 @@
from .plugin import CredentialPlugin

import base64
import os
import stat
import tempfile
import threading
from urllib.parse import urljoin, quote_plus

from django.utils.translation import ugettext_lazy as _
import requests


conjur_inputs = {
    'fields': [{
        'id': 'url',
        'label': _('Conjur URL'),
        'type': 'string',
        'format': 'url',
    }, {
        'id': 'api_key',
        'label': _('API Key'),
        'type': 'string',
        'secret': True,
    }, {
        'id': 'account',
        'label': _('Account'),
        'type': 'string',
    }, {
        'id': 'username',
        'label': _('Username'),
        'type': 'string',
    }, {
        'id': 'cacert',
        'label': _('Public Key Certificate'),
        'type': 'string',
        'multiline': True
    }],
    'metadata': [{
        'id': 'secret_path',
        'label': _('Secret Identifier'),
        'type': 'string',
        'help_text': _('The identifier for the secret e.g., /some/identifier'),
    }, {
        'id': 'secret_version',
        'label': _('Secret Version'),
        'type': 'string',
        'help_text': _('Used to specify a specific secret version (if left empty, the latest version will be used).'),
    }],
    'required': ['url', 'api_key', 'account', 'username'],
}


def create_temporary_fifo(data):
    """Open fifo named pipe in a new thread using a temporary file path. The
    thread blocks until data is read from the pipe.

    Returns the path to the fifo.

    :param data(bytes): Data to write to the pipe.
    """
    path = os.path.join(tempfile.mkdtemp(), next(tempfile._get_candidate_names()))
    os.mkfifo(path, stat.S_IRUSR | stat.S_IWUSR)

    threading.Thread(
        target=lambda p, d: open(p, 'wb').write(d),
        args=(path, data)
    ).start()
    return path


def conjur_backend(**kwargs):
    url = kwargs['url']
    api_key = kwargs['api_key']
    account = quote_plus(kwargs['account'])
    username = quote_plus(kwargs['username'])
    secret_path = quote_plus(kwargs['secret_path'])
    version = kwargs.get('secret_version')
    cacert = kwargs.get('cacert', None)

    auth_kwargs = {
        'headers': {'Content-Type': 'text/plain'},
        'data': api_key
    }
    if cacert:
        auth_kwargs['verify'] = create_temporary_fifo(cacert.encode())

    # https://www.conjur.org/api.html#authentication-authenticate-post
    resp = requests.post(
        urljoin(url, '/'.join(['authn', account, username, 'authenticate'])),
        **auth_kwargs
    )
    resp.raise_for_status()
    token = base64.b64encode(resp.content).decode('utf-8')

    lookup_kwargs = {
        'headers': {'Authorization': 'Token token="{}"'.format(token)},
    }
    if cacert:
        lookup_kwargs['verify'] = create_temporary_fifo(cacert.encode())

    # https://www.conjur.org/api.html#secrets-retrieve-a-secret-get
    path = urljoin(url, '/'.join([
        'secrets',
        account,
        'variable',
        secret_path
    ]))
    if version:
        path = '?'.join([path, version])

    resp = requests.get(path, timeout=30, **lookup_kwargs)
    resp.raise_for_status()
    return resp.text


conjur_plugin = CredentialPlugin(
    'CyberArk Conjur Secret Lookup',
    inputs=conjur_inputs,
    backend=conjur_backend
)
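conjur_backend makes two HTTP round-trips: POST the API key to /authn/{account}/{username}/authenticate for a short-lived token, then GET /secrets/... with that token base64-encoded into the Authorization header. A hedged usage sketch, with every value a placeholder:

# Hypothetical invocation; URL, account, and key are placeholders
secret = conjur_backend(
    url='https://conjur.example.com',
    api_key='not-a-real-key',
    account='myorg',
    username='host/awx',
    secret_path='prod/db/password',
    secret_version=None,   # latest version
)
print(secret)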
awx/main/credential_plugins/hashivault.py (new file, 152 lines)
@@ -0,0 +1,152 @@
import copy
import os
import pathlib
from urllib.parse import urljoin

from .plugin import CredentialPlugin

import requests
from django.utils.translation import ugettext_lazy as _


base_inputs = {
    'fields': [{
        'id': 'url',
        'label': _('Server URL'),
        'type': 'string',
        'format': 'url',
        'help_text': _('The URL to the HashiCorp Vault'),
    }, {
        'id': 'token',
        'label': _('Token'),
        'type': 'string',
        'secret': True,
        'help_text': _('The access token used to authenticate to the Vault server'),
    }],
    'metadata': [{
        'id': 'secret_path',
        'label': _('Path to Secret'),
        'type': 'string',
        'help_text': _('The path to the secret e.g., /some-engine/some-secret/'),
    }],
    'required': ['url', 'token', 'secret_path'],
}

hashi_kv_inputs = copy.deepcopy(base_inputs)
hashi_kv_inputs['fields'].append({
    'id': 'api_version',
    'label': _('API Version'),
    'choices': ['v1', 'v2'],
    'help_text': _('API v1 is for static key/value lookups. API v2 is for versioned key/value lookups.'),
    'default': 'v1',
})
hashi_kv_inputs['metadata'].extend([{
    'id': 'secret_key',
    'label': _('Key Name'),
    'type': 'string',
    'help_text': _('The name of the key to look up in the secret.'),
}, {
    'id': 'secret_version',
    'label': _('Secret Version (v2 only)'),
    'type': 'string',
    'help_text': _('Used to specify a specific secret version (if left empty, the latest version will be used).'),
}])
hashi_kv_inputs['required'].extend(['api_version', 'secret_key'])

hashi_ssh_inputs = copy.deepcopy(base_inputs)
hashi_ssh_inputs['metadata'] = [{
    'id': 'public_key',
    'label': _('Unsigned Public Key'),
    'type': 'string',
    'multiline': True,
}] + hashi_ssh_inputs['metadata'] + [{
    'id': 'role',
    'label': _('Role Name'),
    'type': 'string',
    'help_text': _('The name of the role used to sign.')
}, {
    'id': 'valid_principals',
    'label': _('Valid Principals'),
    'type': 'string',
    'help_text': _('Valid principals (either usernames or hostnames) that the certificate should be signed for.'),
}]
hashi_ssh_inputs['required'].extend(['public_key', 'role'])


def kv_backend(**kwargs):
    token = kwargs['token']
    url = urljoin(kwargs['url'], 'v1')
    secret_path = kwargs['secret_path']
    secret_key = kwargs.get('secret_key', None)

    api_version = kwargs['api_version']

    sess = requests.Session()
    sess.headers['Authorization'] = 'Bearer {}'.format(token)
    if api_version == 'v2':
        params = {}
        if kwargs.get('secret_version'):
            params['version'] = kwargs['secret_version']
        try:
            mount_point, *path = pathlib.Path(secret_path.lstrip(os.sep)).parts
            '/'.join(*path)
        except Exception:
            mount_point, path = secret_path, []
        # https://www.vaultproject.io/api/secret/kv/kv-v2.html#read-secret-version
        response = sess.get(
            '/'.join([url, mount_point, 'data'] + path).rstrip('/'),
            params=params,
            timeout=30
        )
        response.raise_for_status()
        json = response.json()['data']
    else:
        # https://www.vaultproject.io/api/secret/kv/kv-v1.html#read-secret
        response = sess.get('/'.join([url, secret_path]).rstrip('/'), timeout=30)
        response.raise_for_status()
        json = response.json()

    if secret_key:
        try:
            return json['data'][secret_key]
        except KeyError:
            raise RuntimeError(
                '{} is not present at {}'.format(secret_key, secret_path)
            )
    return json['data']


def ssh_backend(**kwargs):
    token = kwargs['token']
    url = urljoin(kwargs['url'], 'v1')
    secret_path = kwargs['secret_path']
    role = kwargs['role']

    sess = requests.Session()
    sess.headers['Authorization'] = 'Bearer {}'.format(token)
    json = {
        'public_key': kwargs['public_key']
    }
    if kwargs.get('valid_principals'):
        json['valid_principals'] = kwargs['valid_principals']
    # https://www.vaultproject.io/api/secret/ssh/index.html#sign-ssh-key
    resp = sess.post(
        '/'.join([url, secret_path, 'sign', role]).rstrip('/'),
        json=json,
        timeout=30
    )
    resp.raise_for_status()
    return resp.json()['data']['signed_key']


hashivault_kv_plugin = CredentialPlugin(
    'HashiCorp Vault Secret Lookup',
    inputs=hashi_kv_inputs,
    backend=kv_backend
)

hashivault_ssh_plugin = CredentialPlugin(
    'HashiCorp Vault Signed SSH',
    inputs=hashi_ssh_inputs,
    backend=ssh_backend
)
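The v1/v2 split above matters because KV v2 inserts a literal "data" segment between the mount point and the secret path (secret/data/myapp rather than secret/myapp), which is what the mount_point/path juggling handles. A hedged usage sketch against a v2 engine (server URL and token are placeholders):

# Hypothetical lookup against a KV v2 engine mounted at "secret/"
value = kv_backend(
    url='https://vault.example.com',   # placeholder
    token='s.placeholder',             # placeholder
    api_version='v2',
    secret_path='/secret/myapp',
    secret_key='password',
)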
awx/main/credential_plugins/plugin.py (new file, 3 lines)
@@ -0,0 +1,3 @@
from collections import namedtuple

CredentialPlugin = namedtuple('CredentialPlugin', ['name', 'inputs', 'backend'])
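Because CredentialPlugin is just a namedtuple, each module above registers itself by instantiating it with a display name, an inputs schema, and a backend callable; a consumer only ever touches those three fields. A sketch of that contract (the registry dict is illustrative, not AWX's actual entry-point mechanism):

# Illustrative registry; the real lookup in AWX goes through plugin discovery
from awx.main.credential_plugins.plugin import CredentialPlugin

def noop_backend(**kwargs):
    return kwargs['value']

noop_plugin = CredentialPlugin(
    'No-op Lookup',
    inputs={'fields': [], 'metadata': [], 'required': []},
    backend=noop_backend,
)

registry = {'noop': noop_plugin}
plugin = registry['noop']
print(plugin.name, plugin.backend(value='hello'))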
@@ -4,7 +4,8 @@ import socket

from django.conf import settings

from awx.main.dispatch import get_local_queuename
-from kombu import Connection, Queue, Exchange, Producer, Consumer
+from awx.main.dispatch.kombu import Connection
+from kombu import Queue, Exchange, Producer, Consumer

logger = logging.getLogger('awx.main.dispatch')
awx/main/dispatch/kombu.py (new file, 42 lines)
@@ -0,0 +1,42 @@
from amqp.exceptions import PreconditionFailed
from django.conf import settings
from kombu.connection import Connection as KombuConnection
from kombu.transport import pyamqp

import logging

logger = logging.getLogger('awx.main.dispatch')


__all__ = ['Connection']


class Connection(KombuConnection):

    def __init__(self, *args, **kwargs):
        super(Connection, self).__init__(*args, **kwargs)

        class _Channel(pyamqp.Channel):

            def queue_declare(self, queue, *args, **kwargs):
                kwargs['durable'] = settings.BROKER_DURABILITY
                try:
                    return super(_Channel, self).queue_declare(queue, *args, **kwargs)
                except PreconditionFailed as e:
                    if "inequivalent arg 'durable'" in getattr(e, 'reply_text', None):
                        logger.error(
                            'queue {} durability is not {}, deleting and recreating'.format(
                                queue,
                                kwargs['durable']
                            )
                        )
                        self.queue_delete(queue)
                        return super(_Channel, self).queue_declare(queue, *args, **kwargs)

        class _Connection(pyamqp.Connection):
            Channel = _Channel

        class _Transport(pyamqp.Transport):
            Connection = _Connection

        self.transport_cls = _Transport
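The subclass swaps in a Transport whose Channel forces `durable` to the BROKER_DURABILITY setting on every queue_declare, and, when an existing queue disagrees (the PreconditionFailed case), deletes and redeclares it. Usage is identical to a stock kombu Connection; a hedged sketch, with the broker URL a placeholder:

# Hypothetical usage; the AMQP URL is a placeholder
from awx.main.dispatch.kombu import Connection
from kombu import Queue

with Connection('amqp://guest:guest@localhost//') as conn:
    channel = conn.channel()
    # declared durable (or not) per settings.BROKER_DURABILITY,
    # regardless of what the Queue object asks for
    Queue('tower_broadcast_all')(channel).declare()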
@@ -4,7 +4,9 @@ import sys
from uuid import uuid4

from django.conf import settings
-from kombu import Connection, Exchange, Producer
+from kombu import Exchange, Producer
+
+from awx.main.dispatch.kombu import Connection

logger = logging.getLogger('awx.main.dispatch')
@@ -20,8 +20,8 @@ class CallbackBrokerWorker(BaseWorker):
    A worker implementation that deserializes callback event data and persists
    it into the database.

-   The code that *builds* these types of messages is found in the AWX display
-   callback (`awx.lib.awx_display_callback`).
+   The code that *generates* these types of messages is found in the
+   ansible-runner display callback plugin.
    '''

    MAX_RETRIES = 2
@@ -1,402 +0,0 @@
import json
import os
import shutil
import stat
import tempfile
import time
import logging
from io import StringIO

from django.conf import settings

import awx
from awx.main.expect import run
from awx.main.utils import get_system_task_capacity
from awx.main.queue import CallbackQueueDispatcher

logger = logging.getLogger('awx.isolated.manager')
playbook_logger = logging.getLogger('awx.isolated.manager.playbooks')


class IsolatedManager(object):

    def __init__(self, env, cancelled_callback=None, job_timeout=0,
                 idle_timeout=None):
        """
        :param env: a dict containing environment variables for the
                    subprocess, ala `os.environ`
        :param cancelled_callback: a callable - which returns `True` or `False`
                                   - signifying if the job has been prematurely
                                   cancelled
        :param job_timeout: a timeout (in seconds); if the total job runtime
                            exceeds this, the process will be killed
        :param idle_timeout: a timeout (in seconds); if new output is not
                             sent to stdout in this interval, the process
                             will be terminated
        """
        self.management_env = self._base_management_env()
        self.cancelled_callback = cancelled_callback
        self.job_timeout = job_timeout
        self.idle_timeout = idle_timeout
        self.started_at = None

    @staticmethod
    def _base_management_env():
        '''
        Returns environment variables to use when running a playbook
        that manages the isolated instance.
        Use of normal job callback and other such configurations are avoided.
        '''
        env = dict(os.environ.items())
        env['ANSIBLE_RETRY_FILES_ENABLED'] = 'False'
        env['ANSIBLE_HOST_KEY_CHECKING'] = 'False'
        env['ANSIBLE_LIBRARY'] = os.path.join(os.path.dirname(awx.__file__), 'plugins', 'isolated')
        return env

    @staticmethod
    def _build_args(playbook, hosts, extra_vars=None):
        '''
        Returns list of Ansible CLI command arguments for a management task

        :param playbook: name of the playbook to run
        :param hosts: host pattern to operate on, ex. "localhost,"
        :param extra_vars: optional dictionary of extra_vars to apply
        '''
        args = [
            'ansible-playbook',
            playbook,
            '-u', settings.AWX_ISOLATED_USERNAME,
            '-T', str(settings.AWX_ISOLATED_CONNECTION_TIMEOUT),
            '-i', hosts
        ]
        if extra_vars:
            args.extend(['-e', json.dumps(extra_vars)])
        if settings.AWX_ISOLATED_VERBOSITY:
            args.append('-%s' % ('v' * min(5, settings.AWX_ISOLATED_VERBOSITY)))
        return args

    @classmethod
    def awx_playbook_path(cls):
        return os.path.abspath(os.path.join(
            os.path.dirname(awx.__file__),
            'playbooks'
        ))

    def path_to(self, *args):
        return os.path.join(self.private_data_dir, *args)

    def dispatch(self, playbook=None, module=None, module_args=None):
        '''
        Ship the runner payload to a remote host for isolated execution.
        '''
        self.handled_events = set()
        self.started_at = time.time()

        self.build_isolated_job_data()
        extra_vars = {
            'src': self.private_data_dir,
            'dest': settings.AWX_PROOT_BASE_PATH,
            'ident': self.ident
        }
        if playbook:
            extra_vars['playbook'] = playbook
        if module and module_args:
            extra_vars['module'] = module
            extra_vars['module_args'] = module_args

        # Run ansible-playbook to launch a job on the isolated host. This:
        #
        # - sets up a temporary directory for proot/bwrap (if necessary)
        # - copies encrypted job data from the controlling host to the isolated host (with rsync)
        # - writes the encryption secret to a named pipe on the isolated host
        # - launches ansible-runner
        args = self._build_args('run_isolated.yml', '%s,' % self.host, extra_vars)
        if self.instance.verbosity:
            args.append('-%s' % ('v' * min(5, self.instance.verbosity)))
        buff = StringIO()
        logger.debug('Starting job {} on isolated host with `run_isolated.yml` playbook.'.format(self.instance.id))
        status, rc = IsolatedManager.run_pexpect(
            args, self.awx_playbook_path(), self.management_env, buff,
            idle_timeout=self.idle_timeout,
            job_timeout=settings.AWX_ISOLATED_LAUNCH_TIMEOUT,
            pexpect_timeout=5
        )
        output = buff.getvalue()
        playbook_logger.info('Isolated job {} dispatch:\n{}'.format(self.instance.id, output))
        if status != 'successful':
            for event_data in [
                {'event': 'verbose', 'stdout': output},
                {'event': 'EOF', 'final_counter': 1},
            ]:
                event_data.setdefault(self.event_data_key, self.instance.id)
                CallbackQueueDispatcher().dispatch(event_data)
        return status, rc

    @classmethod
    def run_pexpect(cls, pexpect_args, *args, **kw):
        isolated_ssh_path = None
        try:
            if all([
                getattr(settings, 'AWX_ISOLATED_KEY_GENERATION', False) is True,
                getattr(settings, 'AWX_ISOLATED_PRIVATE_KEY', None)
            ]):
                isolated_ssh_path = tempfile.mkdtemp(prefix='awx_isolated', dir=settings.AWX_PROOT_BASE_PATH)
                os.chmod(isolated_ssh_path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
                isolated_key = os.path.join(isolated_ssh_path, '.isolated')
                ssh_sock = os.path.join(isolated_ssh_path, '.isolated_ssh_auth.sock')
                run.open_fifo_write(isolated_key, settings.AWX_ISOLATED_PRIVATE_KEY)
                pexpect_args = run.wrap_args_with_ssh_agent(pexpect_args, isolated_key, ssh_sock, silence_ssh_add=True)
            return run.run_pexpect(pexpect_args, *args, **kw)
        finally:
            if isolated_ssh_path:
                shutil.rmtree(isolated_ssh_path)

    def build_isolated_job_data(self):
        '''
        Write metadata related to the playbook run into a collection of files
        on the local file system.
        '''

        rsync_exclude = [
            # don't rsync source control metadata (it can be huge!)
            '- /project/.git',
            '- /project/.svn',
            '- /project/.hg',
            # don't rsync job events that are in the process of being written
            '- /artifacts/job_events/*-partial.json.tmp',
            # don't rsync the ssh_key FIFO
            '- /env/ssh_key',
        ]

        for filename, data in (
            ['.rsync-filter', '\n'.join(rsync_exclude)],
        ):
            path = self.path_to(filename)
            with open(path, 'w') as f:
                f.write(data)
            os.chmod(path, stat.S_IRUSR)

    def check(self, interval=None):
        """
        Repeatedly poll the isolated node to determine if the job has run.

        On success, copy job artifacts to the controlling node.
        On failure, continue to poll the isolated node (until the job timeout
        is exceeded).

        For a completed job run, this function returns (status, rc),
        representing the status and return code of the isolated
        `ansible-playbook` run.

        :param interval: an interval (in seconds) to wait between status polls
        """
        interval = interval if interval is not None else settings.AWX_ISOLATED_CHECK_INTERVAL
        extra_vars = {'src': self.private_data_dir}
        args = self._build_args('check_isolated.yml', '%s,' % self.host, extra_vars)
        if self.instance.verbosity:
            args.append('-%s' % ('v' * min(5, self.instance.verbosity)))

        status = 'failed'
        output = ''
        rc = None
        buff = StringIO()
        last_check = time.time()
        job_timeout = remaining = self.job_timeout
        dispatcher = CallbackQueueDispatcher()
        while status == 'failed':
            if job_timeout != 0:
                remaining = max(0, job_timeout - (time.time() - self.started_at))

            canceled = self.cancelled_callback() if self.cancelled_callback else False
            if not canceled and time.time() - last_check < interval:
                # If the job isn't cancelled, but we haven't waited `interval` seconds, wait longer
                time.sleep(1)
                continue

            if canceled:
                logger.warning('Isolated job {} was manually cancelled.'.format(self.instance.id))

            buff = StringIO()
            logger.debug('Checking on isolated job {} with `check_isolated.yml`.'.format(self.instance.id))
            status, rc = IsolatedManager.run_pexpect(
                args, self.awx_playbook_path(), self.management_env, buff,
                cancelled_callback=self.cancelled_callback,
                idle_timeout=remaining,
                job_timeout=remaining,
                pexpect_timeout=5,
                proot_cmd='bwrap'
            )
            output = buff.getvalue().encode('utf-8')
            playbook_logger.info('Isolated job {} check:\n{}'.format(self.instance.id, output))

            # discover new events and ingest them
            events_path = self.path_to('artifacts', self.ident, 'job_events')

            # it's possible that `events_path` doesn't exist *yet*, because runner
            # hasn't actually written any events yet (if you ran e.g., a sleep 30)
            # only attempt to consume events if any were rsynced back
            if os.path.exists(events_path):
                for event in set(os.listdir(events_path)) - self.handled_events:
                    path = os.path.join(events_path, event)
                    if os.path.exists(path):
                        event_data = json.load(
                            open(os.path.join(events_path, event), 'r')
                        )
                        event_data.setdefault(self.event_data_key, self.instance.id)
                        dispatcher.dispatch(event_data)
                        self.handled_events.add(event)

                        # handle artifacts
                        if event_data.get('event_data', {}).get('artifact_data', {}):
                            self.instance.artifacts = event_data['event_data']['artifact_data']
                            self.instance.save(update_fields=['artifacts'])

            last_check = time.time()

        if status == 'successful':
            status_path = self.path_to('artifacts', self.ident, 'status')
            rc_path = self.path_to('artifacts', self.ident, 'rc')
            with open(status_path, 'r') as f:
                status = f.readline()
            with open(rc_path, 'r') as f:
                rc = int(f.readline())

        # emit an EOF event
        event_data = {
            'event': 'EOF',
            'final_counter': len(self.handled_events)
        }
        event_data.setdefault(self.event_data_key, self.instance.id)
        dispatcher.dispatch(event_data)

        return status, rc

    def cleanup(self):
        # If the job failed for any reason, make a last-ditch effort at cleanup
        extra_vars = {
            'private_data_dir': self.private_data_dir,
            'cleanup_dirs': [
                self.private_data_dir,
            ],
        }
        args = self._build_args('clean_isolated.yml', '%s,' % self.host, extra_vars)
        logger.debug('Cleaning up job {} on isolated host with `clean_isolated.yml` playbook.'.format(self.instance.id))
        buff = StringIO()
        timeout = max(60, 2 * settings.AWX_ISOLATED_CONNECTION_TIMEOUT)
        status, rc = IsolatedManager.run_pexpect(
            args, self.awx_playbook_path(), self.management_env, buff,
            idle_timeout=timeout, job_timeout=timeout,
            pexpect_timeout=5
        )
        output = buff.getvalue().encode('utf-8')
        playbook_logger.info('Isolated job {} cleanup:\n{}'.format(self.instance.id, output))

        if status != 'successful':
            # stdout_handle is closed by this point so writing output to logs is our only option
            logger.warning('Isolated job {} cleanup error, output:\n{}'.format(self.instance.id, output))

    @classmethod
    def update_capacity(cls, instance, task_result, awx_application_version):
        instance.version = 'ansible-runner-{}'.format(task_result['version'])

        if instance.capacity == 0 and task_result['capacity_cpu']:
            logger.warning('Isolated instance {} has re-joined.'.format(instance.hostname))
        instance.cpu_capacity = int(task_result['capacity_cpu'])
        instance.mem_capacity = int(task_result['capacity_mem'])
        instance.capacity = get_system_task_capacity(scale=instance.capacity_adjustment,
                                                     cpu_capacity=int(task_result['capacity_cpu']),
                                                     mem_capacity=int(task_result['capacity_mem']))
        instance.save(update_fields=['cpu_capacity', 'mem_capacity', 'capacity', 'version', 'modified'])

    @classmethod
    def health_check(cls, instance_qs, awx_application_version):
        '''
        :param instance_qs: List of Django objects representing the
                            isolated instances to manage
        Runs playbook that will
        - determine if instance is reachable
        - find the instance capacity
        - clean up orphaned private files
        Performs save on each instance to update its capacity.
        '''
        hostname_string = ''
        for instance in instance_qs:
            hostname_string += '{},'.format(instance.hostname)
        args = cls._build_args('heartbeat_isolated.yml', hostname_string)
        args.extend(['--forks', str(len(instance_qs))])
        env = cls._base_management_env()

        try:
            facts_path = tempfile.mkdtemp()
            env['ANSIBLE_CACHE_PLUGIN'] = 'jsonfile'
            env['ANSIBLE_CACHE_PLUGIN_CONNECTION'] = facts_path

            buff = StringIO()
            timeout = max(60, 2 * settings.AWX_ISOLATED_CONNECTION_TIMEOUT)
            status, rc = IsolatedManager.run_pexpect(
                args, cls.awx_playbook_path(), env, buff,
                idle_timeout=timeout, job_timeout=timeout,
                pexpect_timeout=5
            )
            heartbeat_stdout = buff.getvalue().encode('utf-8')
            buff.close()

            for instance in instance_qs:
                output = heartbeat_stdout
                task_result = {}
                try:
                    with open(os.path.join(facts_path, instance.hostname), 'r') as facts_data:
                        output = facts_data.read()
                    task_result = json.loads(output)
                except Exception:
                    logger.exception('Failed to read status from isolated instances, output:\n {}'.format(output))
                if 'awx_capacity_cpu' in task_result and 'awx_capacity_mem' in task_result:
                    task_result = {
                        'capacity_cpu': task_result['awx_capacity_cpu'],
                        'capacity_mem': task_result['awx_capacity_mem'],
                        'version': task_result['awx_capacity_version']
                    }
                    cls.update_capacity(instance, task_result, awx_application_version)
                    logger.debug('Isolated instance {} successful heartbeat'.format(instance.hostname))
                elif instance.capacity == 0:
                    logger.debug('Isolated instance {} previously marked as lost, could not re-join.'.format(
                        instance.hostname))
                else:
                    logger.warning('Could not update status of isolated instance {}'.format(instance.hostname))
                    if instance.is_lost(isolated=True):
                        instance.capacity = 0
                        instance.save(update_fields=['capacity'])
                        logger.error('Isolated instance {} last checked in at {}, marked as lost.'.format(
                            instance.hostname, instance.modified))
        finally:
            if os.path.exists(facts_path):
                shutil.rmtree(facts_path)

    def run(self, instance, private_data_dir, playbook, module, module_args,
            event_data_key, ident=None):
        """
        Run a job on an isolated host.

        :param instance: a `model.Job` instance
        :param private_data_dir: an absolute path on the local file system
                                 where job-specific data should be written
                                 (i.e., `/tmp/ansible_awx_xyz/`)
        :param playbook: the playbook to run
        :param module: the module to run
        :param module_args: the module args to use
        :param event_data_key: e.g., job_id, inventory_id, ...

        For a completed job run, this function returns (status, rc),
        representing the status and return code of the isolated
        `ansible-playbook` run.
        """
        self.ident = ident
        self.event_data_key = event_data_key
        self.instance = instance
        self.host = instance.execution_node
        self.private_data_dir = private_data_dir
        status, rc = self.dispatch(playbook, module, module_args)
        if status == 'successful':
            status, rc = self.check()
        self.cleanup()
        return status, rc
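The deleted class drove a three-phase lifecycle: dispatch() rsyncs the payload out and starts the run, check() polls and streams events back, and cleanup() always runs at the end. The run() method was the whole external contract; a hedged sketch of a caller, where the model object and paths are placeholders:

# Hypothetical caller of the (now removed) IsolatedManager; `job` is a placeholder
import os

mgr = IsolatedManager(env=dict(os.environ), job_timeout=3600)
status, rc = mgr.run(
    instance=job,                            # a Job model object
    private_data_dir='/tmp/ansible_awx_xyz/',
    playbook='site.yml',
    module=None, module_args=None,
    event_data_key='job_id',
)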
@@ -1,316 +0,0 @@
#! /usr/bin/env python

import argparse
import base64
import codecs
import collections
import logging
import json
import os
import stat
import pipes
import re
import signal
import sys
import threading
import time
from io import StringIO

import pexpect
import psutil


logger = logging.getLogger('awx.main.utils.expect')


def args2cmdline(*args):
    return ' '.join([pipes.quote(a) for a in args])


def wrap_args_with_ssh_agent(args, ssh_key_path, ssh_auth_sock=None, silence_ssh_add=False):
    if ssh_key_path:
        ssh_add_command = args2cmdline('ssh-add', ssh_key_path)
        if silence_ssh_add:
            ssh_add_command = ' '.join([ssh_add_command, '2>/dev/null'])
        cmd = ' && '.join([ssh_add_command,
                           args2cmdline('rm', '-f', ssh_key_path),
                           args2cmdline(*args)])
        args = ['ssh-agent']
        if ssh_auth_sock:
            args.extend(['-a', ssh_auth_sock])
        args.extend(['sh', '-c', cmd])
    return args


def open_fifo_write(path, data):
    '''open_fifo_write opens the fifo named pipe in a new thread.
    This blocks the thread until an external process (such as ssh-agent)
    reads data from the pipe.
    '''
    os.mkfifo(path, 0o600)
    threading.Thread(
        target=lambda p, d: open(p, 'w').write(d),
        args=(path, data)
    ).start()


def run_pexpect(args, cwd, env, logfile,
                cancelled_callback=None, expect_passwords={},
                extra_update_fields=None, idle_timeout=None, job_timeout=0,
                pexpect_timeout=5, proot_cmd='bwrap'):
    '''
    Run the given command using pexpect to capture output and provide
    passwords when requested.

    :param args: a list of `subprocess.call`-style arguments
                 representing a subprocess e.g., ['ls', '-la']
    :param cwd: the directory in which the subprocess should run
    :param env: a dict containing environment variables for the
                subprocess, ala `os.environ`
    :param logfile: a file-like object for capturing stdout
    :param cancelled_callback: a callable - which returns `True` or `False`
                               - signifying if the job has been prematurely
                               cancelled
    :param expect_passwords: a dict of regular expression password prompts
                             to input values, i.e., {r'Password:*?$':
                             'some_password'}
    :param extra_update_fields: a dict used to specify DB fields which should
                                be updated on the underlying model
                                object after execution completes
    :param idle_timeout: a timeout (in seconds); if new output is not
                         sent to stdout in this interval, the process
                         will be terminated
    :param job_timeout: a timeout (in seconds); if the total job runtime
                        exceeds this, the process will be killed
    :param pexpect_timeout: a timeout (in seconds) to wait on
                            `pexpect.spawn().expect()` calls
    :param proot_cmd: the command used to isolate processes, `bwrap`

    Returns a tuple (status, return_code) i.e., `('successful', 0)`
    '''
    expect_passwords[pexpect.TIMEOUT] = None
    expect_passwords[pexpect.EOF] = None

    if not isinstance(expect_passwords, collections.OrderedDict):
        # We iterate over `expect_passwords.keys()` and
        # `expect_passwords.values()` separately to map matched inputs to
        # patterns and choose the proper string to send to the subprocess;
        # enforce usage of an OrderedDict so that the ordering of elements in
        # `keys()` matches `values()`.
        expect_passwords = collections.OrderedDict(expect_passwords)
    password_patterns = list(expect_passwords.keys())
    password_values = list(expect_passwords.values())

    child = pexpect.spawn(
        args[0], args[1:], cwd=cwd, env=env, ignore_sighup=True,
        encoding='utf-8', echo=False, use_poll=True
    )
    child.logfile_read = logfile
    canceled = False
    timed_out = False
    errored = False
    last_stdout_update = time.time()

    job_start = time.time()
    while child.isalive():
        result_id = child.expect(password_patterns, timeout=pexpect_timeout, searchwindowsize=100)
        password = password_values[result_id]
        if password is not None:
            child.sendline(password)
            last_stdout_update = time.time()
        if cancelled_callback:
            try:
                canceled = cancelled_callback()
            except Exception:
                logger.exception('Could not check cancel callback - canceling immediately')
                if isinstance(extra_update_fields, dict):
                    extra_update_fields['job_explanation'] = "System error during job execution, check system logs"
                errored = True
        else:
            canceled = False
        if not canceled and job_timeout != 0 and (time.time() - job_start) > job_timeout:
            timed_out = True
            if isinstance(extra_update_fields, dict):
                extra_update_fields['job_explanation'] = "Job terminated due to timeout"
        if canceled or timed_out or errored:
            handle_termination(child.pid, child.args, proot_cmd, is_cancel=canceled)
        if idle_timeout and (time.time() - last_stdout_update) > idle_timeout:
            child.close(True)
            canceled = True
    if errored:
        return 'error', child.exitstatus
    elif canceled:
        return 'canceled', child.exitstatus
    elif child.exitstatus == 0 and not timed_out:
        return 'successful', child.exitstatus
    else:
        return 'failed', child.exitstatus


def run_isolated_job(private_data_dir, secrets, logfile=sys.stdout):
    '''
    Launch `ansible-playbook`, executing a job packaged by
    `build_isolated_job_data`.

    :param private_data_dir: an absolute path on the local file system where
                             job metadata exists (i.e.,
                             `/tmp/ansible_awx_xyz/`)
    :param secrets: a dict containing sensitive job metadata, {
                        'env': { ... } # environment variables,
                        'passwords': { ... } # pexpect password prompts
                        'ssh_key_data': 'RSA KEY DATA',
                    }
    :param logfile: a file-like object for capturing stdout

    Returns a tuple (status, return_code) i.e., `('successful', 0)`
    '''
    with open(os.path.join(private_data_dir, 'args'), 'r') as args:
        args = json.load(args)

    env = secrets.get('env', {})
    expect_passwords = {
        re.compile(pattern, re.M): password
        for pattern, password in secrets.get('passwords', {}).items()
    }

    if 'AD_HOC_COMMAND_ID' in env:
        cwd = private_data_dir
    else:
        cwd = os.path.join(private_data_dir, 'project')

    # write the SSH key data into a fifo read by ssh-agent
    ssh_key_data = secrets.get('ssh_key_data')
    if ssh_key_data:
        ssh_key_path = os.path.join(private_data_dir, 'ssh_key_data')
        ssh_auth_sock = os.path.join(private_data_dir, 'ssh_auth.sock')
        open_fifo_write(ssh_key_path, ssh_key_data)
        args = wrap_args_with_ssh_agent(args, ssh_key_path, ssh_auth_sock)

    idle_timeout = secrets.get('idle_timeout', 10)
    job_timeout = secrets.get('job_timeout', 10)
    pexpect_timeout = secrets.get('pexpect_timeout', 5)

    env['AWX_ISOLATED_DATA_DIR'] = private_data_dir

    venv_path = env.get('VIRTUAL_ENV')
    if venv_path and not os.path.exists(venv_path):
        raise RuntimeError(
            'a valid Python virtualenv does not exist at {}'.format(venv_path)
        )

    return run_pexpect(args, cwd, env, logfile,
                       expect_passwords=expect_passwords,
                       idle_timeout=idle_timeout,
                       job_timeout=job_timeout,
                       pexpect_timeout=pexpect_timeout)


def handle_termination(pid, args, proot_cmd, is_cancel=True):
    '''
    Terminate a subprocess spawned by `pexpect`.

    :param pid: the process id of the running job.
    :param args: the args for the job, i.e., ['ansible-playbook', 'abc.yml']
    :param proot_cmd: the command used to isolate processes i.e., `bwrap`
    :param is_cancel: flag showing whether this termination is caused by
                      instance's cancel_flag.
    '''
    try:
        used_proot = proot_cmd.encode('utf-8') in args
        if used_proot:
            if not psutil:
                os.kill(pid, signal.SIGKILL)
            else:
                try:
                    main_proc = psutil.Process(pid=pid)
                    child_procs = main_proc.children(recursive=True)
                    for child_proc in child_procs:
                        os.kill(child_proc.pid, signal.SIGKILL)
                    os.kill(main_proc.pid, signal.SIGKILL)
                except (TypeError, psutil.Error):
                    os.kill(pid, signal.SIGKILL)
        else:
            os.kill(pid, signal.SIGTERM)
            time.sleep(3)
    except OSError:
        keyword = 'cancel' if is_cancel else 'timeout'
        logger.warn("Attempted to %s already finished job, ignoring" % keyword)


def __run__(private_data_dir):
    buff = StringIO()
    with codecs.open(os.path.join(private_data_dir, 'env'), 'r', encoding='utf-8') as f:
        for line in f:
            buff.write(line)

    artifacts_dir = os.path.join(private_data_dir, 'artifacts')

    # Standard out directed to pickup location without event filtering applied
    stdout_filename = os.path.join(artifacts_dir, 'stdout')
    os.mknod(stdout_filename, stat.S_IFREG | stat.S_IRUSR | stat.S_IWUSR)
    stdout_handle = codecs.open(stdout_filename, 'w', encoding='utf-8')

    status, rc = run_isolated_job(
        private_data_dir,
        json.loads(base64.b64decode(buff.getvalue())),
        stdout_handle
    )
    for filename, data in [
        ('status', status),
        ('rc', rc),
    ]:
        artifact_path = os.path.join(private_data_dir, 'artifacts', filename)
        os.mknod(artifact_path, stat.S_IFREG | stat.S_IRUSR | stat.S_IWUSR)
        with open(artifact_path, 'w') as f:
            f.write(str(data))


if __name__ == '__main__':
    import awx
    __version__ = awx.__version__
    parser = argparse.ArgumentParser(description='manage a daemonized, isolated ansible playbook')
    parser.add_argument('--version', action='version', version=__version__ + '-isolated')
    parser.add_argument('command', choices=['start', 'stop', 'is-alive'])
    parser.add_argument('private_data_dir')
    args = parser.parse_args()

    private_data_dir = args.private_data_dir
    pidfile = os.path.join(private_data_dir, 'pid')

    if args.command == 'start':
        # create a file to log stderr in case the daemonized process throws
        # an exception before it gets to `pexpect.spawn`
        stderr_path = os.path.join(private_data_dir, 'artifacts', 'daemon.log')
        if not os.path.exists(stderr_path):
            os.mknod(stderr_path, stat.S_IFREG | stat.S_IRUSR | stat.S_IWUSR)
        stderr = open(stderr_path, 'w+')

        import daemon
        from daemon.pidfile import TimeoutPIDLockFile
        context = daemon.DaemonContext(
            pidfile=TimeoutPIDLockFile(pidfile),
            stderr=stderr
        )
        with context:
            __run__(private_data_dir)
        sys.exit(0)

    try:
        with open(pidfile, 'r') as f:
            pid = int(f.readline())
    except IOError:
        sys.exit(1)

    if args.command == 'stop':
        try:
            with open(os.path.join(private_data_dir, 'args'), 'r') as args:
                handle_termination(pid, json.load(args), 'bwrap')
        except IOError:
            handle_termination(pid, [], 'bwrap')
    elif args.command == 'is-alive':
        try:
            os.kill(pid, signal.SIG_DFL)
            sys.exit(0)
        except OSError:
            sys.exit(1)
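Two details of the removed module are worth keeping in mind: run_pexpect matches stdout against the expect_passwords patterns in order, so an OrderedDict keeps the matched pattern index aligned with the reply to send, and wrap_args_with_ssh_agent rewrites a command so the key is added to a throwaway agent and its fifo removed before the real command runs. A small shape sketch of both, with all values placeholders:

# Placeholders throughout; this shows the intended call shapes only
import collections
import re

passwords = collections.OrderedDict([
    (re.compile(r'Enter passphrase.*:', re.M), 'hunter2'),
    (re.compile(r'Password:', re.M), 'hunter2'),
])
args = wrap_args_with_ssh_agent(['ansible-playbook', 'site.yml'], '/tmp/key.fifo')
# -> ['ssh-agent', 'sh', '-c',
#     "ssh-add /tmp/key.fifo && rm -f /tmp/key.fifo && ansible-playbook site.yml"]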
@@ -11,6 +11,7 @@ from jinja2 import Environment, StrictUndefined
from jinja2.exceptions import UndefinedError, TemplateSyntaxError

# Django
+import django
from django.core import exceptions as django_exceptions
from django.db.models.signals import (
    post_save,
@@ -18,14 +19,16 @@ from django.db.models.signals import (
)
from django.db.models.signals import m2m_changed
from django.db import models
-from django.db.models.fields.related import add_lazy_relation
+from django.db.models.fields.related import lazy_related_operation
from django.db.models.fields.related_descriptors import (
    ReverseOneToOneDescriptor,
    ForwardManyToOneDescriptor,
+   ManyToManyDescriptor,
    ReverseManyToOneDescriptor,
+   create_forward_many_to_many_manager
)
from django.utils.encoding import smart_text
+from django.utils.functional import cached_property
from django.utils.translation import ugettext_lazy as _

# jsonschema
@@ -43,14 +46,17 @@ from rest_framework import serializers
from awx.main.utils.filters import SmartFilter
from awx.main.utils.encryption import encrypt_value, decrypt_value, get_encryption_key
from awx.main.validators import validate_ssh_private_key
-from awx.main.models.rbac import batch_role_ancestor_rebuilding, Role
+from awx.main.models.rbac import (
+    batch_role_ancestor_rebuilding, Role,
+    ROLE_SINGLETON_SYSTEM_ADMINISTRATOR, ROLE_SINGLETON_SYSTEM_AUDITOR
+)
from awx.main.constants import ENV_BLACKLIST
from awx.main import utils


__all__ = ['AutoOneToOneField', 'ImplicitRoleField', 'JSONField',
-           'SmartFilterField', 'update_role_parentage_for_instance',
-           'is_implicit_parent']
+           'SmartFilterField', 'OrderedManyToManyField',
+           'update_role_parentage_for_instance', 'is_implicit_parent']


# Provide a (better) custom error message for enum jsonschema validation
@@ -159,6 +165,13 @@ def is_implicit_parent(parent_role, child_role):
    the model definition. This does not include any role parents that
    might have been set by the user.
    '''
    if child_role.content_object is None:
        # The only singleton implicit parent is the system admin being
        # a parent of the system auditor role
        return bool(
            child_role.singleton_name == ROLE_SINGLETON_SYSTEM_AUDITOR and
            parent_role.singleton_name == ROLE_SINGLETON_SYSTEM_ADMINISTRATOR
        )
    # Get the list of implicit parents that were defined at the class level.
    implicit_parents = getattr(
        child_role.content_object.__class__, child_role.role_field
@@ -217,6 +230,7 @@ class ImplicitRoleField(models.ForeignKey):
        kwargs.setdefault('related_name', '+')
        kwargs.setdefault('null', 'True')
        kwargs.setdefault('editable', False)
        kwargs.setdefault('on_delete', models.CASCADE)
        super(ImplicitRoleField, self).__init__(*args, **kwargs)

    def deconstruct(self):
@@ -234,7 +248,9 @@ class ImplicitRoleField(models.ForeignKey):

        post_save.connect(self._post_save, cls, True, dispatch_uid='implicit-role-post-save')
        post_delete.connect(self._post_delete, cls, True, dispatch_uid='implicit-role-post-delete')
-       add_lazy_relation(cls, self, "self", self.bind_m2m_changed)
+
+       function = lambda local, related, field: self.bind_m2m_changed(field, related, local)
+       lazy_related_operation(function, cls, "self", field=self)

    def bind_m2m_changed(self, _self, _role_class, cls):
        if not self.parent_role:
@@ -480,6 +496,86 @@ def format_ssh_private_key(value):
    return True


@JSONSchemaField.format_checker.checks('url')
def format_url(value):
    try:
        parsed = urllib.parse.urlparse(value)
    except Exception as e:
        raise jsonschema.exceptions.FormatError(str(e))
    if parsed.scheme == '':
        raise jsonschema.exceptions.FormatError(
            'Invalid URL: Missing url scheme (http, https, etc.)'
        )
    if parsed.netloc == '':
        raise jsonschema.exceptions.FormatError(
            'Invalid URL: {}'.format(value)
        )
    return True

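format_url is wired into JSONSchemaField's format checker, so any input declared with 'format': 'url' (as the credential plugins above do) is rejected unless it parses with both a scheme and a host. A quick demonstration of the checker's behavior in isolation:

# Behavior sketch of the checker itself
import jsonschema

try:
    format_url('vault.example.com')      # no scheme
except jsonschema.exceptions.FormatError as e:
    print(e)   # Invalid URL: Missing url scheme (http, https, etc.)

print(format_url('https://vault.example.com'))  # True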
|
||||
class DynamicCredentialInputField(JSONSchemaField):
|
||||
"""
|
||||
Used to validate JSON for
|
||||
`awx.main.models.credential:CredentialInputSource().metadata`.
|
||||
|
||||
Metadata for input sources is represented as a dictionary e.g.,
|
||||
{'secret_path': '/kv/somebody', 'secret_key': 'password'}
|
||||
|
||||
For the data to be valid, the keys of this dictionary should correspond
|
||||
with the metadata field (and datatypes) defined in the associated
|
||||
target CredentialType e.g.,
|
||||
"""
|
||||
|
||||
def schema(self, credential_type):
|
||||
# determine the defined fields for the associated credential type
|
||||
properties = {}
|
||||
for field in credential_type.inputs.get('metadata', []):
|
||||
field = field.copy()
|
||||
properties[field['id']] = field
|
||||
if field.get('choices', []):
|
||||
field['enum'] = list(field['choices'])[:]
|
||||
return {
|
||||
'type': 'object',
|
||||
'properties': properties,
|
||||
'additionalProperties': False,
|
||||
}
|
||||
|
||||
def validate(self, value, model_instance):
|
||||
if not isinstance(value, dict):
|
||||
return super(DynamicCredentialInputField, self).validate(value, model_instance)
|
||||
|
||||
super(JSONSchemaField, self).validate(value, model_instance)
|
||||
credential_type = model_instance.source_credential.credential_type
|
||||
errors = {}
|
||||
for error in Draft4Validator(
|
||||
self.schema(credential_type),
|
||||
format_checker=self.format_checker
|
||||
).iter_errors(value):
|
||||
if error.validator == 'pattern' and 'error' in error.schema:
|
||||
error.message = error.schema['error'].format(instance=error.instance)
|
||||
if 'id' not in error.schema:
|
||||
# If the error is not for a specific field, it's specific to
|
||||
# `inputs` in general
|
||||
raise django_exceptions.ValidationError(
|
||||
error.message,
|
||||
code='invalid',
|
||||
params={'value': value},
|
||||
)
|
||||
errors[error.schema['id']] = [error.message]
|
||||
|
||||
defined_metadata = [field.get('id') for field in credential_type.inputs.get('metadata', [])]
|
||||
for field in credential_type.inputs.get('required', []):
|
||||
if field in defined_metadata and not value.get(field, None):
|
||||
errors[field] = [_('required for %s') % (
|
||||
credential_type.name
|
||||
)]
|
||||
|
||||
if errors:
|
||||
raise serializers.ValidationError({
|
||||
'metadata': errors
|
||||
})
|
||||
|
||||
|
||||
class CredentialInputField(JSONSchemaField):
|
||||
"""
|
||||
Used to validate JSON for
|
||||
@@ -542,7 +638,7 @@ class CredentialInputField(JSONSchemaField):
|
||||
v != '$encrypted$',
|
||||
model_instance.pk
|
||||
]):
|
||||
if not isinstance(getattr(model_instance, k), str):
|
||||
if not isinstance(model_instance.inputs.get(k), str):
|
||||
raise django_exceptions.ValidationError(
|
||||
_('secret values must be of type string, not {}').format(type(v).__name__),
|
||||
code='invalid',
|
||||
@@ -592,18 +688,13 @@ class CredentialInputField(JSONSchemaField):
|
||||
)
|
||||
errors[error.schema['id']] = [error.message]
|
||||
|
||||
inputs = model_instance.credential_type.inputs
|
||||
for field in inputs.get('required', []):
|
||||
if not value.get(field, None):
|
||||
errors[field] = [_('required for %s') % (
|
||||
model_instance.credential_type.name
|
||||
)]
|
||||
defined_fields = model_instance.credential_type.defined_fields
|
||||
|
||||
# `ssh_key_unlock` requirements are very specific and can't be
|
||||
# represented without complicated JSON schema
|
||||
if (
|
||||
model_instance.credential_type.managed_by_tower is True and
|
||||
'ssh_key_unlock' in model_instance.credential_type.defined_fields
|
||||
'ssh_key_unlock' in defined_fields
|
||||
):
|
||||
|
||||
# in order to properly test the necessity of `ssh_key_unlock`, we
|
||||
@@ -613,15 +704,15 @@ class CredentialInputField(JSONSchemaField):
|
||||
# 'ssh_key_unlock': 'do-you-need-me?',
|
||||
# }
|
||||
# ...we have to fetch the actual key value from the database
|
||||
if model_instance.pk and model_instance.ssh_key_data == '$encrypted$':
|
||||
model_instance.ssh_key_data = model_instance.__class__.objects.get(
|
||||
if model_instance.pk and model_instance.inputs.get('ssh_key_data') == '$encrypted$':
|
||||
model_instance.inputs['ssh_key_data'] = model_instance.__class__.objects.get(
|
||||
pk=model_instance.pk
|
||||
).ssh_key_data
|
||||
).inputs.get('ssh_key_data')
|
||||
|
||||
if model_instance.has_encrypted_ssh_key_data and not value.get('ssh_key_unlock'):
|
||||
errors['ssh_key_unlock'] = [_('must be set when SSH key is encrypted.')]
|
||||
if all([
|
||||
model_instance.ssh_key_data,
|
||||
model_instance.inputs.get('ssh_key_data'),
|
||||
value.get('ssh_key_unlock'),
|
||||
not model_instance.has_encrypted_ssh_key_data
|
||||
]):
|
||||
@@ -654,7 +745,7 @@ class CredentialTypeInputField(JSONSchemaField):
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'type': {'enum': ['string', 'boolean']},
|
||||
'format': {'enum': ['ssh_private_key']},
|
||||
'format': {'enum': ['ssh_private_key', 'url']},
|
||||
'choices': {
|
||||
'type': 'array',
|
||||
'minItems': 1,
|
||||
@@ -899,3 +990,115 @@ class OAuth2ClientSecretField(models.CharField):
        if value and value.startswith('$encrypted$'):
            return decrypt_value(get_encryption_key('value', pk=None), value)
        return value


class OrderedManyToManyDescriptor(ManyToManyDescriptor):
    """
    Django doesn't seem to support:

    class Meta:
        ordering = [...]

    ...on custom through= relations for ManyToMany fields.

    Meaning, queries made _through_ the intermediary table will _not_ apply an
    ORDER_BY clause based on the `Meta.ordering` of the intermediary M2M class
    (which is the behavior we want for "ordered" many to many relations):

    https://github.com/django/django/blob/stable/1.11.x/django/db/models/fields/related_descriptors.py#L593

    This descriptor automatically sorts all queries through this relation
    using the `position` column on the M2M table.
    """

    @cached_property
    def related_manager_cls(self):
        model = self.rel.related_model if self.reverse else self.rel.model

        def add_custom_queryset_to_many_related_manager(many_related_manage_cls):
            class OrderedManyRelatedManager(many_related_manage_cls):
                def get_queryset(self):
                    return super(OrderedManyRelatedManager, self).get_queryset().order_by(
                        '%s__position' % self.through._meta.model_name
                    )

                def add(self, *objs):
                    # Django < 2 doesn't support this method on
                    # ManyToManyFields w/ an intermediary model
                    # We should be able to remove this code snippet when we
                    # upgrade Django.
                    # see: https://github.com/django/django/blob/stable/1.11.x/django/db/models/fields/related_descriptors.py#L926
                    if not django.__version__.startswith('1.'):
                        raise RuntimeError(
                            'This method is no longer necessary in Django>=2'
                        )
                    try:
                        self.through._meta.auto_created = True
                        super(OrderedManyRelatedManager, self).add(*objs)
                    finally:
                        self.through._meta.auto_created = False

                def remove(self, *objs):
                    # Django < 2 doesn't support this method on
                    # ManyToManyFields w/ an intermediary model
                    # We should be able to remove this code snippet when we
                    # upgrade Django.
                    # see: https://github.com/django/django/blob/stable/1.11.x/django/db/models/fields/related_descriptors.py#L944
                    if not django.__version__.startswith('1.'):
                        raise RuntimeError(
                            'This method is no longer necessary in Django>=2'
                        )
                    try:
                        self.through._meta.auto_created = True
                        super(OrderedManyRelatedManager, self).remove(*objs)
                    finally:
                        self.through._meta.auto_created = False

            return OrderedManyRelatedManager

        return add_custom_queryset_to_many_related_manager(
            create_forward_many_to_many_manager(
                model._default_manager.__class__,
                self.rel,
                reverse=self.reverse,
            )
        )


class OrderedManyToManyField(models.ManyToManyField):
    """
    A ManyToManyField that automatically sorts all querysets
    by a special `position` column on the M2M table
    """

    def _update_m2m_position(self, sender, **kwargs):
        if kwargs.get('action') in ('post_add', 'post_remove'):
            order_with_respect_to = None
            for field in sender._meta.local_fields:
                if (
                    isinstance(field, models.ForeignKey) and
                    isinstance(kwargs['instance'], field.related_model)
                ):
                    order_with_respect_to = field.name
            for i, ig in enumerate(sender.objects.filter(**{
                order_with_respect_to: kwargs['instance'].pk}
            )):
                if ig.position != i:
                    ig.position = i
                    ig.save()

    def contribute_to_class(self, cls, name, **kwargs):
        super(OrderedManyToManyField, self).contribute_to_class(cls, name, **kwargs)
        setattr(
            cls, name,
            OrderedManyToManyDescriptor(self.remote_field, reverse=False)
        )

        through = getattr(cls, name).through
        if isinstance(through, str) and "." not in through:
            # support lazy loading of string model names
            through = '.'.join([cls._meta.app_label, through])
        m2m_changed.connect(
            self._update_m2m_position,
            sender=through
        )

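Taken together, the descriptor and the field give ordered many-to-many semantics: any query through the relation comes back ordered by the through table's `position` column, and `_update_m2m_position` re-packs positions after every add or remove. A minimal sketch of how a model might wire this up (the model names here are hypothetical, not from this diff):

```python
# Hypothetical models illustrating OrderedManyToManyField; the only thing
# the descriptor above requires is a `position` column on the through model.
from django.db import models

from awx.main.fields import OrderedManyToManyField


class Step(models.Model):
    name = models.CharField(max_length=100)


class PipelineStep(models.Model):  # the custom through model
    pipeline = models.ForeignKey('Pipeline', on_delete=models.CASCADE)
    step = models.ForeignKey(Step, on_delete=models.CASCADE)
    position = models.PositiveIntegerField(default=0)


class Pipeline(models.Model):
    steps = OrderedManyToManyField(Step, through=PipelineStep)

# pipeline.steps.all() is now ordered by PipelineStep.position, and the
# positions are renumbered 0..N-1 on every post_add / post_remove signal.
```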
awx/main/isolated/manager.py (new file, 404 lines)
@@ -0,0 +1,404 @@
import fnmatch
import json
import os
import shutil
import stat
import tempfile
import time
import logging

from django.conf import settings
import ansible_runner

import awx
from awx.main.utils import get_system_task_capacity
from awx.main.queue import CallbackQueueDispatcher

logger = logging.getLogger('awx.isolated.manager')
playbook_logger = logging.getLogger('awx.isolated.manager.playbooks')


def set_pythonpath(venv_libdir, env):
    env.pop('PYTHONPATH', None)  # default to none if no python_ver matches
    for version in os.listdir(venv_libdir):
        if fnmatch.fnmatch(version, 'python[23].*'):
            if os.path.isdir(os.path.join(venv_libdir, version)):
                env['PYTHONPATH'] = os.path.join(venv_libdir, version, "site-packages") + ":"
                break


class IsolatedManager(object):

    def __init__(self, cancelled_callback=None, check_callback=None):
        """
        :param cancelled_callback: a callable which returns `True` or `False`,
                                   signifying whether the job has been
                                   prematurely cancelled
        """
        self.cancelled_callback = cancelled_callback
        self.check_callback = check_callback
        self.idle_timeout = max(60, 2 * settings.AWX_ISOLATED_CONNECTION_TIMEOUT)
        self.started_at = None
        self.captured_command_artifact = False

    def build_runner_params(self, hosts, verbosity=1):
        env = dict(os.environ.items())
        env['ANSIBLE_RETRY_FILES_ENABLED'] = 'False'
        env['ANSIBLE_HOST_KEY_CHECKING'] = 'False'
        env['ANSIBLE_LIBRARY'] = os.path.join(os.path.dirname(awx.__file__), 'plugins', 'isolated')
        set_pythonpath(os.path.join(settings.ANSIBLE_VENV_PATH, 'lib'), env)

        def finished_callback(runner_obj):
            if runner_obj.status == 'failed' and runner_obj.config.playbook != 'check_isolated.yml':
                # a failed status for check_isolated.yml just means the playbook
                # hasn't exited on the isolated host yet
                stdout = runner_obj.stdout.read()
                playbook_logger.error(stdout)
            elif runner_obj.status == 'timeout':
                # this means that the default idle timeout of
                # (2 * AWX_ISOLATED_CONNECTION_TIMEOUT) was exceeded
                # (meaning, we tried to sync with an isolated node, and we got
                # no new output for 2 * AWX_ISOLATED_CONNECTION_TIMEOUT seconds)
                # this _usually_ means SSH key auth from the controller ->
                # isolated didn't work, and ssh is hung waiting on interactive
                # input e.g.,
                #
                # awx@isolated's password:
                stdout = runner_obj.stdout.read()
                playbook_logger.error(stdout)
            else:
                playbook_logger.info(runner_obj.stdout.read())

        inventory = '\n'.join([
            '{} ansible_ssh_user={}'.format(host, settings.AWX_ISOLATED_USERNAME)
            for host in hosts
        ])

        return {
            'project_dir': os.path.abspath(os.path.join(
                os.path.dirname(awx.__file__),
                'playbooks'
            )),
            'inventory': inventory,
            'envvars': env,
            'finished_callback': finished_callback,
            'verbosity': verbosity,
            'cancel_callback': self.cancelled_callback,
            'settings': {
                'idle_timeout': self.idle_timeout,
                'job_timeout': settings.AWX_ISOLATED_LAUNCH_TIMEOUT,
                'pexpect_timeout': getattr(settings, 'PEXPECT_TIMEOUT', 5),
                'suppress_ansible_output': True,
            },
        }

    def path_to(self, *args):
        return os.path.join(self.private_data_dir, *args)

    def run_management_playbook(self, playbook, private_data_dir, **kw):
        iso_dir = tempfile.mkdtemp(
            prefix=playbook,
            dir=private_data_dir
        )
        params = self.runner_params.copy()
        params['playbook'] = playbook
        params['private_data_dir'] = iso_dir
        params.update(**kw)
        if all([
            getattr(settings, 'AWX_ISOLATED_KEY_GENERATION', False) is True,
            getattr(settings, 'AWX_ISOLATED_PRIVATE_KEY', None)
        ]):
            params['ssh_key'] = settings.AWX_ISOLATED_PRIVATE_KEY
        return ansible_runner.interface.run(**params)

    def dispatch(self, playbook=None, module=None, module_args=None):
        '''
        Ship the runner payload to a remote host for isolated execution.
        '''
        self.handled_events = set()
        self.started_at = time.time()

        # exclude certain files from the rsync
        rsync_exclude = [
            # don't rsync source control metadata (it can be huge!)
            '- /project/.git',
            '- /project/.svn',
            '- /project/.hg',
            # don't rsync job events that are in the process of being written
            '- /artifacts/job_events/*-partial.json.tmp',
            # don't rsync the ssh_key FIFO
            '- /env/ssh_key',
        ]

        for filename, data in (
            ['.rsync-filter', '\n'.join(rsync_exclude)],
        ):
            path = self.path_to(filename)
            with open(path, 'w') as f:
                f.write(data)
            os.chmod(path, stat.S_IRUSR)

        extravars = {
            'src': self.private_data_dir,
            'dest': settings.AWX_PROOT_BASE_PATH,
            'ident': self.ident
        }
        if playbook:
            extravars['playbook'] = playbook
        if module and module_args:
            extravars['module'] = module
            extravars['module_args'] = module_args

        logger.debug('Starting job {} on isolated host with `run_isolated.yml` playbook.'.format(self.instance.id))
        runner_obj = self.run_management_playbook('run_isolated.yml',
                                                  self.private_data_dir,
                                                  extravars=extravars)
        return runner_obj.status, runner_obj.rc

    def check(self, interval=None):
        """
        Repeatedly poll the isolated node to determine if the job has finished
        running.

        On success, copy job artifacts to the controlling node.
        On failure, continue to poll the isolated node (until the job timeout
        is exceeded).

        For a completed job run, this function returns (status, rc),
        representing the status and return code of the isolated
        `ansible-playbook` run.

        :param interval: an interval (in seconds) to wait between status polls
        """
        interval = interval if interval is not None else settings.AWX_ISOLATED_CHECK_INTERVAL
        extravars = {'src': self.private_data_dir}
        status = 'failed'
        rc = None
        last_check = time.time()
        dispatcher = CallbackQueueDispatcher()
        while status == 'failed':
            canceled = self.cancelled_callback() if self.cancelled_callback else False
            if not canceled and time.time() - last_check < interval:
                # If the job isn't cancelled, but we haven't waited `interval` seconds, wait longer
                time.sleep(1)
                continue

            if canceled:
                logger.warning('Isolated job {} was manually cancelled.'.format(self.instance.id))

            logger.debug('Checking on isolated job {} with `check_isolated.yml`.'.format(self.instance.id))
            runner_obj = self.run_management_playbook('check_isolated.yml',
                                                      self.private_data_dir,
                                                      extravars=extravars)
            status, rc = runner_obj.status, runner_obj.rc

            if self.check_callback is not None and not self.captured_command_artifact:
                command_path = self.path_to('artifacts', self.ident, 'command')
                # If the configuration artifact has been synced back, update the model
                if os.path.exists(command_path):
                    try:
                        with open(command_path, 'r') as f:
                            data = json.load(f)
                        self.check_callback(data)
                        self.captured_command_artifact = True
                    except json.decoder.JSONDecodeError:  # Just in case it's not fully here yet.
                        pass

            self.consume_events(dispatcher)

            last_check = time.time()

        if status == 'successful':
            status_path = self.path_to('artifacts', self.ident, 'status')
            rc_path = self.path_to('artifacts', self.ident, 'rc')
            if os.path.exists(status_path):
                with open(status_path, 'r') as f:
                    status = f.readline()
                with open(rc_path, 'r') as f:
                    rc = int(f.readline())
            else:
                # if there's no status file, it means that runner _probably_
                # exited with a traceback (which should be logged to
                # daemon.log). Record it so we can see how runner failed.
                daemon_path = self.path_to('daemon.log')
                if os.path.exists(daemon_path):
                    with open(daemon_path, 'r') as f:
                        self.instance.result_traceback = f.read()
                    self.instance.save(update_fields=['result_traceback'])
                else:
                    logger.error('Failed to rsync daemon.log (is ansible-runner installed on the isolated host?)')
                    status = 'failed'
                    rc = 1

        # consume events one last time just to be sure we didn't miss anything
        # in the final sync
        self.consume_events(dispatcher)

        # emit an EOF event
        event_data = {
            'event': 'EOF',
            'final_counter': len(self.handled_events)
        }
        event_data.setdefault(self.event_data_key, self.instance.id)
        dispatcher.dispatch(event_data)

        return status, rc

    def consume_events(self, dispatcher):
        # discover new events and ingest them
        events_path = self.path_to('artifacts', self.ident, 'job_events')

        # it's possible that `events_path` doesn't exist *yet*, because runner
        # hasn't actually written any events yet (if you ran e.g., a sleep 30)
        # only attempt to consume events if any were rsynced back
        if os.path.exists(events_path):
            for event in set(os.listdir(events_path)) - self.handled_events:
                path = os.path.join(events_path, event)
                if os.path.exists(path):
                    try:
                        event_data = json.load(
                            open(os.path.join(events_path, event), 'r')
                        )
                    except json.decoder.JSONDecodeError:
                        # This means the event we got back isn't valid JSON
                        # that can happen if runner is still partially
                        # writing an event file while it's rsyncing
                        # these event writes are _supposed_ to be atomic
                        # but it doesn't look like they actually are in
                        # practice
                        # in this scenario, just ignore this event and try it
                        # again on the next sync
                        continue
event_data.setdefault(self.event_data_key, self.instance.id)
|
||||
dispatcher.dispatch(event_data)
|
||||
self.handled_events.add(event)
|
||||
|
||||
# handle artifacts
|
||||
if event_data.get('event_data', {}).get('artifact_data', {}):
|
||||
self.instance.artifacts = event_data['event_data']['artifact_data']
|
||||
self.instance.save(update_fields=['artifacts'])
|
||||
|
||||
|
||||
def cleanup(self):
|
||||
# If the job failed for any reason, make a last-ditch effort at cleanup
|
||||
extravars = {
|
||||
'private_data_dir': self.private_data_dir,
|
||||
'cleanup_dirs': [
|
||||
self.private_data_dir,
|
||||
],
|
||||
}
|
||||
logger.debug('Cleaning up job {} on isolated host with `clean_isolated.yml` playbook.'.format(self.instance.id))
|
||||
self.run_management_playbook(
|
||||
'clean_isolated.yml',
|
||||
self.private_data_dir,
|
||||
extravars=extravars
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def update_capacity(cls, instance, task_result):
|
||||
instance.version = 'ansible-runner-{}'.format(task_result['version'])
|
||||
|
||||
if instance.capacity == 0 and task_result['capacity_cpu']:
|
||||
logger.warning('Isolated instance {} has re-joined.'.format(instance.hostname))
|
||||
instance.cpu = int(task_result['cpu'])
|
||||
instance.memory = int(task_result['mem'])
|
||||
instance.cpu_capacity = int(task_result['capacity_cpu'])
|
||||
instance.mem_capacity = int(task_result['capacity_mem'])
|
||||
instance.capacity = get_system_task_capacity(scale=instance.capacity_adjustment,
|
||||
cpu_capacity=int(task_result['capacity_cpu']),
|
||||
mem_capacity=int(task_result['capacity_mem']))
|
||||
instance.save(update_fields=['cpu', 'memory', 'cpu_capacity', 'mem_capacity', 'capacity', 'version', 'modified'])
|
||||
|
||||
def health_check(self, instance_qs):
|
||||
'''
|
||||
:param instance_qs: List of Django objects representing the
|
||||
isolated instances to manage
|
||||
Runs playbook that will
|
||||
- determine if instance is reachable
|
||||
- find the instance capacity
|
||||
- clean up orphaned private files
|
||||
Performs save on each instance to update its capacity.
|
||||
'''
|
||||
instance_qs = [i for i in instance_qs if i.enabled]
|
||||
if not len(instance_qs):
|
||||
return
|
||||
try:
|
||||
private_data_dir = tempfile.mkdtemp(
|
||||
prefix='awx_iso_heartbeat_',
|
||||
dir=settings.AWX_PROOT_BASE_PATH
|
||||
)
|
||||
self.runner_params = self.build_runner_params([
|
||||
instance.hostname for instance in instance_qs
|
||||
])
|
||||
self.runner_params['private_data_dir'] = private_data_dir
|
||||
self.runner_params['forks'] = len(instance_qs)
|
||||
runner_obj = self.run_management_playbook(
|
||||
'heartbeat_isolated.yml',
|
||||
private_data_dir
|
||||
)
|
||||
|
||||
if runner_obj.status == 'successful':
|
||||
for instance in instance_qs:
|
||||
task_result = {}
|
||||
try:
|
||||
task_result = runner_obj.get_fact_cache(instance.hostname)
|
||||
except Exception:
|
||||
logger.exception('Failed to read status from isolated instances')
|
||||
if 'awx_capacity_cpu' in task_result and 'awx_capacity_mem' in task_result:
|
||||
task_result = {
|
||||
'cpu': task_result['awx_cpu'],
|
||||
'mem': task_result['awx_mem'],
|
||||
'capacity_cpu': task_result['awx_capacity_cpu'],
|
||||
'capacity_mem': task_result['awx_capacity_mem'],
|
||||
'version': task_result['awx_capacity_version']
|
||||
}
|
||||
IsolatedManager.update_capacity(instance, task_result)
|
||||
logger.debug('Isolated instance {} successful heartbeat'.format(instance.hostname))
|
||||
elif instance.capacity == 0:
|
||||
logger.debug('Isolated instance {} previously marked as lost, could not re-join.'.format(
|
||||
instance.hostname))
|
||||
else:
|
||||
logger.warning('Could not update status of isolated instance {}'.format(instance.hostname))
|
||||
if instance.is_lost(isolated=True):
|
||||
instance.capacity = 0
|
||||
instance.save(update_fields=['capacity'])
|
||||
logger.error('Isolated instance {} last checked in at {}, marked as lost.'.format(
|
||||
instance.hostname, instance.modified))
|
||||
finally:
|
||||
if os.path.exists(private_data_dir):
|
||||
shutil.rmtree(private_data_dir)
|
||||
|
||||
def run(self, instance, private_data_dir, playbook, module, module_args,
|
||||
event_data_key, ident=None):
|
||||
"""
|
||||
Run a job on an isolated host.
|
||||
|
||||
:param instance: a `model.Job` instance
|
||||
:param private_data_dir: an absolute path on the local file system
|
||||
where job-specific data should be written
|
||||
(i.e., `/tmp/awx_N_xyz/`)
|
||||
:param playbook: the playbook to run
|
||||
:param module: the module to run
|
||||
:param module_args: the module args to use
|
||||
:param event_data_key: e.g., job_id, inventory_id, ...
|
||||
|
||||
For a completed job run, this function returns (status, rc),
|
||||
representing the status and return code of the isolated
|
||||
`ansible-playbook` run.
|
||||
"""
|
||||
self.ident = ident
|
||||
self.event_data_key = event_data_key
|
||||
self.instance = instance
|
||||
self.private_data_dir = private_data_dir
|
||||
self.runner_params = self.build_runner_params(
|
||||
[instance.execution_node],
|
||||
verbosity=min(5, self.instance.verbosity)
|
||||
)
|
||||
status, rc = self.dispatch(playbook, module, module_args)
|
||||
if status == 'successful':
|
||||
status, rc = self.check()
|
||||
else:
|
||||
# emit an EOF event
|
||||
event_data = {'event': 'EOF', 'final_counter': 0}
|
||||
event_data.setdefault(self.event_data_key, self.instance.id)
|
||||
CallbackQueueDispatcher().dispatch(event_data)
|
||||
return status, rc
|
||||
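The manager above is driven in three phases: dispatch() rsyncs the private data directory to the isolated node and starts the job, check() polls until a status artifact comes back (ingesting job events as they are synced), and cleanup() removes the remote directory. A rough sketch of how a caller might drive it end to end (`job` and the two callbacks are hypothetical stand-ins for the real task-system hooks):

```python
from awx.main.isolated.manager import IsolatedManager

# A minimal sketch, assuming `job` is a unified job model instance with the
# attributes referenced below; not the actual awx task-system wiring.
mgr = IsolatedManager(
    cancelled_callback=lambda: job.cancel_flag,  # polled between syncs
    check_callback=lambda data: job.update_model(job_args=data.get('command')),
)
status, rc = mgr.run(
    instance=job,
    private_data_dir='/tmp/awx_42_xyz',  # hypothetical path
    playbook='site.yml',
    module=None,
    module_args=None,
    event_data_key='job_id',
    ident='42',
)
mgr.cleanup()  # last-ditch removal of the remote private data dir
```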
@@ -59,7 +59,7 @@ class Command(BaseCommand):
        if len(pks_to_delete):
            ActivityStream.objects.filter(pk__in=pks_to_delete).delete()
            n_deleted_items += len(pks_to_delete)
        self.logger.log(99, "Removed %d items", n_deleted_items)
        self.logger.info("Removed {} items".format(n_deleted_items))

    def handle(self, *args, **options):
        self.verbosity = int(options.get('verbosity', 1))

@@ -1,148 +0,0 @@
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved

# Python
import re
import sys
from dateutil.relativedelta import relativedelta

# Django
from django.core.management.base import BaseCommand, CommandError
from django.db import transaction
from django.utils.timezone import now

# AWX
from awx.main.models.fact import Fact
from awx.conf.license import feature_enabled

OLDER_THAN = 'older_than'
GRANULARITY = 'granularity'


class CleanupFacts(object):
    def __init__(self):
        self.timestamp = None

    # Find all with timestamp < older_than
    # Start search at < older_than, stop search at oldest entry
    # Find all factVersion < pivot && > (pivot - granularity) grouped by host sorted by time descending (because it's indexed this way)
    # foreach group
    #     Delete all except LAST entry (or Delete all except the FIRST entry, it's an arbitrary decision)
    #
    # pivot -= granularity
    # group by host
    def cleanup(self, older_than_abs, granularity, module=None):
        fact_oldest = Fact.objects.all().order_by('timestamp').first()
        if not fact_oldest:
            return 0

        kv = {
            'timestamp__lte': older_than_abs
        }
        if module:
            kv['module'] = module

        # Special case, granularity=0x where x is d, w, or y
        # The intent is to delete all facts < older_than_abs
        if granularity == relativedelta():
            qs = Fact.objects.filter(**kv)
            count = qs.count()
            qs.delete()
            return count

        total = 0

        date_pivot = older_than_abs
        while date_pivot > fact_oldest.timestamp:
            date_pivot_next = date_pivot - granularity

            # For the current time window.
            # Delete all facts except the one that matches the largest timestamp.
            kv = {
                'timestamp__lte': date_pivot
            }
            if module:
                kv['module'] = module


            fact_version_obj = Fact.objects.filter(**kv).order_by('-timestamp').first()
            if fact_version_obj:
                kv = {
                    'timestamp__lt': fact_version_obj.timestamp,
                    'timestamp__gt': date_pivot_next
                }
                if module:
                    kv['module'] = module
                qs = Fact.objects.filter(**kv)
                count = qs.count()
                qs.delete()
                total += count

            date_pivot = date_pivot_next

        return total

    '''
    older_than and granularity are of type relativedelta
    '''
    def run(self, older_than, granularity, module=None):
        t = now()
        deleted_count = self.cleanup(t - older_than, granularity, module=module)
        print("Deleted %d facts." % deleted_count)


class Command(BaseCommand):
    help = 'Cleanup facts. For each host older than the value specified, keep one fact scan for each time window (granularity).'

    def add_arguments(self, parser):
        parser.add_argument('--older_than',
                            dest='older_than',
                            default='30d',
                            help='Specify the relative time to consider facts older than (w)eek (d)ay or (y)ear (i.e. 5d, 2w, 1y). Defaults to 30d.')
        parser.add_argument('--granularity',
                            dest='granularity',
                            default='1w',
                            help='Window duration to group same hosts by for deletion (w)eek (d)ay or (y)ear (i.e. 5d, 2w, 1y). Defaults to 1w.')
        parser.add_argument('--module',
                            dest='module',
                            default=None,
                            help='Limit cleanup to a particular module.')

    def __init__(self):
        super(Command, self).__init__()

    def string_time_to_timestamp(self, time_string):
        units = {
            'y': 'years',
            'd': 'days',
            'w': 'weeks',
            'm': 'months'
        }
        try:
            match = re.match(r'(?P<value>[0-9]+)(?P<unit>.*)', time_string)
            group = match.groupdict()
            kv = {}
            units_verbose = units[group['unit']]
            kv[units_verbose] = int(group['value'])
            return relativedelta(**kv)
        except (KeyError, TypeError, AttributeError):
            return None

    @transaction.atomic
    def handle(self, *args, **options):
        sys.stderr.write("This command has been deprecated and will be removed in a future release.\n")
        if not feature_enabled('system_tracking'):
            raise CommandError("The System Tracking feature is not enabled for your instance")
        cleanup_facts = CleanupFacts()
        if not all([options[GRANULARITY], options[OLDER_THAN]]):
            raise CommandError('Both --granularity and --older_than are required.')

        older_than = self.string_time_to_timestamp(options[OLDER_THAN])
        granularity = self.string_time_to_timestamp(options[GRANULARITY])

        if older_than is None:
            raise CommandError('--older_than invalid value "%s"' % options[OLDER_THAN])
        if granularity is None:
            raise CommandError('--granularity invalid value "%s"' % options[GRANULARITY])

        cleanup_facts.run(older_than, granularity, module=options['module'])
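The deleted command parsed its time arguments into dateutil relativedelta objects. Spelled out as a worked example of string_time_to_timestamp's behavior (illustrative values, not code from the diff):

```python
from dateutil.relativedelta import relativedelta

# string_time_to_timestamp('2w') effectively builds:
relativedelta(weeks=2)   # '2w'  -> units['w'] == 'weeks'
relativedelta(days=30)   # '30d' -> the --older_than default
# cleanup() then deletes facts older than now() - relativedelta(days=30),
# keeping one fact per host per one-week window by default.
```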
awx/main/management/commands/cleanup_sessions.py (new file, 25 lines)
@@ -0,0 +1,25 @@
import logging
from django.core import management
from django.core.management.base import BaseCommand

from django.contrib.sessions.models import Session


class Command(BaseCommand):

    def init_logging(self):
        log_levels = dict(enumerate([logging.ERROR, logging.INFO,
                                     logging.DEBUG, 0]))
        self.logger = logging.getLogger('awx.main.commands.cleanup_sessions')
        self.logger.setLevel(log_levels.get(self.verbosity, 0))
        handler = logging.StreamHandler()
        handler.setFormatter(logging.Formatter('%(message)s'))
        self.logger.addHandler(handler)
        self.logger.propagate = False

    def execute(self, *args, **options):
        self.verbosity = int(options.get('verbosity', 1))
        self.init_logging()
        total_sessions = Session.objects.all().count()
        management.call_command('clearsessions')
        self.logger.info("Expired Sessions deleted {}".format(total_sessions - Session.objects.all().count()))
awx/main/management/commands/cleanup_tokens.py (new file, 28 lines)
@@ -0,0 +1,28 @@
import logging
from django.core import management
from django.core.management.base import BaseCommand

from awx.main.models import OAuth2AccessToken
from oauth2_provider.models import RefreshToken


class Command(BaseCommand):

    def init_logging(self):
        log_levels = dict(enumerate([logging.ERROR, logging.INFO,
                                     logging.DEBUG, 0]))
        self.logger = logging.getLogger('awx.main.commands.cleanup_tokens')
        self.logger.setLevel(log_levels.get(self.verbosity, 0))
        handler = logging.StreamHandler()
        handler.setFormatter(logging.Formatter('%(message)s'))
        self.logger.addHandler(handler)
        self.logger.propagate = False

    def execute(self, *args, **options):
        self.verbosity = int(options.get('verbosity', 1))
        self.init_logging()
        total_accesstokens = OAuth2AccessToken.objects.all().count()
        total_refreshtokens = RefreshToken.objects.all().count()
        management.call_command('cleartokens')
        self.logger.info("Expired OAuth 2 Access Tokens deleted: {}".format(total_accesstokens - OAuth2AccessToken.objects.all().count()))
        self.logger.info("Expired OAuth 2 Refresh Tokens deleted: {}".format(total_refreshtokens - RefreshToken.objects.all().count()))
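Both cleanup commands map the --verbosity flag onto a log level with the same dict(enumerate(...)) idiom. Spelled out (a worked illustration, not code from the diff):

```python
import logging

# dict(enumerate([...])) builds {0: ERROR, 1: INFO, 2: DEBUG, 3: 0}
log_levels = dict(enumerate([logging.ERROR, logging.INFO, logging.DEBUG, 0]))
assert log_levels[1] == logging.INFO  # --verbosity 1, the default
assert log_levels[3] == 0             # 0 is logging.NOTSET: defer to the parent logger
```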
@@ -34,7 +34,7 @@ class Command(BaseCommand):
                                   scm_update_cache_timeout=0,
                                   organization=o)
        p.save(skip_update=True)
        ssh_type = CredentialType.from_v1_kind('ssh')
        ssh_type = CredentialType.objects.filter(namespace='ssh').first()
        c = Credential.objects.create(credential_type=ssh_type,
                                      name='Demo Credential',
                                      inputs={
@@ -47,7 +47,7 @@ class Command(BaseCommand):
                                   created_by=superuser)
        Host.objects.create(name='localhost',
                            inventory=i,
                            variables="ansible_connection: local",
                            variables="ansible_connection: local\nansible_python_interpreter: '{{ ansible_playbook_python }}'",
                            created_by=superuser)
        jt = JobTemplate.objects.create(name='Demo Job Template',
                                        playbook='hello_world.yml',

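The preload change swaps the removed v1 helper for a namespace lookup. The same pattern works for any managed credential type (a sketch; the 'scm' example is illustrative, not part of this diff):

```python
from awx.main.models import CredentialType

# old: CredentialType.from_v1_kind('ssh')
ssh_type = CredentialType.objects.filter(namespace='ssh').first()

# the namespace field addresses managed types uniformly, e.g.:
scm_type = CredentialType.objects.filter(namespace='scm').first()
```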
@@ -4,8 +4,6 @@ from importlib import import_module
# Django
from django.utils import timezone
from django.conf import settings
from django.contrib.auth import logout
from django.http import HttpRequest
from django.core.management.base import BaseCommand, CommandError
from django.contrib.auth.models import User
from django.contrib.sessions.models import Session
@@ -29,9 +27,9 @@ class Command(BaseCommand):
        # with consideration for timezones.
        start = timezone.now()
        sessions = Session.objects.filter(expire_date__gte=start).iterator()
        request = HttpRequest()
        for session in sessions:
            user_id = session.get_decoded().get('_auth_user_id')
            if (user is None) or (user_id and user.id == int(user_id)):
                request.session = import_module(settings.SESSION_ENGINE).SessionStore(session.session_key)
                logout(request)
                session = import_module(settings.SESSION_ENGINE).SessionStore(session.session_key)
                # Log out the session, but without the need for a request object.
                session.flush()

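The rewritten loop drops the fake HttpRequest entirely: flushing a SessionStore deletes the stored session data and rotates the key, which is all logout() was doing here. In isolation (a sketch, assuming a configured Django session engine):

```python
from importlib import import_module
from django.conf import settings

# Expire one session by key without constructing a request object.
engine = import_module(settings.SESSION_ENGINE)
store = engine.SessionStore('abc123sessionkey')  # hypothetical session key
store.flush()  # deletes the stored session and clears its data
```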
awx/main/management/commands/gather_analytics.py (new file, 31 lines)
@@ -0,0 +1,31 @@
import logging
from awx.main.analytics import gather, ship
from django.core.management.base import BaseCommand


class Command(BaseCommand):
    '''
    Gather AWX analytics data
    '''

    help = 'Gather AWX analytics data'

    def add_arguments(self, parser):
        parser.add_argument('--ship', dest='ship', action='store_true',
                            help='Enable to ship metrics via insights-client')

    def init_logging(self):
        self.logger = logging.getLogger('awx.main.analytics')
        handler = logging.StreamHandler()
        handler.setLevel(logging.DEBUG)
        handler.setFormatter(logging.Formatter('%(message)s'))
        self.logger.addHandler(handler)
        self.logger.propagate = False

    def handle(self, *args, **options):
        tgz = gather()
        self.init_logging()
        if tgz:
            self.logger.debug(tgz)
            if options.get('ship'):
                ship(tgz)
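As a usage sketch, the command can also be invoked programmatically (equivalent to running `awx-manage gather_analytics --ship` from a shell):

```python
from django.core import management

# ship=True maps onto the --ship flag via its dest
management.call_command('gather_analytics', ship=True)
```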
@@ -41,6 +41,7 @@ from awx.main.utils import (
from awx.main.utils.common import _get_ansible_version
from awx.main.signals import disable_activity_stream
from awx.main.constants import STANDARD_INVENTORY_UPDATE_ENV
from awx.main.utils.pglock import advisory_lock

logger = logging.getLogger('awx.main.commands.inventory_import')

@@ -75,12 +76,13 @@ class AnsibleInventoryLoader(object):
    /usr/bin/ansible/ansible-inventory -i hosts --list
    '''

    def __init__(self, source, is_custom=False, venv_path=None):
    def __init__(self, source, is_custom=False, venv_path=None, verbosity=0):
        self.source = source
        self.source_dir = functioning_dir(self.source)
        self.is_custom = is_custom
        self.tmp_private_dir = None
        self.method = 'ansible-inventory'
        self.verbosity = verbosity
        if venv_path:
            self.venv_path = venv_path
        else:
@@ -134,8 +136,14 @@ class AnsibleInventoryLoader(object):
        # https://github.com/ansible/ansible/issues/50714
        bargs = ['python', ansible_inventory_path, '-i', self.source]
        ansible_version = _get_ansible_version(ansible_inventory_path[:-len('-inventory')])
        if ansible_version != 'unknown' and Version(ansible_version) >= Version('2.5'):
            bargs.extend(['--playbook-dir', self.source_dir])
        if ansible_version != 'unknown':
            this_version = Version(ansible_version)
            if this_version >= Version('2.5'):
                bargs.extend(['--playbook-dir', self.source_dir])
            if this_version >= Version('2.8'):
                if self.verbosity:
                    # INFO: -vvv, DEBUG: -vvvvv, for inventory, any more than 3 makes little difference
                    bargs.append('-{}'.format('v' * min(5, self.verbosity * 2 + 1)))
        logger.debug('Using base command: {}'.format(' '.join(bargs)))
        return bargs

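The verbosity-to-flag arithmetic caps at five v's. Worked values for the expression above (the helper name below is illustrative only):

```python
def inventory_verbosity_flag(verbosity):
    # mirrors '-{}'.format('v' * min(5, self.verbosity * 2 + 1)) above
    return '-{}'.format('v' * min(5, verbosity * 2 + 1))

assert inventory_verbosity_flag(1) == '-vvv'    # INFO
assert inventory_verbosity_flag(2) == '-vvvvv'  # DEBUG
assert inventory_verbosity_flag(4) == '-vvvvv'  # capped at five v's
```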
@@ -302,7 +310,7 @@ class Command(BaseCommand):
        if enabled is not default:
            enabled_value = getattr(self, 'enabled_value', None)
            if enabled_value is not None:
                enabled = bool(str(enabled_value) == str(enabled))
                enabled = bool(str(enabled_value).lower() == str(enabled).lower())
            else:
                enabled = bool(enabled)
        if enabled is default:
@@ -870,20 +878,21 @@ class Command(BaseCommand):
        Load inventory from in-memory groups to the database, overwriting or
        merging as appropriate.
        '''
        # FIXME: Attribute changes to superuser?
        # Perform __in queries in batches (mainly for unit tests using SQLite).
        self._batch_size = 500
        self._build_db_instance_id_map()
        self._build_mem_instance_id_map()
        if self.overwrite:
            self._delete_hosts()
            self._delete_groups()
            self._delete_group_children_and_hosts()
        self._update_inventory()
        self._create_update_groups()
        self._create_update_hosts()
        self._create_update_group_children()
        self._create_update_group_hosts()
        with advisory_lock('inventory_{}_update'.format(self.inventory.id)):
            # FIXME: Attribute changes to superuser?
            # Perform __in queries in batches (mainly for unit tests using SQLite).
            self._batch_size = 500
            self._build_db_instance_id_map()
            self._build_mem_instance_id_map()
            if self.overwrite:
                self._delete_hosts()
                self._delete_groups()
                self._delete_group_children_and_hosts()
            self._update_inventory()
            self._create_update_groups()
            self._create_update_hosts()
            self._create_update_group_children()
            self._create_update_group_hosts()

    def remote_tower_license_compare(self, local_license_type):
        # this requires https://github.com/ansible/ansible/pull/52747
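Wrapping the whole load in advisory_lock serializes concurrent imports against the same inventory at the Postgres level: only one process may hold a given named lock at a time. As a usage sketch of the same helper:

```python
from awx.main.utils.pglock import advisory_lock

# A sketch: two simultaneous imports of inventory 42 would run one after
# the other, since the second blocks until the named lock is released.
with advisory_lock('inventory_42_update'):
    pass  # perform the batched deletes/updates shown above
```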
@@ -1025,7 +1034,8 @@ class Command(BaseCommand):

        source = self.get_source_absolute_path(self.source)

        data = AnsibleInventoryLoader(source=source, is_custom=self.is_custom, venv_path=venv_path).load()
        data = AnsibleInventoryLoader(source=source, is_custom=self.is_custom,
                                      venv_path=venv_path, verbosity=self.verbosity).load()

        logger.debug('Finished loading from source: %s', source)
        logger.info('Processing JSON output...')
@@ -1078,7 +1088,7 @@ class Command(BaseCommand):
            logger.warning('update computed fields took %d queries',
                           len(connection.queries) - queries_before2)
        # Check if the license is valid.
        # If the license is not valid, a CommandError will be thrown,
        # If the license is not valid, a CommandError will be thrown,
        # and inventory update will be marked as invalid.
        # The transaction.atomic() block will then roll back the changes.
        license_fail = True

@@ -44,8 +44,10 @@ class Command(BaseCommand):
            print((fmt + ']').format(instance_group))
            for x in instance_group.instances.all():
                color = '\033[92m'
                if x.capacity == 0 or x.enabled is False:
                if x.capacity == 0:
                    color = '\033[91m'
                if x.enabled is False:
                    color = '\033[90m[DISABLED] '
                fmt = '\t' + color + '{0.hostname} capacity={0.capacity} version={1}'
                if x.last_isolated_check:
                    fmt += ' last_isolated_check="{0.last_isolated_check:%Y-%m-%d %H:%M:%S}"'

@@ -3,14 +3,15 @@

from django.conf import settings
from django.core.management.base import BaseCommand
from kombu import Connection, Exchange, Queue
from kombu import Exchange, Queue

from awx.main.dispatch.kombu import Connection
from awx.main.dispatch.worker import AWXConsumer, CallbackBrokerWorker


class Command(BaseCommand):
    '''
    Save Job Callback receiver (see awx.plugins.callbacks.job_event_callback)
    Save Job Callback receiver
    Runs as a management command and receives job save events. It then hands
    them off to worker processors (see Worker) which write them to the database
    '''

@@ -8,10 +8,12 @@ from django.conf import settings
from django.core.cache import cache as django_cache
from django.core.management.base import BaseCommand
from django.db import connection as django_connection, connections
from kombu import Connection, Exchange, Queue
from kombu import Exchange, Queue

from awx.main.utils.handlers import AWXProxyHandler
from awx.main.dispatch import get_local_queuename, reaper
from awx.main.dispatch.control import Control
from awx.main.dispatch.kombu import Connection
from awx.main.dispatch.pool import AutoscalePool
from awx.main.dispatch.worker import AWXConsumer, TaskWorker

@@ -120,6 +122,12 @@ class Command(BaseCommand):

        reaper.reap()
        consumer = None

        # don't ship external logs inside the dispatcher's parent process
        # this exists to work around a race condition + deadlock bug on fork
        # in cpython itself:
        # https://bugs.python.org/issue37429
        AWXProxyHandler.disable()
        with Connection(settings.BROKER_URL) as conn:
            try:
                bcast = 'tower_broadcast_all'

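The AWXProxyHandler.disable() call is an instance of a general precaution: a logging handler whose internal lock is held at fork time can deadlock the child (the cpython bug linked above). A generic sketch of the same idea, independent of awx's handler (all names here are illustrative, not awx APIs):

```python
import logging
import multiprocessing


def spawn_worker(target):
    # A sketch: detach non-stream (e.g. network) log handlers in the parent
    # so a forked child can't inherit a handler whose lock is held mid-emit.
    root = logging.getLogger()
    external = [h for h in root.handlers if not isinstance(h, logging.StreamHandler)]
    for h in external:
        root.removeHandler(h)
    try:
        proc = multiprocessing.Process(target=target)
        proc.start()
        return proc
    finally:
        for h in external:
            root.addHandler(h)
```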
@@ -0,0 +1,14 @@
# Copyright (c) 2019 Ansible by Red Hat
# All Rights Reserved.

from django.core.management.base import BaseCommand

from awx.main.models import CredentialType


class Command(BaseCommand):

    help = 'Load default managed credential types.'

    def handle(self, *args, **options):
        CredentialType.setup_tower_managed_defaults()
@@ -1,13 +1,14 @@
import os
import shutil
import subprocess
import sys
import tempfile

from django.conf import settings
from django.core.management.base import BaseCommand, CommandError

from awx.main.expect import run
import ansible_runner

from awx.main.isolated.manager import set_pythonpath


class Command(BaseCommand):
@@ -25,23 +26,24 @@ class Command(BaseCommand):

        try:
            path = tempfile.mkdtemp(prefix='awx_isolated_ssh', dir=settings.AWX_PROOT_BASE_PATH)
            args = [
                'ansible', 'all', '-i', '{},'.format(hostname), '-u',
                settings.AWX_ISOLATED_USERNAME, '-T5', '-m', 'shell',
                '-a', 'ansible-runner --version', '-vvv'
            ]
            ssh_key = None
            if all([
                getattr(settings, 'AWX_ISOLATED_KEY_GENERATION', False) is True,
                getattr(settings, 'AWX_ISOLATED_PRIVATE_KEY', None)
            ]):
                ssh_key_path = os.path.join(path, '.isolated')
                ssh_auth_sock = os.path.join(path, 'ssh_auth.sock')
                run.open_fifo_write(ssh_key_path, settings.AWX_ISOLATED_PRIVATE_KEY)
                args = run.wrap_args_with_ssh_agent(args, ssh_key_path, ssh_auth_sock)
            try:
                print(' '.join(args))
                subprocess.check_call(args)
            except subprocess.CalledProcessError as e:
                sys.exit(e.returncode)
                ssh_key = settings.AWX_ISOLATED_PRIVATE_KEY
            env = dict(os.environ.items())
            set_pythonpath(os.path.join(settings.ANSIBLE_VENV_PATH, 'lib'), env)
            res = ansible_runner.interface.run(
                private_data_dir=path,
                host_pattern='all',
                inventory='{} ansible_ssh_user={}'.format(hostname, settings.AWX_ISOLATED_USERNAME),
                module='shell',
                module_args='ansible-runner --version',
                envvars=env,
                verbosity=3,
                ssh_key=ssh_key,
            )
            sys.exit(res.rc)
        finally:
            shutil.rmtree(path)

@@ -18,9 +18,9 @@ from django.db import IntegrityError, connection
from django.utils.functional import curry
from django.shortcuts import get_object_or_404, redirect
from django.apps import apps
from django.utils.deprecation import MiddlewareMixin
from django.utils.translation import ugettext_lazy as _
from django.core.urlresolvers import reverse
from django.urls import resolve
from django.urls import reverse, resolve

from awx.main.models import ActivityStream
from awx.main.utils.named_url_graph import generate_graph, GraphNode
@@ -32,7 +32,7 @@ analytics_logger = logging.getLogger('awx.analytics.activity_stream')
perf_logger = logging.getLogger('awx.analytics.performance')


class TimingMiddleware(threading.local):
class TimingMiddleware(threading.local, MiddlewareMixin):

    dest = '/var/log/tower/profile'

@@ -65,11 +65,12 @@ class TimingMiddleware(threading.local):
        return filepath


class ActivityStreamMiddleware(threading.local):
class ActivityStreamMiddleware(threading.local, MiddlewareMixin):

    def __init__(self):
    def __init__(self, get_response=None):
        self.disp_uid = None
        self.instance_ids = []
        super().__init__(get_response)

    def process_request(self, request):
        if hasattr(request, 'user') and hasattr(request.user, 'is_authenticated') and request.user.is_authenticated():
@@ -119,7 +120,7 @@ class ActivityStreamMiddleware(threading.local):
            self.instance_ids.append(instance.id)


class SessionTimeoutMiddleware(object):
class SessionTimeoutMiddleware(MiddlewareMixin):
    """
    Resets the session timeout for both the UI and the actual session for the API
    to the value of SESSION_COOKIE_AGE on every request if there is a valid session.
@@ -127,8 +128,11 @@ class SessionTimeoutMiddleware(object):

    def process_response(self, request, response):
        should_skip = 'HTTP_X_WS_SESSION_QUIET' in request.META
        req_session = getattr(request, 'session', None)
        if req_session and not req_session.is_empty() and should_skip is False:
        # Something went wrong, such as upgrade-in-progress page
        if not hasattr(request, 'session'):
            return response
        # Only update the session if it hasn't been flushed by being forced to log out.
        if request.session and not request.session.is_empty() and not should_skip:
            expiry = int(settings.SESSION_COOKIE_AGE)
            request.session.set_expiry(expiry)
            response['Session-Timeout'] = expiry
@@ -149,9 +153,9 @@ def _customize_graph():
    settings.NAMED_URL_GRAPH[Instance].add_bindings()


class URLModificationMiddleware(object):
class URLModificationMiddleware(MiddlewareMixin):

    def __init__(self):
    def __init__(self, get_response=None):
        models = [m for m in apps.get_app_config('main').get_models() if hasattr(m, 'get_absolute_url')]
        generate_graph(models)
        _customize_graph()
@@ -175,6 +179,7 @@ class URLModificationMiddleware(object):
            category=_('Named URL'),
            category_slug='named-url',
        )
        super().__init__(get_response)

    def _named_url_to_pk(self, node, named_url):
        kwargs = {}
@@ -205,7 +210,7 @@ class URLModificationMiddleware(object):
        request.path_info = new_path


class MigrationRanCheckMiddleware(object):
class MigrationRanCheckMiddleware(MiddlewareMixin):

    def process_request(self, request):
        executor = MigrationExecutor(connection)

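Every middleware in this file is being ported to Django's MiddlewareMixin, which supplies the new-style `__call__` protocol (storing `get_response`) while keeping the old process_request/process_response hooks working. The minimal shape of such a middleware (a generic sketch, not taken from this diff):

```python
from django.utils.deprecation import MiddlewareMixin


class ExampleHeaderMiddleware(MiddlewareMixin):
    # MiddlewareMixin stores get_response and implements __call__, so the
    # old-style hook below keeps working under Django's new middleware API.
    def __init__(self, get_response=None):
        super().__init__(get_response)

    def process_response(self, request, response):
        response['X-Example'] = 'yes'  # hypothetical header
        return response
```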
@@ -5,9 +5,11 @@ from __future__ import unicode_literals
from django.db import migrations, models

from awx.main.models import CredentialType
from awx.main.utils.common import set_current_apps


def migrate_to_static_inputs(apps, schema_editor):
    set_current_apps(apps)
    CredentialType.setup_tower_managed_defaults()


awx/main/migrations/0064_v350_analytics_state.py (new file, 25 lines)
@@ -0,0 +1,25 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2019-01-28 14:27
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('main', '0063_v350_org_host_limits'),
    ]

    operations = [
        migrations.CreateModel(
            name='TowerAnalyticsState',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('last_run', models.DateTimeField(auto_now_add=True)),
            ],
            options={
                'abstract': False,
            },
        ),
    ]
awx/main/migrations/0065_v350_index_job_status.py (new file, 20 lines)
@@ -0,0 +1,20 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2019-03-29 16:56
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('main', '0064_v350_analytics_state'),
    ]

    operations = [
        migrations.AlterField(
            model_name='unifiedjob',
            name='status',
            field=models.CharField(choices=[('new', 'New'), ('pending', 'Pending'), ('waiting', 'Waiting'), ('running', 'Running'), ('successful', 'Successful'), ('failed', 'Failed'), ('error', 'Error'), ('canceled', 'Canceled')], db_index=True, default='new', editable=False, max_length=20),
        ),
    ]
@@ -0,0 +1,20 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2019-03-28 17:56
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('main', '0065_v350_index_job_status'),
    ]

    operations = [
        migrations.AddField(
            model_name='inventorysource',
            name='custom_virtualenv',
            field=models.CharField(blank=True, default=None, help_text='Local absolute file path containing a custom Python virtualenv to use', max_length=100, null=True),
        ),
    ]
Some files were not shown because too many files have changed in this diff.