Mirror of https://github.com/ansible/awx.git (synced 2026-02-05 11:34:43 -03:30)

Compare commits: 566 commits, branch bump-djang..., author thedoubl3j
| Author | SHA1 | Date |
|---|---|---|
| thedoubl3j | 6389316206 ... 4a5cfdc11d (566 commits; only bare SHAs were captured, author and date cells are empty in this mirror) | |
.codecov.yml (new file, 57 lines)
@@ -0,0 +1,57 @@
---

codecov:
  notify:
    after_n_builds: 9  # Number of test matrix+lint jobs uploading coverage
    wait_for_ci: false

  require_ci_to_pass: false

  token: >-  # repo-scoped, upload-only, needed for stability in PRs from forks
    2b8c7a7a-7293-4a00-bf02-19bd55a1389b

comment:
  require_changes: true

coverage:
  range: 100..100
  status:
    patch:
      default:
        target: 100%
      pytest:
        target: 100%
        flags:
        - pytest
      typing:
        flags:
        - MyPy
    project:
      default:
        target: 75%
      lib:
        flags:
        - pytest
        paths:
        - awx/
        target: 75%
      tests:
        flags:
        - pytest
        paths:
        - tests/
        - >-
          **/test/
        - >-
          **/tests/
        - >-
          **/test/**
        - >-
          **/tests/**
        target: 95%
      typing:
        flags:
        - MyPy
        target: 100%

...
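The `pytest` and `MyPy` flags referenced by the status checks above only match coverage uploads that declare those same flags at upload time. As a rough sketch of the consuming side, mirroring the codecov-action usage that appears in the ci.yml changes later in this diff (the step name and file path here are illustrative, not taken from the source):

```yaml
# Sketch: an upload step whose flags make it count toward the `pytest`-flagged
# statuses defined in .codecov.yml. Step name and file path are illustrative;
# the path matches the [xml] output configured in .coveragerc below.
- name: Upload test coverage to Codecov
  uses: codecov/codecov-action@v4
  with:
    files: ./reports/coverage.xml
    flags: >-
      CI-GHA,
      pytest
    token: ${{ secrets.CODECOV_TOKEN }}
```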
.coveragerc (42 lines changed)
@@ -1,16 +1,6 @@
-[run]
-source = awx
-branch = True
-omit =
-    awx/main/migrations/*
-    awx/lib/site-packages/*
-
[report]
# Regexes for lines to exclude from consideration
-exclude_lines =
-    # Have to re-enable the standard pragma
-    pragma: no cover
-
+exclude_also =
    # Don't complain about missing debug-only code:
    def __repr__
    if self\.debug
@@ -23,7 +13,35 @@ exclude_lines =
    if 0:
    if __name__ == .__main__.:

-ignore_errors = True
+    ^\s*@pytest\.mark\.xfail
+
+[run]
+branch = True
+# NOTE: `disable_warnings` is needed when `pytest-cov` runs in tandem
+# NOTE: with `pytest-xdist`. These warnings are false negative in this
+# NOTE: context.
+#
+# NOTE: It's `coveragepy` that emits the warnings and previously they
+# NOTE: wouldn't get on the radar of `pytest`'s `filterwarnings`
+# NOTE: mechanism. This changed, however, with `pytest >= 8.4`. And
+# NOTE: since we set `filterwarnings = error`, those warnings are being
+# NOTE: raised as exceptions, cascading into `pytest`'s internals and
+# NOTE: causing tracebacks and crashes of the test sessions.
+#
+# Ref:
+# * https://github.com/pytest-dev/pytest-cov/issues/693
+# * https://github.com/pytest-dev/pytest-cov/pull/695
+# * https://github.com/pytest-dev/pytest-cov/pull/696
+disable_warnings =
+    module-not-measured
+omit =
+    awx/main/migrations/*
+    awx/settings/defaults.py
+    awx/settings/*_defaults.py
+source =
+    .
+source_pkgs =
+    awx
+
+[xml]
+output = ./reports/coverage.xml
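Taken together with the COVERAGE_ARGS variable added to the Makefile at the end of this diff, the new [run] and [xml] sections are what a pytest-cov run would exercise. A minimal sketch of such an invocation as a CI step (the test path is an assumption for illustration, not from the source):

```yaml
# Sketch: invoke pytest with the same flags the Makefile's COVERAGE_ARGS adds.
# This writes reports/junit.xml directly and coverage data that the [xml]
# section above renders to ./reports/coverage.xml. Test path is assumed.
- name: Run API tests with coverage (illustrative)
  run: pytest --cov --cov-report=xml --junitxml=reports/junit.xml awx/main/tests/unit
```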
.github/PULL_REQUEST_TEMPLATE.md (vendored, 9 lines changed)
@@ -4,7 +4,8 @@
<!---
If you are fixing an existing issue, please include "related #nnn" in your
commit message and your description; but you should still explain what
-the change does.
+the change does. Also please make sure that if this PR has an attached JIRA, put AAP-<number>
+in as the first entry for your PR title.
-->

##### ISSUE TYPE
@@ -16,17 +17,11 @@ the change does.
##### COMPONENT NAME
<!--- Name of the module/plugin/module/task -->
- API
- UI
- Collection
- CLI
- Docs
- Other

##### AWX VERSION
<!--- Paste verbatim output from `make VERSION` between quotes below -->
```

```


##### ADDITIONAL INFORMATION

.github/actions/awx_devel_image/action.yml (vendored, 20 lines changed)
@@ -4,12 +4,14 @@ inputs:
  github-token:
    description: GitHub Token for registry access
    required: true
+  private-github-key:
+    description: GitHub private key for private repositories
+    required: false
+    default: ''
runs:
  using: composite
  steps:
-    - name: Get python version from Makefile
-      shell: bash
-      run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
+    - uses: ./.github/actions/setup-python

    - name: Set lower case owner name
      shell: bash
@@ -22,13 +24,21 @@ runs:
      run: |
        echo "${{ inputs.github-token }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin

+    - uses: ./.github/actions/setup-ssh-agent
+      with:
+        ssh-private-key: ${{ inputs.private-github-key }}
+
    - name: Pre-pull latest devel image to warm cache
      shell: bash
-      run: docker pull -q ghcr.io/${OWNER_LC}/awx_devel:${{ github.base_ref }}
+      run: |
+        DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER_LC} \
+        COMPOSE_TAG=${{ github.base_ref || github.ref_name }} \
+        docker pull -q `make print-DEVEL_IMAGE_NAME`
+      continue-on-error: true

    - name: Build image for current source checkout
      shell: bash
      run: |
        DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER_LC} \
-        COMPOSE_TAG=${{ github.base_ref }} \
+        COMPOSE_TAG=${{ github.base_ref || github.ref_name }} \
        make docker-compose-build
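The recurring `${{ github.base_ref || github.ref_name }}` expression introduced here picks the PR's target branch when the action runs on a pull_request event (the only case where `github.base_ref` is populated) and falls back to the pushed branch name otherwise. A minimal sketch of the same pattern in isolation (the env var name is illustrative):

```yaml
# Sketch: github.base_ref is empty outside pull_request events, so the `||`
# fallback makes one expression serve both PR and push triggers.
# The env var name here is illustrative.
env:
  IMAGE_TAG: ${{ github.base_ref || github.ref_name }}  # e.g. "devel" in either case
```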
.github/actions/run_awx_devel/action.yml (vendored, 24 lines changed)
@@ -9,20 +9,30 @@ inputs:
    required: false
    default: false
    type: boolean
+  private-github-key:
+    description: GitHub private key for private repositories
+    required: false
+    default: ''
outputs:
  ip:
    description: The IP of the tools_awx_1 container
    value: ${{ steps.data.outputs.ip }}
+  admin-token:
+    description: OAuth token for admin user
+    value: ${{ steps.data.outputs.admin_token }}
runs:
  using: composite
  steps:
    - name: Disable apparmor for rsyslogd, first step
      shell: bash
      run: sudo ln -s /etc/apparmor.d/usr.sbin.rsyslogd /etc/apparmor.d/disable/

    - name: Disable apparmor for rsyslogd, second step
      shell: bash
      run: sudo apparmor_parser -R /etc/apparmor.d/usr.sbin.rsyslogd

    - name: Build awx_devel image for running checks
      uses: ./.github/actions/awx_devel_image
      with:
        github-token: ${{ inputs.github-token }}
+        private-github-key: ${{ inputs.private-github-key }}

    - name: Upgrade ansible-core
      shell: bash
@@ -36,8 +46,10 @@ runs:
      shell: bash
      run: |
        DEV_DOCKER_OWNER=${{ github.repository_owner }} \
-        COMPOSE_TAG=${{ github.base_ref }} \
-        COMPOSE_UP_OPTS="-d" \
+        COMPOSE_TAG=${{ github.base_ref || github.ref_name }} \
+        DJANGO_COLORS=nocolor \
+        SUPERVISOR_ARGS="-n -t" \
+        COMPOSE_UP_OPTS="-d --no-color" \
        make docker-compose

    - name: Update default AWX password
@@ -62,6 +74,4 @@
      shell: bash
      run: |
        AWX_IP=$(docker inspect -f '{{.NetworkSettings.Networks.awx.IPAddress}}' tools_awx_1)
        ADMIN_TOKEN=$(docker exec -i tools_awx_1 awx-manage create_oauth2_token --user admin)
        echo "ip=$AWX_IP" >> $GITHUB_OUTPUT
        echo "admin_token=$ADMIN_TOKEN" >> $GITHUB_OUTPUT
.github/actions/setup-python/action.yml (vendored, new file, 27 lines)
@@ -0,0 +1,27 @@
name: 'Setup Python from Makefile'
description: 'Extract and set up Python version from Makefile'
inputs:
  python-version:
    description: 'Override Python version (optional)'
    required: false
    default: ''
  working-directory:
    description: 'Directory containing the Makefile'
    required: false
    default: '.'
runs:
  using: composite
  steps:
    - name: Get python version from Makefile
      shell: bash
      run: |
        if [ -n "${{ inputs.python-version }}" ]; then
          echo "py_version=${{ inputs.python-version }}" >> $GITHUB_ENV
        else
          cd ${{ inputs.working-directory }}
          echo "py_version=`make PYTHON_VERSION`" >> $GITHUB_ENV
        fi
    - name: Install python
      uses: actions/setup-python@v5
      with:
        python-version: ${{ env.py_version }}
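A calling workflow can either let this action read PYTHON_VERSION from the Makefile or override it explicitly, as the ci.yml changes later in this diff do. A minimal sketch of both call styles:

```yaml
# Sketch: the two ways workflows in this diff consume the composite action.
- uses: ./.github/actions/setup-python        # version comes from `make PYTHON_VERSION`
- uses: ./.github/actions/setup-python
  with:
    python-version: '3.x'                     # explicit override, skips the Makefile
```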
.github/actions/setup-ssh-agent/action.yml (vendored, new file, 29 lines)
@@ -0,0 +1,29 @@
name: 'Setup SSH for GitHub'
description: 'Configure SSH for private repository access'
inputs:
  ssh-private-key:
    description: 'SSH private key for repository access'
    required: false
    default: ''
runs:
  using: composite
  steps:
    - name: Generate placeholder SSH private key if SSH auth for private repos is not needed
      id: generate_key
      shell: bash
      run: |
        if [[ -z "${{ inputs.ssh-private-key }}" ]]; then
          ssh-keygen -t ed25519 -C "github-actions" -N "" -f ~/.ssh/id_ed25519
          echo "SSH_PRIVATE_KEY<<EOF" >> $GITHUB_OUTPUT
          cat ~/.ssh/id_ed25519 >> $GITHUB_OUTPUT
          echo "EOF" >> $GITHUB_OUTPUT
        else
          echo "SSH_PRIVATE_KEY<<EOF" >> $GITHUB_OUTPUT
          echo "${{ inputs.ssh-private-key }}" >> $GITHUB_OUTPUT
          echo "EOF" >> $GITHUB_OUTPUT
        fi

    - name: Add private GitHub key to SSH agent
      uses: webfactory/ssh-agent@v0.9.0
      with:
        ssh-private-key: ${{ steps.generate_key.outputs.SSH_PRIVATE_KEY }}
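The `SSH_PRIVATE_KEY<<EOF ... EOF` lines above use GitHub Actions' heredoc-style delimiter syntax for multiline step outputs, which a single `name=value` line cannot carry. A standalone sketch of the mechanism (step and output names are illustrative):

```yaml
# Sketch: write a multiline value as a step output using the heredoc delimiter
# syntax, then read it back from a later step. Names are illustrative.
- id: emit
  shell: bash
  run: |
    echo "MY_OUTPUT<<EOF" >> "$GITHUB_OUTPUT"
    printf 'line one\nline two\n' >> "$GITHUB_OUTPUT"
    echo "EOF" >> "$GITHUB_OUTPUT"
- run: echo "${{ steps.emit.outputs.MY_OUTPUT }}"
```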
.github/actions/upload_awx_devel_logs/action.yml (vendored; file header missing from the mirror, path inferred from the workflow steps below that call this action)
@@ -13,7 +13,7 @@ runs:
        docker logs tools_awx_1 > ${{ inputs.log-filename }}

    - name: Upload AWX logs as artifact
-      uses: actions/upload-artifact@v3
+      uses: actions/upload-artifact@v4
      with:
-        name: docker-compose-logs
+        name: docker-compose-logs-${{ inputs.log-filename }}
        path: ${{ inputs.log-filename }}
.github/dependabot.yml (vendored, 7 lines changed)
@@ -8,3 +8,10 @@ updates:
    labels:
      - "docs"
      - "dependencies"
+  - package-ecosystem: "pip"
+    directory: "requirements/"
+    schedule:
+      interval: "daily" #run daily until we trust it, then back this off to weekly
+    open-pull-requests-limit: 2
+    labels:
+      - "dependencies"
.github/triage_replies.md (vendored, 9 lines changed)
@@ -1,5 +1,4 @@
## General
-- For the roundup of all the different mailing lists available from AWX, Ansible, and beyond visit: https://docs.ansible.com/ansible/latest/community/communication.html
- Hello, we think your question is answered in our FAQ. Does this: https://www.ansible.com/products/awx-project/faq cover your question?
- You can find the latest documentation here: https://ansible.readthedocs.io/projects/awx/en/latest/userguide/index.html

@@ -83,7 +82,7 @@ The Ansible Community is looking at building an EE that corresponds to all of th
## Mailing List Triage

### Create an issue
-- Hello, thanks for reaching out on list. We think this merits an issue on our Github, https://github.com/ansible/awx/issues. If you could open an issue up on Github it will get tagged and integrated into our planning and workflow. All future work will be tracked there. Issues should include as much information as possible, including screenshots, log outputs, or any reproducers.
+- Hello, thanks for reaching out on list. We think this merits an issue on our GitHub, https://github.com/ansible/awx/issues. If you could open an issue up on GitHub it will get tagged and integrated into our planning and workflow. All future work will be tracked there. Issues should include as much information as possible, including screenshots, log outputs, or any reproducers.

### Create a Pull Request
- Hello, we think your idea is good! Please consider contributing a PR for this following our contributing guidelines: https://github.com/ansible/awx/blob/devel/CONTRIBUTING.md
@@ -93,8 +92,8 @@ The Ansible Community is looking at building an EE that corresponds to all of th
- Hello, your issue seems related to receptor. Could you please open an issue in the receptor repository? https://github.com/ansible/receptor. Thanks!

### Ansible Engine not AWX
-- Hello, your question seems to be about Ansible development, not about AWX. Try asking on the Ansible-devel specific mailing list: https://groups.google.com/g/ansible-devel
-- Hello, your question seems to be about using Ansible, not about AWX. https://groups.google.com/g/ansible-project is the best place to visit for user questions about Ansible. Thanks!
+- Hello, your question seems to be about Ansible development, not about AWX. Try asking on in the Forum https://forum.ansible.com/tag/development
+- Hello, your question seems to be about using Ansible Core, not about AWX. https://forum.ansible.com/tag/ansible-core is the best place to visit for user questions about Ansible. Thanks!

### Ansible Galaxy not AWX
- Hey there. That sounds like an FAQ question. Did this: https://www.ansible.com/products/awx-project/faq cover your question?
@@ -104,7 +103,7 @@ The Ansible Community is looking at building an EE that corresponds to all of th
- AWX-Operator: https://github.com/ansible/awx-operator/blob/devel/CONTRIBUTING.md

### Oracle AWX
-We'd be happy to help if you can reproduce this with AWX since we do not have Oracle's Linux Automation Manager. If you need help with this specific version of Oracles Linux Automation Manager you will need to contact your Oracle for support. 
+We'd be happy to help if you can reproduce this with AWX since we do not have Oracle's Linux Automation Manager. If you need help with this specific version of Oracles Linux Automation Manager you will need to contact your Oracle for support.

### Community Resolved
Hi,
.github/workflows/api_schema_check.yml (vendored, new file, 66 lines)
@@ -0,0 +1,66 @@
---
name: API Schema Change Detection
env:
  LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
  CI_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  DEV_DOCKER_OWNER: ${{ github.repository_owner }}
  COMPOSE_TAG: ${{ github.base_ref || 'devel' }}
  UPSTREAM_REPOSITORY_ID: 91594105

on:
  pull_request:
    branches:
      - devel
      - release_**
      - feature_**
      - stable-**

jobs:
  api-schema-detection:
    name: Detect API Schema Changes
    runs-on: ubuntu-latest
    timeout-minutes: 30
    permissions:
      packages: write
      contents: read

    steps:
      - uses: actions/checkout@v4
        with:
          show-progress: false
          fetch-depth: 0

      - name: Build awx_devel image for schema check
        uses: ./.github/actions/awx_devel_image
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          private-github-key: ${{ secrets.PRIVATE_GITHUB_KEY }}

      - name: Detect API schema changes
        id: schema-check
        continue-on-error: true
        run: |
          AWX_DOCKER_ARGS='-e GITHUB_ACTIONS' \
          AWX_DOCKER_CMD='make detect-schema-change SCHEMA_DIFF_BASE_BRANCH=${{ github.event.pull_request.base.ref }}' \
          make docker-runner 2>&1 | tee schema-diff.txt
          exit ${PIPESTATUS[0]}

      - name: Add schema diff to job summary
        if: always()
        # show text and if for some reason, it can't be generated, state that it can't be.
        run: |
          echo "## API Schema Change Detection Results" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          if [ -f schema-diff.txt ]; then
            if grep -q "^+" schema-diff.txt || grep -q "^-" schema-diff.txt; then
              echo "### Schema changes detected" >> $GITHUB_STEP_SUMMARY
              echo "" >> $GITHUB_STEP_SUMMARY
              echo '```diff' >> $GITHUB_STEP_SUMMARY
              cat schema-diff.txt >> $GITHUB_STEP_SUMMARY
              echo '```' >> $GITHUB_STEP_SUMMARY
            else
              echo "### No schema changes detected" >> $GITHUB_STEP_SUMMARY
            fi
          else
            echo "### Unable to generate schema diff" >> $GITHUB_STEP_SUMMARY
          fi
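In the schema-check step above, piping `make docker-runner` through `tee` would normally mask make's exit status with tee's (which is almost always 0); `${PIPESTATUS[0]}`, bash's array of per-stage pipeline exit codes, restores it. A standalone sketch of the pattern, with `false` standing in for the real command:

```yaml
# Sketch: capture a command's output to a file while still failing the step
# if the command itself fails. `false` is a stand-in for the real command.
- shell: bash
  run: |
    false 2>&1 | tee output.txt   # tee exits 0 even though `false` failed
    exit ${PIPESTATUS[0]}         # propagate the first pipeline stage's status
```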
.github/workflows/ci.yml (vendored, 247 lines changed)
@@ -5,8 +5,12 @@ env:
  CI_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  DEV_DOCKER_OWNER: ${{ github.repository_owner }}
  COMPOSE_TAG: ${{ github.base_ref || 'devel' }}
+  UPSTREAM_REPOSITORY_ID: 91594105
on:
  pull_request:
+  push:
+    branches:
+      - devel # needed to publish code coverage post-merge
jobs:
  common-tests:
    name: ${{ matrix.tests.name }}
@@ -20,17 +24,20 @@ jobs:
      matrix:
        tests:
          - name: api-test
-            command: /start_tests.sh
+            command: /start_tests.sh test_coverage
+            coverage-upload-name: ""
          - name: api-migrations
            command: /start_tests.sh test_migrations
+            coverage-upload-name: ""
          - name: api-lint
            command: /var/lib/awx/venv/awx/bin/tox -e linters
+            coverage-upload-name: ""
          - name: api-swagger
            command: /start_tests.sh swagger
+            coverage-upload-name: ""
          - name: awx-collection
            command: /start_tests.sh test_collection_all
-          - name: api-schema
-            command: /start_tests.sh detect-schema-change SCHEMA_DIFF_BASE_BRANCH=${{ github.event.pull_request.base.ref }}
+            coverage-upload-name: "awx-collection"

    steps:
      - uses: actions/checkout@v4
@@ -41,9 +48,73 @@ jobs:
        uses: ./.github/actions/awx_devel_image
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
+          private-github-key: ${{ secrets.PRIVATE_GITHUB_KEY }}

      - name: Run check ${{ matrix.tests.name }}
-        run: AWX_DOCKER_CMD='${{ matrix.tests.command }}' make docker-runner
+        id: make-run
+        run: >-
+          AWX_DOCKER_ARGS='-e GITHUB_ACTIONS -e GITHUB_OUTPUT -v "${GITHUB_OUTPUT}:${GITHUB_OUTPUT}:rw,Z"'
+          AWX_DOCKER_CMD='${{ matrix.tests.command }}'
+          make docker-runner
+
+      - name: Upload test coverage to Codecov
+        if: >-
+          !cancelled()
+          && steps.make-run.outputs.cov-report-files != ''
+        uses: codecov/codecov-action@v4
+        with:
+          fail_ci_if_error: >-
+            ${{
+              toJSON(env.UPSTREAM_REPOSITORY_ID == github.repository_id)
+            }}
+          files: >-
+            ${{ steps.make-run.outputs.cov-report-files }}
+          flags: >-
+            CI-GHA,
+            pytest,
+            OS-${{
+              runner.os
+            }}
+          token: ${{ secrets.CODECOV_TOKEN }}
+      - name: Upload test results to Codecov
+        if: >-
+          !cancelled()
+          && steps.make-run.outputs.test-result-files != ''
+        uses: codecov/test-results-action@v1
+        with:
+          fail_ci_if_error: >-
+            ${{
+              toJSON(env.UPSTREAM_REPOSITORY_ID == github.repository_id)
+            }}
+          files: >-
+            ${{ steps.make-run.outputs.test-result-files }}
+          flags: >-
+            CI-GHA,
+            pytest,
+            OS-${{
+              runner.os
+            }}
+          token: ${{ secrets.CODECOV_TOKEN }}
+
+      - name: Upload awx jUnit test reports
+        if: >-
+          !cancelled()
+          && steps.make-run.outputs.test-result-files != ''
+          && github.event_name == 'push'
+          && env.UPSTREAM_REPOSITORY_ID == github.repository_id
+          && github.ref_name == github.event.repository.default_branch
+        run: |
+          for junit_file in $(echo '${{ steps.make-run.outputs.test-result-files }}' | sed 's/,/ /')
+          do
+            curl \
+              -v \
+              --user "${{ vars.PDE_ORG_RESULTS_AGGREGATOR_UPLOAD_USER }}:${{ secrets.PDE_ORG_RESULTS_UPLOAD_PASSWORD }}" \
+              --form "xunit_xml=@${junit_file}" \
+              --form "component_name=${{ matrix.tests.coverage-upload-name || 'awx' }}" \
+              --form "git_commit_sha=${{ github.sha }}" \
+              --form "git_repository_url=https://github.com/${{ github.repository }}" \
+              "${{ vars.PDE_ORG_RESULTS_AGGREGATOR_UPLOAD_URL }}/api/results/upload/"
+          done

  dev-env:
    runs-on: ubuntu-latest
@@ -53,14 +124,24 @@ jobs:
        with:
          show-progress: false

+      - uses: ./.github/actions/setup-python
+        with:
+          python-version: '3.x'
+
      - uses: ./.github/actions/run_awx_devel
        id: awx
        with:
          build-ui: false
          github-token: ${{ secrets.GITHUB_TOKEN }}
+          private-github-key: ${{ secrets.PRIVATE_GITHUB_KEY }}

-      - name: Run smoke test
-        run: ansible-playbook tools/docker-compose/ansible/smoke-test.yml -v
+      - name: Run live dev env tests
+        run: docker exec tools_awx_1 /bin/bash -c "make live_test"

      - uses: ./.github/actions/upload_awx_devel_logs
        if: always()
        with:
          log-filename: live-tests.log

  awx-operator:
    runs-on: ubuntu-latest
@@ -74,6 +155,10 @@ jobs:
          show-progress: false
          path: awx

+      - uses: ./awx/.github/actions/setup-ssh-agent
+        with:
+          ssh-private-key: ${{ secrets.PRIVATE_GITHUB_KEY }}
+
      - name: Checkout awx-operator
        uses: actions/checkout@v4
        with:
@@ -81,14 +166,10 @@ jobs:
          repository: ansible/awx-operator
          path: awx-operator

-      - name: Get python version from Makefile
-        working-directory: awx
-        run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
-
-      - name: Install python ${{ env.py_version }}
-        uses: actions/setup-python@v4
+      - name: Setup python, referencing action at awx relative path
+        uses: ./awx/.github/actions/setup-python
        with:
-          python-version: ${{ env.py_version }}
+          python-version: '3.x'

      - name: Install playbook dependencies
        run: |
@@ -107,6 +188,8 @@ jobs:
        working-directory: awx-operator
        run: |
          python3 -m pip install -r molecule/requirements.txt
+          python3 -m pip install PyYAML # for awx/tools/scripts/rewrite-awx-operator-requirements.py
+          $(realpath ../awx/tools/scripts/rewrite-awx-operator-requirements.py) molecule/requirements.yml $(realpath ../awx)
          ansible-galaxy collection install -r molecule/requirements.yml
          sudo rm -f $(which kustomize)
          make kustomize
@@ -119,7 +202,7 @@ jobs:

      - name: Upload debug output
        if: failure()
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
        with:
          name: awx-operator-debug-output
          path: ${{ env.DEBUG_OUTPUT_DIR }}
@@ -130,17 +213,46 @@ jobs:
    timeout-minutes: 30
    strategy:
      fail-fast: false
      matrix:
        ansible:
          - stable-2.17
          # - devel
    steps:
      - uses: actions/checkout@v4
      - name: Perform sanity testing
        uses: ansible-community/ansible-test-gh-action@release/v1
        with:
          show-progress: false
          ansible-core-version: ${{ matrix.ansible }}
          codecov-token: ${{ secrets.CODECOV_TOKEN }}
          collection-root: awx_collection
          pre-test-cmd: >-
            ansible-playbook
            -i localhost,
            tools/template_galaxy.yml
            -e collection_package=awx
            -e collection_namespace=awx
            -e collection_version=1.0.0
            -e '{"awx_template_version": false}'
          testing-type: sanity

      # The containers that GitHub Actions use have Ansible installed, so upgrade to make sure we have the latest version.
-      - name: Upgrade ansible-core
-        run: python3 -m pip install --upgrade ansible-core
-
-      - name: Run sanity tests
-        run: make test_collection_sanity
+      - name: Upload awx jUnit test reports to the unified dashboard
+        if: >-
+          !cancelled()
+          && steps.make-run.outputs.test-result-files != ''
+          && github.event_name == 'push'
+          && env.UPSTREAM_REPOSITORY_ID == github.repository_id
+          && github.ref_name == github.event.repository.default_branch
+        run: |
+          for junit_file in $(echo '${{ steps.make-run.outputs.test-result-files }}' | sed 's/,/ /')
+          do
+            curl \
+              -v \
+              --user "${{ vars.PDE_ORG_RESULTS_AGGREGATOR_UPLOAD_USER }}:${{ secrets.PDE_ORG_RESULTS_UPLOAD_PASSWORD }}" \
+              --form "xunit_xml=@${junit_file}" \
+              --form "component_name=awx" \
+              --form "git_commit_sha=${{ github.sha }}" \
+              --form "git_repository_url=https://github.com/${{ github.repository }}" \
+              "${{ vars.PDE_ORG_RESULTS_AGGREGATOR_UPLOAD_URL }}/api/results/upload/"
+          done

  collection-integration:
    name: awx_collection integration
@@ -161,11 +273,16 @@ jobs:
        with:
          show-progress: false

+      - uses: ./.github/actions/setup-python
+        with:
+          python-version: '3.x'
+
      - uses: ./.github/actions/run_awx_devel
        id: awx
        with:
          build-ui: false
          github-token: ${{ secrets.GITHUB_TOKEN }}
+          private-github-key: ${{ secrets.PRIVATE_GITHUB_KEY }}

      - name: Install dependencies for running tests
        run: |
@@ -173,23 +290,47 @@ jobs:
          python3 -m pip install -r awx_collection/requirements.txt

      - name: Run integration tests
+        id: make-run
        run: |
          echo "::remove-matcher owner=python::" # Disable annoying annotations from setup-python
          echo '[general]' > ~/.tower_cli.cfg
          echo 'host = https://${{ steps.awx.outputs.ip }}:8043' >> ~/.tower_cli.cfg
          echo 'oauth_token = ${{ steps.awx.outputs.admin-token }}' >> ~/.tower_cli.cfg
          echo 'username = admin' >> ~/.tower_cli.cfg
          echo 'password = password' >> ~/.tower_cli.cfg
          echo 'verify_ssl = false' >> ~/.tower_cli.cfg
          TARGETS="$(ls awx_collection/tests/integration/targets | grep '${{ matrix.target-regex.regex }}' | tr '\n' ' ')"
-          make COLLECTION_VERSION=100.100.100-git COLLECTION_TEST_TARGET="--coverage --requirements $TARGETS" test_collection_integration
+          make COLLECTION_VERSION=100.100.100-git COLLECTION_TEST_TARGET="--requirements $TARGETS" test_collection_integration
        env:
          ANSIBLE_TEST_PREFER_PODMAN: 1

+      - name: Upload test coverage to Codecov
+        if: >-
+          !cancelled()
+          && steps.make-run.outputs.cov-report-files != ''
+        uses: codecov/codecov-action@v4
+        with:
+          fail_ci_if_error: >-
+            ${{
+              toJSON(env.UPSTREAM_REPOSITORY_ID == github.repository_id)
+            }}
+          files: >-
+            ${{ steps.make-run.outputs.cov-report-files }}
+          flags: >-
+            CI-GHA,
+            ansible-test,
+            integration,
+            OS-${{
+              runner.os
+            }}
+          token: ${{ secrets.CODECOV_TOKEN }}
+
      # Upload coverage report as artifact
-      - uses: actions/upload-artifact@v3
+      - uses: actions/upload-artifact@v4
        if: always()
        with:
          name: coverage-${{ matrix.target-regex.name }}
          path: ~/.ansible/collections/ansible_collections/awx/awx/tests/output/coverage/
          retention-days: 1

      - uses: ./.github/actions/upload_awx_devel_logs
        if: always()
@@ -207,24 +348,28 @@ jobs:
    steps:
      - uses: actions/checkout@v4
        with:
+          persist-credentials: false
          show-progress: false

+      - uses: ./.github/actions/setup-python
+        with:
+          python-version: '3.x'
+
      - name: Upgrade ansible-core
        run: python3 -m pip install --upgrade ansible-core

      - name: Download coverage artifacts
-        uses: actions/download-artifact@v3
+        uses: actions/download-artifact@v4
        with:
+          merge-multiple: true
          path: coverage
+          pattern: coverage-*

      - name: Combine coverage
        run: |
          make COLLECTION_VERSION=100.100.100-git install_collection
          mkdir -p ~/.ansible/collections/ansible_collections/awx/awx/tests/output/coverage
-          cd coverage
-          for i in coverage-*; do
-            cp -rv $i/* ~/.ansible/collections/ansible_collections/awx/awx/tests/output/coverage/
-          done
+          cp -rv coverage/* ~/.ansible/collections/ansible_collections/awx/awx/tests/output/coverage/
          cd ~/.ansible/collections/ansible_collections/awx/awx
          ansible-test coverage combine --requirements
          ansible-test coverage html
@@ -236,48 +381,8 @@ jobs:
          echo '## AWX Collection Integration Coverage HTML' >> $GITHUB_STEP_SUMMARY
          echo 'Download the HTML artifacts to view the coverage report.' >> $GITHUB_STEP_SUMMARY

-      # This is a huge hack, there's no official action for removing artifacts currently.
-      # Also ACTIONS_RUNTIME_URL and ACTIONS_RUNTIME_TOKEN aren't available in normal run
-      # steps, so we have to use github-script to get them.
-      #
-      # The advantage of doing this, though, is that we save on artifact storage space.
-
-      - name: Get secret artifact runtime URL
-        uses: actions/github-script@v6
-        id: get-runtime-url
-        with:
-          result-encoding: string
-          script: |
-            const { ACTIONS_RUNTIME_URL } = process.env;
-            return ACTIONS_RUNTIME_URL;
-
-      - name: Get secret artifact runtime token
-        uses: actions/github-script@v6
-        id: get-runtime-token
-        with:
-          result-encoding: string
-          script: |
-            const { ACTIONS_RUNTIME_TOKEN } = process.env;
-            return ACTIONS_RUNTIME_TOKEN;
-
-      - name: Remove intermediary artifacts
-        env:
-          ACTIONS_RUNTIME_URL: ${{ steps.get-runtime-url.outputs.result }}
-          ACTIONS_RUNTIME_TOKEN: ${{ steps.get-runtime-token.outputs.result }}
-        run: |
-          echo "::add-mask::${ACTIONS_RUNTIME_TOKEN}"
-          artifacts=$(
-            curl -H "Authorization: Bearer $ACTIONS_RUNTIME_TOKEN" \
-              ${ACTIONS_RUNTIME_URL}_apis/pipelines/workflows/${{ github.run_id }}/artifacts?api-version=6.0-preview \
-              | jq -r '.value | .[] | select(.name | startswith("coverage-")) | .url'
-          )
-
-          for artifact in $artifacts; do
-            curl -i -X DELETE -H "Accept: application/json;api-version=6.0-preview" -H "Authorization: Bearer $ACTIONS_RUNTIME_TOKEN" "$artifact"
-          done
-
      - name: Upload coverage report as artifact
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
        with:
          name: awx-collection-integration-coverage-html
          path: ~/.ansible/collections/ansible_collections/awx/awx/tests/output/reports/coverage
.github/workflows/devel_images.yml (vendored, 11 lines changed)
@@ -10,6 +10,7 @@ on:
      - devel
      - release_*
      - feature_*
+      - stable-*
jobs:
  push-development-images:
    runs-on: ubuntu-latest
@@ -49,14 +50,10 @@ jobs:
        run: |
          echo "DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER,,}" >> $GITHUB_ENV
          echo "COMPOSE_TAG=${GITHUB_REF##*/}" >> $GITHUB_ENV
-          echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
        env:
          OWNER: '${{ github.repository_owner }}'

-      - name: Install python ${{ env.py_version }}
-        uses: actions/setup-python@v4
-        with:
-          python-version: ${{ env.py_version }}
+      - uses: ./.github/actions/setup-python

      - name: Log in to registry
        run: |
@@ -73,6 +70,10 @@ jobs:
          make ui
        if: matrix.build-targets.image-name == 'awx'

+      - uses: ./.github/actions/setup-ssh-agent
+        with:
+          ssh-private-key: ${{ secrets.PRIVATE_GITHUB_KEY }}
+
      - name: Build and push AWX devel images
        run: |
          make ${{ matrix.build-targets.make-target }}
.github/workflows/docs.yml (vendored, 4 lines changed)
@@ -12,6 +12,10 @@ jobs:
        with:
          show-progress: false

+      - uses: ./.github/actions/setup-python
+        with:
+          python-version: '3.x'
+
      - name: install tox
        run: pip install tox
.github/workflows/label_issue.yml (vendored, 4 lines changed)
@@ -34,9 +34,11 @@ jobs:
        with:
          show-progress: false

-      - uses: actions/setup-python@v4
+      - uses: ./.github/actions/setup-python

      - name: Install python requests
        run: pip install requests

      - name: Check if user is a member of Ansible org
        uses: jannekem/run-python-script-action@v1
        id: check_user
.github/workflows/label_pr.yml (vendored, 5 lines changed)
@@ -33,7 +33,10 @@ jobs:
        with:
          show-progress: false

-      - uses: actions/setup-python@v4
+      - uses: ./.github/actions/setup-python
+        with:
+          python-version: '3.x'

      - name: Install python requests
        run: pip install requests
      - name: Check if user is a member of Ansible org
.github/workflows/promote.yml (vendored, 8 lines changed)
@@ -36,13 +36,7 @@ jobs:
        with:
          show-progress: false

-      - name: Get python version from Makefile
-        run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
-
-      - name: Install python ${{ env.py_version }}
-        uses: actions/setup-python@v4
-        with:
-          python-version: ${{ env.py_version }}
+      - uses: ./.github/actions/setup-python

      - name: Install dependencies
        run: |
.github/workflows/sonarcloud_pr.yml (vendored, new file, 85 lines)
@@ -0,0 +1,85 @@
---
name: SonarQube

on:
  workflow_run:
    workflows:
      - CI
    types:
      - completed

permissions: read-all

jobs:
  sonarqube:
    runs-on: ubuntu-latest
    if: github.event.workflow_run.conclusion == 'success' && github.event.workflow_run.event == 'pull_request'
    steps:
      - name: Checkout Code
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
          show-progress: false

      - name: Download coverage report artifact
        uses: actions/download-artifact@v4
        with:
          name: coverage.xml
          path: reports/
          github-token: ${{ secrets.GITHUB_TOKEN }}
          run-id: ${{ github.event.workflow_run.id }}

      - name: Download PR number artifact
        uses: actions/download-artifact@v4
        with:
          name: pr-number
          path: .
          github-token: ${{ secrets.GITHUB_TOKEN }}
          run-id: ${{ github.event.workflow_run.id }}

      - name: Extract PR number
        run: |
          cat pr-number.txt
          echo "PR_NUMBER=$(cat pr-number.txt)" >> $GITHUB_ENV

      - name: Get PR info
        uses: octokit/request-action@v2.x
        id: pr_info
        with:
          route: GET /repos/{repo}/pulls/{number}
          repo: ${{ github.event.repository.full_name }}
          number: ${{ env.PR_NUMBER }}
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

      - name: Set PR info into env
        run: |
          echo "PR_BASE=${{ fromJson(steps.pr_info.outputs.data).base.ref }}" >> $GITHUB_ENV
          echo "PR_HEAD=${{ fromJson(steps.pr_info.outputs.data).head.ref }}" >> $GITHUB_ENV

      - name: Add base branch
        run: |
          gh pr checkout ${{ env.PR_NUMBER }}
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

      - name: Extract and export repo owner/name
        run: |
          REPO_SLUG="${GITHUB_REPOSITORY}"
          IFS="/" read -r REPO_OWNER REPO_NAME <<< "$REPO_SLUG"
          echo "REPO_OWNER=$REPO_OWNER" >> $GITHUB_ENV
          echo "REPO_NAME=$REPO_NAME" >> $GITHUB_ENV

      - name: SonarQube scan
        uses: SonarSource/sonarqube-scan-action@v5
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          SONAR_TOKEN: ${{ secrets[format('{0}', vars.SONAR_TOKEN_SECRET_NAME)] }}
        with:
          args: >
            -Dsonar.organization=${{ env.REPO_OWNER }}
            -Dsonar.projectKey=${{ env.REPO_OWNER }}_${{ env.REPO_NAME }}
            -Dsonar.pullrequest.key=${{ env.PR_NUMBER }}
            -Dsonar.pullrequest.branch=${{ env.PR_HEAD }}
            -Dsonar.pullrequest.base=${{ env.PR_BASE }}
            -Dsonar.scm.revision=${{ github.event.workflow_run.head_sha }}
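Because this workflow fires on `workflow_run`, it executes in the base repository's context (so it can read secrets safely even for fork PRs) and must pull the PR's coverage report and number out of the completed CI run's artifacts rather than from its own event payload. The producing side is not shown in this compare view; a minimal sketch of what the CI workflow would presumably upload (step and file names are assumptions):

```yaml
# Sketch of the assumed producer side in ci.yml (not shown in this diff):
# save the PR number so the workflow_run consumer can identify which PR
# the artifacts belong to.
- name: Save PR number for the SonarQube workflow
  if: github.event_name == 'pull_request'
  run: echo "${{ github.event.pull_request.number }}" > pr-number.txt
- uses: actions/upload-artifact@v4
  with:
    name: pr-number
    path: pr-number.txt
```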
.github/workflows/stage.yml (vendored, 13 lines changed)
@@ -64,14 +64,9 @@ jobs:
          repository: ansible/awx-logos
          path: awx-logos

-      - name: Get python version from Makefile
-        working-directory: awx
-        run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
-
-      - name: Install python ${{ env.py_version }}
-        uses: actions/setup-python@v4
+      - uses: ./awx/.github/actions/setup-python
        with:
-          python-version: ${{ env.py_version }}
+          working-directory: awx

      - name: Install playbook dependencies
        run: |
@@ -90,9 +85,11 @@ jobs:
          cp ../awx-logos/awx/ui/client/assets/* awx/ui/public/static/media/

      - name: Setup node and npm for new UI build
-        uses: actions/setup-node@v2
+        uses: actions/setup-node@v4
        with:
          node-version: '18'
+          cache: 'npm'
+          cache-dependency-path: awx/awx/ui/**/package-lock.json

      - name: Prebuild new UI for awx image (to speed up build process)
        working-directory: awx
.github/workflows/upload_schema.yml (vendored, 47 lines changed)
@@ -5,11 +5,13 @@ env:
  LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting

on:
+  workflow_dispatch:
  push:
    branches:
      - devel
      - release_**
      - feature_**
+      - stable-**
jobs:
  push:
    runs-on: ubuntu-latest
@@ -22,39 +24,26 @@ jobs:
        with:
          show-progress: false

-      - name: Get python version from Makefile
-        run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
-
-      - name: Install python ${{ env.py_version }}
-        uses: actions/setup-python@v4
+      - name: Build awx_devel image to use for schema gen
+        uses: ./.github/actions/awx_devel_image
        with:
-          python-version: ${{ env.py_version }}
-
-      - name: Log in to registry
-        run: |
-          echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
-
-      - name: Pre-pull image to warm build cache
-        run: |
-          docker pull -q ghcr.io/${{ github.repository_owner }}/awx_devel:${GITHUB_REF##*/} || :
-
-      - name: Build image
-        run: |
-          DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }} COMPOSE_TAG=${GITHUB_REF##*/} make docker-compose-build
+          github-token: ${{ secrets.GITHUB_TOKEN }}
+          private-github-key: ${{ secrets.PRIVATE_GITHUB_KEY }}

      - name: Generate API Schema
        run: |
+          DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER_LC} \
+          COMPOSE_TAG=${{ github.base_ref || github.ref_name }} \
          docker run -u $(id -u) --rm -v ${{ github.workspace }}:/awx_devel/:Z \
-            --workdir=/awx_devel ghcr.io/${{ github.repository_owner }}/awx_devel:${GITHUB_REF##*/} /start_tests.sh genschema
+            --workdir=/awx_devel `make print-DEVEL_IMAGE_NAME` /start_tests.sh genschema

      - name: Upload API Schema
-        env:
-          AWS_ACCESS_KEY: ${{ secrets.AWS_ACCESS_KEY }}
-          AWS_SECRET_KEY: ${{ secrets.AWS_SECRET_KEY }}
-          AWS_REGION: 'us-east-1'
-        run: |
-          ansible localhost -c local, -m command -a "{{ ansible_python_interpreter + ' -m pip install boto3'}}"
-          ansible localhost -c local -m aws_s3 \
-            -a "src=${{ github.workspace }}/schema.json bucket=awx-public-ci-files object=${GITHUB_REF##*/}/schema.json mode=put permission=public-read"
+        uses: keithweaver/aws-s3-github-action@4dd5a7b81d54abaa23bbac92b27e85d7f405ae53
+        with:
+          command: cp
+          source: ${{ github.workspace }}/schema.json
+          destination: s3://awx-public-ci-files/${{ github.ref_name }}/schema.json
+          aws_access_key_id: ${{ secrets.AWS_ACCESS_KEY }}
+          aws_secret_access_key: ${{ secrets.AWS_SECRET_KEY }}
+          aws_region: us-east-1
+          flags: --acl public-read --only-show-errors
.gitignore (vendored, 3 lines changed)
@@ -31,7 +31,6 @@ tools/docker-compose/_build
tools/docker-compose/_sources
tools/docker-compose/overrides/
tools/docker-compose-minikube/_sources
-tools/docker-compose/keycloak.awx.realm.json

!tools/docker-compose/editable_dependencies
tools/docker-compose/editable_dependencies/*
@@ -151,6 +150,8 @@ use_dev_supervisor.txt

awx/ui/src
awx/ui/build
+awx/ui/.ui-built
awx/ui_next

# Docs build stuff
docs/docsite/build/
@@ -2,7 +2,7 @@
|
||||
|
||||
Hi there! We're excited to have you as a contributor.
|
||||
|
||||
Have questions about this document or anything not covered here? Come chat with us at `#ansible-awx` on irc.libera.chat, or submit your question to the [mailing list](https://groups.google.com/forum/#!forum/awx-project).
|
||||
Have questions about this document or anything not covered here? Create a topic using the [AWX tag on the Ansible Forum](https://forum.ansible.com/tag/awx).
|
||||
|
||||
## Table of contents
|
||||
|
||||
@@ -30,7 +30,7 @@ Have questions about this document or anything not covered here? Come chat with
|
||||
- You must use `git commit --signoff` for any commit to be merged, and agree that usage of --signoff constitutes agreement with the terms of [DCO 1.1](./DCO_1_1.md).
|
||||
- Take care to make sure no merge commits are in the submission, and use `git rebase` vs `git merge` for this reason.
|
||||
- If collaborating with someone else on the same branch, consider using `--force-with-lease` instead of `--force`. This will prevent you from accidentally overwriting commits pushed by someone else. For more information, see [git push docs](https://git-scm.com/docs/git-push#git-push---force-with-leaseltrefnamegt).
|
||||
- If submitting a large code change, it's a good idea to join the `#ansible-awx` channel on irc.libera.chat, and talk about what you would like to do or add first. This not only helps everyone know what's going on, it also helps save time and effort, if the community decides some changes are needed.
|
||||
- If submitting a large code change, it's a good idea to create a [forum topic tagged with 'awx'](https://forum.ansible.com/tag/awx), and talk about what you would like to do or add first. This not only helps everyone know what's going on, it also helps save time and effort, if the community decides some changes are needed.
|
||||
- We ask all of our community members and contributors to adhere to the [Ansible code of conduct](http://docs.ansible.com/ansible/latest/community/code_of_conduct.html). If you have questions, or need assistance, please reach out to our community team at [codeofconduct@ansible.com](mailto:codeofconduct@ansible.com)
|
||||
|
||||
## Setting up your development environment
|
||||
@@ -124,7 +124,7 @@ If it has someone assigned to it then that person is the person responsible for
|
||||
> Issue assignment will only be done for maintainers of the project. If you decide to work on an issue, please feel free to add a comment in the issue to let others know that you are working on it; but know that we will accept the first pull request from whomever is able to fix an issue. Once your PR is accepted we can add you as an assignee to an issue upon request.
|
||||
|
||||
|
||||
> If you work in a part of the codebase that is going through active development, your changes may be rejected, or you may be asked to `rebase`. A good idea before starting work is to have a discussion with us in the `#ansible-awx` channel on irc.libera.chat, or on the [mailing list](https://groups.google.com/forum/#!forum/awx-project).
|
||||
> If you work in a part of the codebase that is going through active development, your changes may be rejected, or you may be asked to `rebase`. A good idea before starting work is to have a discussion with us in the [Ansible Forum](https://forum.ansible.com/tag/awx).
|
||||
|
||||
> If you're planning to develop features or fixes for the UI, please review the [UI Developer doc](https://github.com/ansible/ansible-ui/blob/main/CONTRIBUTING.md).
|
||||
|
||||
@@ -149,7 +149,7 @@ Here are a few things you can do to help the visibility of your change, and incr
 - Make the smallest change possible
 - Write good commit messages. See [How to write a Git commit message](https://chris.beams.io/posts/git-commit/).

-It's generally a good idea to discuss features with us first by engaging us in the `#ansible-awx` channel on irc.libera.chat, or on the [mailing list](https://groups.google.com/forum/#!forum/awx-project).
+It's generally a good idea to discuss features with us first by engaging on the [Ansible Forum](https://forum.ansible.com/tag/awx).

 We like to keep our commit history clean, and will require resubmission of pull requests that contain merge commits. Use `git pull --rebase`, rather than
 `git pull`, and `git rebase`, rather than `git merge`.
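A sketch of the rebase-based update flow the guide asks for (remote and branch names are assumptions):

```sh
# Update your local branch without creating a merge commit
git pull --rebase origin devel

# If your branch already diverged, linearize it onto the latest devel
git rebase origin/devel

# Check the result: the submission should contain no merge commits
git log --merges origin/devel..HEAD
```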
@@ -164,6 +164,6 @@ We welcome your feedback, and encourage you to file an issue when you run into a
 ## Getting Help

-If you require additional assistance, please reach out to us at `#ansible-awx` on irc.libera.chat, or submit your question to the [mailing list](https://groups.google.com/forum/#!forum/awx-project).
+If you require additional assistance, please submit your question to the [Ansible Forum](https://forum.ansible.com/tag/awx).

 For extra information on debugging tools, see [Debugging](./docs/debugging/).
@@ -1,11 +1,11 @@
 # Issues

-## Reporting
+## Reporting

 Use the GitHub [issue tracker](https://github.com/ansible/awx/issues) for filing bugs. In order to save time, and help us respond to issues quickly, make sure to fill out as much of the issue template
 as possible. Version information, and an accurate reproducing scenario are critical to helping us identify the problem.

-Please don't use the issue tracker as a way to ask how to do something. Instead, use the [mailing list](https://groups.google.com/forum/#!forum/awx-project), and the `#ansible-awx` channel on irc.libera.chat to get help.
+Please don't use the issue tracker as a way to ask how to do something. Instead, use the [Ansible Forum](https://forum.ansible.com/tag/awx).

 Before opening a new issue, please use the issue search feature to see if what you're experiencing has already been reported. If you have any extra detail to provide, please comment. Otherwise, rather than posting a "me too" comment, please consider giving it a ["thumbs up"](https://github.com/blog/2119-add-reactions-to-pull-requests-issues-and-comment) to give us an indication of the severity of the problem.

@@ -14,7 +14,7 @@ Before opening a new issue, please use the issue search feature to see if what y
 When reporting issues for the UI, we also appreciate having screen shots and any error messages from the web browser's console. It's not unusual for browser extensions
 and plugins to cause problems. Reporting those will also help speed up analyzing and resolving UI bugs.

-### API and backend issues
+### API and backend issues

 For the API and backend services, please capture all of the logs that you can from the time the problem occurred.
Makefile (130 changed lines)

@@ -8,6 +8,7 @@ NODE ?= node
 NPM_BIN ?= npm
 KIND_BIN ?= $(shell which kind)
 CHROMIUM_BIN=/tmp/chrome-linux/chrome
+GIT_REPO_NAME ?= $(shell basename `git rev-parse --show-toplevel`)
 GIT_BRANCH ?= $(shell git rev-parse --abbrev-ref HEAD)
 MANAGEMENT_COMMAND ?= awx-manage
 VERSION ?= $(shell $(PYTHON) tools/scripts/scm_version.py 2> /dev/null)
@@ -18,12 +19,18 @@ COLLECTION_VERSION ?= $(shell $(PYTHON) tools/scripts/scm_version.py | cut -d .
 COLLECTION_SANITY_ARGS ?= --docker
 # collection unit testing directories
 COLLECTION_TEST_DIRS ?= awx_collection/test/awx
+# pytest added args to collect coverage
+COVERAGE_ARGS ?= --cov --cov-report=xml --junitxml=reports/junit.xml
+# pytest test directories
+TEST_DIRS ?= awx/main/tests/unit awx/main/tests/functional awx/conf/tests
+# pytest args to run tests in parallel
+PARALLEL_TESTS ?= -n auto
 # collection integration test directories (defaults to all)
 COLLECTION_TEST_TARGET ?=
 # args for collection install
 COLLECTION_PACKAGE ?= awx
 COLLECTION_NAMESPACE ?= awx
-COLLECTION_INSTALL = ~/.ansible/collections/ansible_collections/$(COLLECTION_NAMESPACE)/$(COLLECTION_PACKAGE)
+COLLECTION_INSTALL = $(HOME)/.ansible/collections/ansible_collections/$(COLLECTION_NAMESPACE)/$(COLLECTION_PACKAGE)
 COLLECTION_TEMPLATE_VERSION ?= false

 # NOTE: This defaults the container image version to the branch that's active
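The `~` to `$(HOME)` switch matters because make performs no tilde expansion of its own: a literal `~` only works when an unquoted shell word in a recipe happens to expand it, while `$(HOME)` is substituted by make itself before the shell ever runs. A quick demonstration (a sketch, GNU make assumed):

```sh
make -s -f - <<'EOF'
A = ~/.ansible
B = $(HOME)/.ansible
all: ; @echo "A=$(A)  B=$(B)"
EOF
# prints: A=~/.ansible  B=/home/you/.ansible
# the quoted ~ is never expanded, so paths built from A would be broken
```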
@@ -31,10 +38,6 @@ COMPOSE_TAG ?= $(GIT_BRANCH)
 MAIN_NODE_TYPE ?= hybrid
 # If set to true docker-compose will also start a pgbouncer instance and use it
 PGBOUNCER ?= false
-# If set to true docker-compose will also start a keycloak instance
-KEYCLOAK ?= false
-# If set to true docker-compose will also start an ldap instance
-LDAP ?= false
 # If set to true docker-compose will also start a splunk instance
 SPLUNK ?= false
 # If set to true docker-compose will also start a prometheus instance
@@ -45,8 +48,6 @@ GRAFANA ?= false
 VAULT ?= false
 # If set to true docker-compose will also start a hashicorp vault instance with TLS enabled
 VAULT_TLS ?= false
-# If set to true docker-compose will also start a tacacs+ instance
-TACACS ?= false
 # If set to true docker-compose will also start an OpenTelemetry Collector instance
 OTEL ?= false
 # If set to true docker-compose will also start a Loki instance
@@ -62,9 +63,9 @@ DEV_DOCKER_OWNER ?= ansible
 # Docker will only accept lowercase, so github names like Paul need to be paul
 DEV_DOCKER_OWNER_LOWER = $(shell echo $(DEV_DOCKER_OWNER) | tr A-Z a-z)
 DEV_DOCKER_TAG_BASE ?= ghcr.io/$(DEV_DOCKER_OWNER_LOWER)
-DEVEL_IMAGE_NAME ?= $(DEV_DOCKER_TAG_BASE)/awx_devel:$(COMPOSE_TAG)
-IMAGE_KUBE_DEV=$(DEV_DOCKER_TAG_BASE)/awx_kube_devel:$(COMPOSE_TAG)
-IMAGE_KUBE=$(DEV_DOCKER_TAG_BASE)/awx:$(COMPOSE_TAG)
+DEVEL_IMAGE_NAME ?= $(DEV_DOCKER_TAG_BASE)/$(GIT_REPO_NAME)_devel:$(COMPOSE_TAG)
+IMAGE_KUBE_DEV=$(DEV_DOCKER_TAG_BASE)/$(GIT_REPO_NAME)_kube_devel:$(COMPOSE_TAG)
+IMAGE_KUBE=$(DEV_DOCKER_TAG_BASE)/$(GIT_REPO_NAME):$(COMPOSE_TAG)

 # Common command to use for running ansible-playbook
 ANSIBLE_PLAYBOOK ?= ansible-playbook -e ansible_python_interpreter=$(PYTHON)
@@ -76,7 +77,7 @@ RECEPTOR_IMAGE ?= quay.io/ansible/receptor:devel
 SRC_ONLY_PKGS ?= cffi,pycparser,psycopg,twilio
 # These should be upgraded in the AWX and Ansible venv before attempting
 # to install the actual requirements
-VENV_BOOTSTRAP ?= pip==21.2.4 setuptools==69.0.2 setuptools_scm[toml]==8.0.4 wheel==0.42.0 cython==0.29.37
+VENV_BOOTSTRAP ?= pip==21.2.4 setuptools==80.9.0 setuptools_scm[toml]==8.0.4 wheel==0.42.0 cython==3.1.3

 NAME ?= awx
@@ -228,12 +229,6 @@ migrate:
 dbchange:
 	$(MANAGEMENT_COMMAND) makemigrations

-supervisor:
-	@if [ "$(VENV_BASE)" ]; then \
-		. $(VENV_BASE)/awx/bin/activate; \
-	fi; \
-	supervisord --pidfile=/tmp/supervisor_pid -n
-
 collectstatic:
 	@if [ "$(VENV_BASE)" ]; then \
 		. $(VENV_BASE)/awx/bin/activate; \
@@ -320,14 +315,19 @@ black: reports
 	@chmod +x .git/hooks/pre-commit

 genschema: reports
-	$(MAKE) swagger PYTEST_ARGS="--genschema --create-db "
+	$(MAKE) swagger PYTEST_ADDOPTS="--genschema --create-db "
 	mv swagger.json schema.json

 swagger: reports
 	@if [ "$(VENV_BASE)" ]; then \
 		. $(VENV_BASE)/awx/bin/activate; \
 	fi; \
-	(set -o pipefail && py.test $(PYTEST_ARGS) awx/conf/tests/functional awx/main/tests/functional/api awx/main/tests/docs | tee reports/$@.report)
+	(set -o pipefail && py.test $(COVERAGE_ARGS) $(PARALLEL_TESTS) awx/conf/tests/functional awx/main/tests/functional/api awx/main/tests/docs | tee reports/$@.report)
+	@if [ "${GITHUB_ACTIONS}" = "true" ]; \
+	then \
+		echo 'cov-report-files=reports/coverage.xml' >> "${GITHUB_OUTPUT}"; \
+		echo 'test-result-files=reports/junit.xml' >> "${GITHUB_OUTPUT}"; \
+	fi

 check: black
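The `PYTEST_ARGS` to `PYTEST_ADDOPTS` rename leans on a pytest built-in: `PYTEST_ADDOPTS` is the environment variable pytest itself reads and appends to the command line of any invocation it reaches, so flags no longer have to be threaded through each recipe by hand. A sketch of the effect (paths are illustrative):

```sh
# pytest picks this up from the environment on its own
PYTEST_ADDOPTS="--genschema --create-db" py.test awx/main/tests/docs

# which behaves like:
py.test --genschema --create-db awx/main/tests/docs
```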
@@ -340,26 +340,38 @@ api-lint:
 awx-link:
 	[ -d "/awx_devel/awx.egg-info" ] || $(PYTHON) /awx_devel/tools/scripts/egg_info_dev

-TEST_DIRS ?= awx/main/tests/unit awx/main/tests/functional awx/conf/tests awx/sso/tests
-PYTEST_ARGS ?= -n auto
 ## Run all API unit tests.
 test:
 	if [ "$(VENV_BASE)" ]; then \
 		. $(VENV_BASE)/awx/bin/activate; \
 	fi; \
-	PYTHONDONTWRITEBYTECODE=1 py.test -p no:cacheprovider $(PYTEST_ARGS) $(TEST_DIRS)
+	PYTHONDONTWRITEBYTECODE=1 py.test -p no:cacheprovider $(PARALLEL_TESTS) $(TEST_DIRS)
 	cd awxkit && $(VENV_BASE)/awx/bin/tox -re py3
 	awx-manage check_migrations --dry-run --check -n 'missing_migration_file'

 live_test:
 	cd awx/main/tests/live && py.test tests/

+## Run all API unit tests with coverage enabled.
+test_coverage:
+	$(MAKE) test PYTEST_ADDOPTS="--create-db $(COVERAGE_ARGS)"
+	@if [ "${GITHUB_ACTIONS}" = "true" ]; \
+	then \
+		echo 'cov-report-files=awxkit/coverage.xml,reports/coverage.xml' >> "${GITHUB_OUTPUT}"; \
+		echo 'test-result-files=awxkit/report.xml,reports/junit.xml' >> "${GITHUB_OUTPUT}"; \
+	fi
+
 test_migrations:
 	if [ "$(VENV_BASE)" ]; then \
 		. $(VENV_BASE)/awx/bin/activate; \
 	fi; \
-	PYTHONDONTWRITEBYTECODE=1 py.test -p no:cacheprovider --migrations -m migration_test $(PYTEST_ARGS) $(TEST_DIRS)
+	PYTHONDONTWRITEBYTECODE=1 py.test -p no:cacheprovider --migrations -m migration_test --create-db $(PARALLEL_TESTS) $(COVERAGE_ARGS) $(TEST_DIRS)
+	@if [ "${GITHUB_ACTIONS}" = "true" ]; \
+	then \
+		echo 'cov-report-files=reports/coverage.xml' >> "${GITHUB_OUTPUT}"; \
+		echo 'test-result-files=reports/junit.xml' >> "${GITHUB_OUTPUT}"; \
+	fi

 ## Runs AWX_DOCKER_CMD inside a new docker container.
 docker-runner:
-	docker run -u $(shell id -u) --rm -v $(shell pwd):/awx_devel/:Z --workdir=/awx_devel $(DEVEL_IMAGE_NAME) $(AWX_DOCKER_CMD)
+	docker run -u $(shell id -u) --rm -v $(shell pwd):/awx_devel/:Z $(AWX_DOCKER_ARGS) --workdir=/awx_devel $(DEVEL_IMAGE_NAME) $(AWX_DOCKER_CMD)

 test_collection:
 	rm -f $(shell ls -d $(VENV_BASE)/awx/lib/python* | head -n 1)/no-global-site-packages.txt
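Because `TEST_DIRS`, `PARALLEL_TESTS`, and the new `AWX_DOCKER_ARGS` hook are plain make variables, individual runs can override them from the command line; a usage sketch (the values shown are illustrative, not project defaults):

```sh
# Run a subset of the API tests on two xdist workers
make test TEST_DIRS=awx/main/tests/unit PARALLEL_TESTS="-n 2"

# Disable parallelism entirely, which is often easier to debug
make test PARALLEL_TESTS=

# Pass extra docker flags through AWX_DOCKER_ARGS without editing the Makefile
make docker-runner AWX_DOCKER_CMD="make test" AWX_DOCKER_ARGS="-e MY_FLAG=1"
```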
@@ -368,7 +380,12 @@ test_collection:
 	fi && \
 	if ! [ -x "$(shell command -v ansible-playbook)" ]; then pip install ansible-core; fi
 	ansible --version
-	py.test $(COLLECTION_TEST_DIRS) -v
+	py.test $(COLLECTION_TEST_DIRS) $(COVERAGE_ARGS) -v
+	@if [ "${GITHUB_ACTIONS}" = "true" ]; \
+	then \
+		echo 'cov-report-files=reports/coverage.xml' >> "${GITHUB_OUTPUT}"; \
+		echo 'test-result-files=reports/junit.xml' >> "${GITHUB_OUTPUT}"; \
+	fi
 # The python path needs to be modified so that the tests can find Ansible within the container
 # First we will use anything explicitly set as PYTHONPATH
 # Second we will load any libraries out of the virtualenv (if it's unspecified that should be ok because python should not load out of an empty directory)
@@ -403,23 +420,29 @@ test_collection_sanity:
 	if ! [ -x "$(shell command -v ansible-test)" ]; then pip install ansible-core; fi
 	ansible --version
 	COLLECTION_VERSION=1.0.0 $(MAKE) install_collection
-	cd $(COLLECTION_INSTALL) && ansible-test sanity $(COLLECTION_SANITY_ARGS)
+	cd $(COLLECTION_INSTALL) && \
+	ansible-test sanity $(COLLECTION_SANITY_ARGS) --coverage --junit && \
+	ansible-test coverage xml --requirements --group-by command --group-by version
+	@if [ "${GITHUB_ACTIONS}" = "true" ]; \
+	then \
+		echo cov-report-files="$$(find "$(COLLECTION_INSTALL)/tests/output/reports/" -type f -name 'coverage=sanity*.xml' -print0 | tr '\0' ',' | sed 's#,$$##')" >> "${GITHUB_OUTPUT}"; \
+		echo test-result-files="$$(find "$(COLLECTION_INSTALL)/tests/output/junit/" -type f -name '*.xml' -print0 | tr '\0' ',' | sed 's#,$$##')" >> "${GITHUB_OUTPUT}"; \
+	fi

 test_collection_integration: install_collection
-	cd $(COLLECTION_INSTALL) && ansible-test integration -vvv $(COLLECTION_TEST_TARGET)
+	cd $(COLLECTION_INSTALL) && \
+	ansible-test integration --coverage -vvv $(COLLECTION_TEST_TARGET) && \
+	ansible-test coverage xml --requirements --group-by command --group-by version
+	@if [ "${GITHUB_ACTIONS}" = "true" ]; \
+	then \
+		echo cov-report-files="$$(find "$(COLLECTION_INSTALL)/tests/output/reports/" -type f -name 'coverage=integration*.xml' -print0 | tr '\0' ',' | sed 's#,$$##')" >> "${GITHUB_OUTPUT}"; \
+	fi

 test_unit:
 	@if [ "$(VENV_BASE)" ]; then \
 		. $(VENV_BASE)/awx/bin/activate; \
 	fi; \
-	py.test awx/main/tests/unit awx/conf/tests/unit awx/sso/tests/unit
-
-## Run all API unit tests with coverage enabled.
-test_coverage:
-	@if [ "$(VENV_BASE)" ]; then \
-		. $(VENV_BASE)/awx/bin/activate; \
-	fi; \
-	py.test --create-db --cov=awx --cov-report=xml --junitxml=./reports/junit.xml $(TEST_DIRS)
+	py.test awx/main/tests/unit awx/conf/tests/unit

 ## Output test coverage as HTML (into htmlcov directory).
 coverage_html:
@@ -473,21 +496,18 @@ docker-compose-sources: .git/hooks/pre-commit
 	fi;

 	$(ANSIBLE_PLAYBOOK) -i tools/docker-compose/inventory tools/docker-compose/ansible/sources.yml \
-		-e awx_image=$(DEV_DOCKER_TAG_BASE)/awx_devel \
+		-e awx_image=$(DEV_DOCKER_TAG_BASE)/$(GIT_REPO_NAME)_devel \
 		-e awx_image_tag=$(COMPOSE_TAG) \
 		-e receptor_image=$(RECEPTOR_IMAGE) \
 		-e control_plane_node_count=$(CONTROL_PLANE_NODE_COUNT) \
 		-e execution_node_count=$(EXECUTION_NODE_COUNT) \
 		-e minikube_container_group=$(MINIKUBE_CONTAINER_GROUP) \
 		-e enable_pgbouncer=$(PGBOUNCER) \
-		-e enable_keycloak=$(KEYCLOAK) \
-		-e enable_ldap=$(LDAP) \
 		-e enable_splunk=$(SPLUNK) \
 		-e enable_prometheus=$(PROMETHEUS) \
 		-e enable_grafana=$(GRAFANA) \
 		-e enable_vault=$(VAULT) \
 		-e vault_tls=$(VAULT_TLS) \
-		-e enable_tacacs=$(TACACS) \
 		-e enable_otel=$(OTEL) \
 		-e enable_loki=$(LOKI) \
 		-e install_editable_dependencies=$(EDITABLE_DEPENDENCIES) \
@@ -498,8 +518,7 @@ docker-compose: awx/projects docker-compose-sources
 	ansible-galaxy install --ignore-certs -r tools/docker-compose/ansible/requirements.yml;
 	$(ANSIBLE_PLAYBOOK) -i tools/docker-compose/inventory tools/docker-compose/ansible/initialize_containers.yml \
 		-e enable_vault=$(VAULT) \
-		-e vault_tls=$(VAULT_TLS) \
-		-e enable_ldap=$(LDAP); \
+		-e vault_tls=$(VAULT_TLS); \
 	$(MAKE) docker-compose-up

 docker-compose-up:
@@ -547,6 +566,7 @@ Dockerfile.dev: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
 ## Build awx_devel image for docker compose development environment
 docker-compose-build: Dockerfile.dev
 	DOCKER_BUILDKIT=1 docker build \
+		--ssh default=$(SSH_AUTH_SOCK) \
 		-f Dockerfile.dev \
 		-t $(DEVEL_IMAGE_NAME) \
 		--build-arg BUILDKIT_INLINE_CACHE=1 \
@@ -558,6 +578,7 @@ docker-compose-buildx: Dockerfile.dev
 	- docker buildx create --name docker-compose-buildx
 	docker buildx use docker-compose-buildx
 	- docker buildx build \
+		--ssh default=$(SSH_AUTH_SOCK) \
 		--push \
 		--build-arg BUILDKIT_INLINE_CACHE=1 \
 		$(DOCKER_DEVEL_CACHE_FLAG) \
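The new `--ssh default=$(SSH_AUTH_SOCK)` flag forwards the host's SSH agent into the BuildKit build, presumably so Dockerfile steps declared with `RUN --mount=type=ssh` can fetch private dependencies. A sketch of preparing the agent before building (the key path is illustrative):

```sh
# Make sure an agent is running and SSH_AUTH_SOCK is exported
eval "$(ssh-agent -s)"
ssh-add ~/.ssh/id_ed25519

# BuildKit forwards the socket only to RUN steps that request it
make docker-compose-build
```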
@@ -571,28 +592,13 @@ docker-clean:
 	-$(foreach image_id,$(shell docker images --filter=reference='*/*/*awx_devel*' --filter=reference='*/*awx_devel*' --filter=reference='*awx_devel*' -aq),docker rmi --force $(image_id);)

 docker-clean-volumes: docker-compose-clean docker-compose-container-group-clean
-	docker volume rm -f tools_var_lib_awx tools_awx_db tools_awx_db_15 tools_vault_1 tools_ldap_1 tools_grafana_storage tools_prometheus_storage $(shell docker volume ls --filter name=tools_redis_socket_ -q)
+	docker volume rm -f tools_var_lib_awx tools_awx_db tools_awx_db_15 tools_vault_1 tools_grafana_storage tools_prometheus_storage $(shell docker volume ls --filter name=tools_redis_socket_ -q)

 docker-refresh: docker-clean docker-compose

-## Docker Development Environment with Elastic Stack Connected
-docker-compose-elk: awx/projects docker-compose-sources
-	$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml -f tools/elastic/docker-compose.logstash-link.yml -f tools/elastic/docker-compose.elastic-override.yml up --no-recreate
-
-docker-compose-cluster-elk: awx/projects docker-compose-sources
-	$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml -f tools/elastic/docker-compose.logstash-link-cluster.yml -f tools/elastic/docker-compose.elastic-override.yml up --no-recreate
-
 docker-compose-container-group:
 	MINIKUBE_CONTAINER_GROUP=true $(MAKE) docker-compose

-clean-elk:
-	docker stop tools_kibana_1
-	docker stop tools_logstash_1
-	docker stop tools_elasticsearch_1
-	docker rm tools_logstash_1
-	docker rm tools_elasticsearch_1
-	docker rm tools_kibana_1
-
 VERSION:
 	@echo "awx: $(VERSION)"
@@ -620,6 +626,7 @@ Dockerfile: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
 ## Build awx image for deployment on Kubernetes environment.
 awx-kube-build: Dockerfile
 	DOCKER_BUILDKIT=1 docker build -f Dockerfile \
+		--ssh default=$(SSH_AUTH_SOCK) \
 		--build-arg VERSION=$(VERSION) \
 		--build-arg SETUPTOOLS_SCM_PRETEND_VERSION=$(VERSION) \
 		--build-arg HEADLESS=$(HEADLESS) \
@@ -631,6 +638,7 @@ awx-kube-buildx: Dockerfile
 	- docker buildx create --name awx-kube-buildx
 	docker buildx use awx-kube-buildx
 	- docker buildx build \
+		--ssh default=$(SSH_AUTH_SOCK) \
 		--push \
 		--build-arg VERSION=$(VERSION) \
 		--build-arg SETUPTOOLS_SCM_PRETEND_VERSION=$(VERSION) \
@@ -654,6 +662,7 @@ Dockerfile.kube-dev: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
 ## Build awx_kube_devel image for development on local Kubernetes environment.
 awx-kube-dev-build: Dockerfile.kube-dev
 	DOCKER_BUILDKIT=1 docker build -f Dockerfile.kube-dev \
+		--ssh default=$(SSH_AUTH_SOCK) \
 		--build-arg BUILDKIT_INLINE_CACHE=1 \
 		$(DOCKER_KUBE_DEV_CACHE_FLAG) \
 		-t $(IMAGE_KUBE_DEV) .
@@ -663,6 +672,7 @@ awx-kube-dev-buildx: Dockerfile.kube-dev
 	- docker buildx create --name awx-kube-dev-buildx
 	docker buildx use awx-kube-dev-buildx
 	- docker buildx build \
+		--ssh default=$(SSH_AUTH_SOCK) \
 		--push \
 		--build-arg BUILDKIT_INLINE_CACHE=1 \
 		$(DOCKER_KUBE_DEV_CACHE_FLAG) \
README.md (24 changed lines)

@@ -1,8 +1,19 @@
-[CI](https://github.com/ansible/awx/actions/workflows/ci.yml) [Code of Conduct](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html) [License](https://github.com/ansible/awx/blob/devel/LICENSE.md) [Mailing List](https://groups.google.com/g/awx-project)
+[CI](https://github.com/ansible/awx/actions/workflows/ci.yml) [codecov](https://codecov.io/github/ansible/awx) [Code of Conduct](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html) [License](https://github.com/ansible/awx/blob/devel/LICENSE.md) [Ansible Forum](https://forum.ansible.com/tag/awx)
 [Matrix](https://chat.ansible.im/#/welcome) [Forum](https://forum.ansible.com)
 (The link texts above are badge images in the rendered README; the badge images themselves did not survive extraction, so only the target URLs are shown.)

 <img src="https://raw.githubusercontent.com/ansible/awx-logos/master/awx/ui/client/assets/logo-login.svg?sanitize=true" width=200 alt="AWX" />
+> [!CAUTION]
+> The last release of this repository was published on Jul 2, 2024.
+> **Releases of this project are now paused during a large scale refactoring.**
+> For more information, follow [the Forum](https://forum.ansible.com/) and - more specifically - see the various communications on the matter:
+>
+> * [Blog: Upcoming Changes to the AWX Project](https://www.ansible.com/blog/upcoming-changes-to-the-awx-project/)
+> * [Streamlining AWX Releases](https://forum.ansible.com/t/streamlining-awx-releases/6894) (primary update)
+> * [Refactoring AWX into a Pluggable, Service-Oriented Architecture](https://forum.ansible.com/t/refactoring-awx-into-a-pluggable-service-oriented-architecture/7404)
+> * [Upcoming changes to AWX Operator installation methods](https://forum.ansible.com/t/upcoming-changes-to-awx-operator-installation-methods/7598)
+> * [AWX UI and credential types transitioning to the new pluggable architecture](https://forum.ansible.com/t/awx-ui-and-credential-types-transitioning-to-the-new-pluggable-architecture/8027)

 AWX provides a web-based user interface, REST API, and task engine built on top of [Ansible](https://github.com/ansible/ansible). It is one of the upstream projects for [Red Hat Ansible Automation Platform](https://www.ansible.com/products/automation-platform).

 To install AWX, please view the [Install guide](./INSTALL.md).
@@ -18,9 +29,9 @@ Contributing

 - Refer to the [Contributing guide](./CONTRIBUTING.md) to get started developing, testing, and building AWX.
 - All code submissions are made through pull requests against the `devel` branch.
-- All contributors must use git commit --signoff for any commit to be merged and agree that usage of --signoff constitutes agreement with the terms of [DCO 1.1](./DCO_1_1.md)
+- All contributors must use `git commit --signoff` for any commit to be merged and agree that usage of `--signoff` constitutes agreement with the terms of [DCO 1.1](./DCO_1_1.md)
 - Take care to make sure no merge commits are in the submission, and use `git rebase` vs. `git merge` for this reason.
-- If submitting a large code change, it's a good idea to join the `#ansible-awx` channel on web.libera.chat and talk about what you would like to do or add first. This not only helps everyone know what's going on, but it also helps save time and effort if the community decides some changes are needed.
+- If submitting a large code change, it's a good idea to discuss it first via the [Ansible Forum](https://forum.ansible.com/tag/awx). This helps everyone know what's going on, and it also helps save time and effort if the community decides some changes are needed.
 Reporting Issues
 ----------------

@@ -30,12 +41,11 @@ If you're experiencing a problem that you feel is a bug in AWX or have ideas for

 Code of Conduct
 ---------------

-We ask all of our community members and contributors to adhere to the [Ansible code of conduct](http://docs.ansible.com/ansible/latest/community/code_of_conduct.html). If you have questions or need assistance, please reach out to our community team at [codeofconduct@ansible.com](mailto:codeofconduct@ansible.com)
+We require all of our community members and contributors to adhere to the [Ansible code of conduct](http://docs.ansible.com/ansible/latest/community/code_of_conduct.html). If you have questions or need assistance, please reach out to our community team at [codeofconduct@ansible.com](mailto:codeofconduct@ansible.com)

 Get Involved
 ------------

-We welcome your feedback and ideas. Here's how to reach us with feedback and questions:
+We welcome your feedback and ideas via the [Ansible Forum](https://forum.ansible.com/tag/awx).

-- Join the [Ansible AWX channel on Matrix](https://matrix.to/#/#awx:ansible.com)
-- Join the [Ansible Community Forum](https://forum.ansible.com)
 For a full list of all the ways to talk with the Ansible Community, see the [AWX Communication guide](https://ansible.readthedocs.io/projects/awx/en/latest/contributor/communication.html).
awx/__init__.py

@@ -5,6 +5,7 @@ from __future__ import absolute_import, unicode_literals
 import os
 import sys
 import warnings
+from importlib.metadata import PackageNotFoundError, version as _get_version


 def get_version():

@@ -34,10 +35,8 @@ def version_file():


 try:
-    import pkg_resources
-
-    __version__ = pkg_resources.get_distribution('awx').version
-except pkg_resources.DistributionNotFound:
+    __version__ = _get_version('awx')
+except PackageNotFoundError:
     __version__ = get_version()

 __all__ = ['__version__']
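The version lookup now goes through the standard-library `importlib.metadata` instead of the long-deprecated `pkg_resources`. A quick way to exercise the same call from a shell (assumes an installed `awx` distribution):

```sh
# importlib.metadata reads the installed package metadata directly;
# PackageNotFoundError is raised if 'awx' is not installed
python -c "from importlib.metadata import version; print(version('awx'))"
```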
@@ -61,90 +60,16 @@ else:
 from django.db import connection


-def find_commands(management_dir):
-    # Modified version of function from django/core/management/__init__.py.
-    command_dir = os.path.join(management_dir, 'commands')
-    commands = []
-    try:
-        for f in os.listdir(command_dir):
-            if f.startswith('_'):
-                continue
-            elif f.endswith('.py') and f[:-3] not in commands:
-                commands.append(f[:-3])
-            elif f.endswith('.pyc') and f[:-4] not in commands:  # pragma: no cover
-                commands.append(f[:-4])
-    except OSError:
-        pass
-    return commands
-
-
-def oauth2_getattribute(self, attr):
-    # Custom method to override
-    # oauth2_provider.settings.OAuth2ProviderSettings.__getattribute__
-    from django.conf import settings
-    from oauth2_provider.settings import DEFAULTS
-
-    val = None
-    if (isinstance(attr, str)) and (attr in DEFAULTS) and (not attr.startswith('_')):
-        # certain Django OAuth Toolkit migrations actually reference
-        # setting lookups for references to model classes (e.g.,
-        # oauth2_settings.REFRESH_TOKEN_MODEL)
-        # If we're doing an OAuth2 setting lookup *while running* a migration,
-        # don't do our usual database settings lookup
-        val = settings.OAUTH2_PROVIDER.get(attr)
-    if val is None:
-        val = object.__getattribute__(self, attr)
-    return val
-
-
 def prepare_env():
     # Update the default settings environment variable based on current mode.
-    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'awx.settings.%s' % MODE)
+    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'awx.settings')
+    os.environ.setdefault('AWX_MODE', MODE)
     # Hide DeprecationWarnings when running in production. Need to first load
     # settings to apply our filter after Django's own warnings filter.
     from django.conf import settings

     if not settings.DEBUG:  # pragma: no cover
         warnings.simplefilter('ignore', DeprecationWarning)
-    # Monkeypatch Django find_commands to also work with .pyc files.
-    import django.core.management
-
-    django.core.management.find_commands = find_commands
-
-    # Monkeypatch Oauth2 toolkit settings class to check for settings
-    # in django.conf settings each time, not just once during import
-    import oauth2_provider.settings
-
-    oauth2_provider.settings.OAuth2ProviderSettings.__getattribute__ = oauth2_getattribute
-
-    # Use the AWX_TEST_DATABASE_* environment variables to specify the test
-    # database settings to use when management command is run as an external
-    # program via unit tests.
-    for opt in ('ENGINE', 'NAME', 'USER', 'PASSWORD', 'HOST', 'PORT'):  # pragma: no cover
-        if os.environ.get('AWX_TEST_DATABASE_%s' % opt, None):
-            settings.DATABASES['default'][opt] = os.environ['AWX_TEST_DATABASE_%s' % opt]
-    # Disable capturing all SQL queries in memory when in DEBUG mode.
-    if settings.DEBUG and not getattr(settings, 'SQL_DEBUG', True):
-        from django.db.backends.base.base import BaseDatabaseWrapper
-        from django.db.backends.utils import CursorWrapper
-
-        BaseDatabaseWrapper.make_debug_cursor = lambda self, cursor: CursorWrapper(cursor, self)
-
-    # Use the default devserver addr/port defined in settings for runserver.
-    default_addr = getattr(settings, 'DEVSERVER_DEFAULT_ADDR', '127.0.0.1')
-    default_port = getattr(settings, 'DEVSERVER_DEFAULT_PORT', 8000)
-    from django.core.management.commands import runserver as core_runserver
-
-    original_handle = core_runserver.Command.handle
-
-    def handle(self, *args, **options):
-        if not options.get('addrport'):
-            options['addrport'] = '%s:%d' % (default_addr, int(default_port))
-        elif options.get('addrport').isdigit():
-            options['addrport'] = '%s:%d' % (default_addr, int(options['addrport']))
-        return original_handle(self, *args, **options)
-
-    core_runserver.Command.handle = handle


 def manage():
awx/api/authentication.py

@@ -11,9 +11,6 @@ from django.utils.encoding import smart_str
 # Django REST Framework
 from rest_framework import authentication

-# Django-OAuth-Toolkit
-from oauth2_provider.contrib.rest_framework import OAuth2Authentication
-
 logger = logging.getLogger('awx.api.authentication')

@@ -36,16 +33,3 @@ class LoggedBasicAuthentication(authentication.BasicAuthentication):
 class SessionAuthentication(authentication.SessionAuthentication):
     def authenticate_header(self, request):
         return 'Session'
-
-
-class LoggedOAuth2Authentication(OAuth2Authentication):
-    def authenticate(self, request):
-        ret = super(LoggedOAuth2Authentication, self).authenticate(request)
-        if ret:
-            user, token = ret
-            username = user.username if user else '<none>'
-            logger.info(
-                smart_str(u"User {} performed a {} to {} through the API using OAuth 2 token {}.".format(username, request.method, request.path, token.pk))
-            )
-            setattr(user, 'oauth_scopes', [x for x in token.scope.split() if x])
-        return ret
awx/api/conf.py

@@ -6,9 +6,6 @@ from rest_framework import serializers

 # AWX
 from awx.conf import fields, register, register_validate
-from awx.api.fields import OAuth2ProviderField
-from oauth2_provider.settings import oauth2_settings
-from awx.sso.common import is_remote_auth_enabled


 register(

@@ -35,10 +32,7 @@ register(
     'DISABLE_LOCAL_AUTH',
     field_class=fields.BooleanField,
     label=_('Disable the built-in authentication system'),
-    help_text=_(
-        "Controls whether users are prevented from using the built-in authentication system. "
-        "You probably want to do this if you are using an LDAP or SAML integration."
-    ),
+    help_text=_("Controls whether users are prevented from using the built-in authentication system. "),
     category=_('Authentication'),
     category_slug='authentication',
 )

@@ -50,41 +44,6 @@ register(
     category=_('Authentication'),
     category_slug='authentication',
 )
-register(
-    'OAUTH2_PROVIDER',
-    field_class=OAuth2ProviderField,
-    default={
-        'ACCESS_TOKEN_EXPIRE_SECONDS': oauth2_settings.ACCESS_TOKEN_EXPIRE_SECONDS,
-        'AUTHORIZATION_CODE_EXPIRE_SECONDS': oauth2_settings.AUTHORIZATION_CODE_EXPIRE_SECONDS,
-        'REFRESH_TOKEN_EXPIRE_SECONDS': oauth2_settings.REFRESH_TOKEN_EXPIRE_SECONDS,
-    },
-    label=_('OAuth 2 Timeout Settings'),
-    help_text=_(
-        'Dictionary for customizing OAuth 2 timeouts, available items are '
-        '`ACCESS_TOKEN_EXPIRE_SECONDS`, the duration of access tokens in the number '
-        'of seconds, `AUTHORIZATION_CODE_EXPIRE_SECONDS`, the duration of '
-        'authorization codes in the number of seconds, and `REFRESH_TOKEN_EXPIRE_SECONDS`, '
-        'the duration of refresh tokens, after expired access tokens, '
-        'in the number of seconds.'
-    ),
-    category=_('Authentication'),
-    category_slug='authentication',
-    unit=_('seconds'),
-)
-register(
-    'ALLOW_OAUTH2_FOR_EXTERNAL_USERS',
-    field_class=fields.BooleanField,
-    default=False,
-    label=_('Allow External Users to Create OAuth2 Tokens'),
-    help_text=_(
-        'For security reasons, users from external auth providers (LDAP, SAML, '
-        'SSO, Radius, and others) are not allowed to create OAuth2 tokens. '
-        'To change this behavior, enable this setting. Existing tokens will '
-        'not be deleted when this setting is toggled off.'
-    ),
-    category=_('Authentication'),
-    category_slug='authentication',
-)
 register(
     'LOGIN_REDIRECT_OVERRIDE',
     field_class=fields.CharField,

@@ -109,7 +68,7 @@ register(


 def authentication_validate(serializer, attrs):
-    if attrs.get('DISABLE_LOCAL_AUTH', False) and not is_remote_auth_enabled():
+    if attrs.get('DISABLE_LOCAL_AUTH', False):
         raise serializers.ValidationError(_("There are no remote authentication systems configured."))
     return attrs
awx/api/fields.py

@@ -9,7 +9,6 @@ from django.core.exceptions import ObjectDoesNotExist
 from rest_framework import serializers

 # AWX
-from awx.conf import fields
 from awx.main.models import Credential

 __all__ = ['BooleanNullField', 'CharNullField', 'ChoiceNullField', 'VerbatimField']

@@ -79,19 +78,6 @@ class VerbatimField(serializers.Field):
         return value


-class OAuth2ProviderField(fields.DictField):
-    default_error_messages = {'invalid_key_names': _('Invalid key names: {invalid_key_names}')}
-    valid_key_names = {'ACCESS_TOKEN_EXPIRE_SECONDS', 'AUTHORIZATION_CODE_EXPIRE_SECONDS', 'REFRESH_TOKEN_EXPIRE_SECONDS'}
-    child = fields.IntegerField(min_value=1)
-
-    def to_internal_value(self, data):
-        data = super(OAuth2ProviderField, self).to_internal_value(data)
-        invalid_flags = set(data.keys()) - self.valid_key_names
-        if invalid_flags:
-            self.fail('invalid_key_names', invalid_key_names=', '.join(list(invalid_flags)))
-        return data
-
-
 class DeprecatedCredentialField(serializers.IntegerField):
     def __init__(self, **kwargs):
         kwargs['allow_null'] = True
awx/api/generics.py

@@ -13,8 +13,8 @@ from django.contrib.contenttypes.models import ContentType
 from django.core.exceptions import FieldDoesNotExist
 from django.db import connection, transaction
 from django.db.models.fields.related import OneToOneRel
-from django.http import QueryDict
-from django.shortcuts import get_object_or_404
+from django.http import QueryDict, JsonResponse
+from django.shortcuts import get_object_or_404, redirect
 from django.template.loader import render_to_string
 from django.utils.encoding import smart_str
 from django.utils.safestring import mark_safe

@@ -30,10 +30,13 @@ from rest_framework.permissions import IsAuthenticated
 from rest_framework.renderers import StaticHTMLRenderer
 from rest_framework.negotiation import DefaultContentNegotiation

+# Shared code for the AWX platform
+from awx_plugins.interfaces._temporary_private_licensing_api import detect_server_product_name
+
 # django-ansible-base
 from ansible_base.rest_filters.rest_framework.field_lookup_backend import FieldLookupBackend
 from ansible_base.lib.utils.models import get_all_field_names
-from ansible_base.lib.utils.requests import get_remote_host
+from ansible_base.lib.utils.requests import get_remote_host, is_proxied_request
 from ansible_base.rbac.models import RoleEvaluation, RoleDefinition
 from ansible_base.rbac.permission_registry import permission_registry
 from ansible_base.jwt_consumer.common.util import validate_x_trusted_proxy_header

@@ -43,7 +46,6 @@ from awx.main.models import UnifiedJob, UnifiedJobTemplate, User, Role, Credenti
 from awx.main.models.rbac import give_creator_permissions
 from awx.main.access import optimize_queryset
 from awx.main.utils import camelcase_to_underscore, get_search_fields, getattrd, get_object_or_400, decrypt_field, get_awx_version
-from awx.main.utils.licensing import server_product_name
 from awx.main.utils.proxy import is_proxy_in_headers, delete_headers_starting_with_http
 from awx.main.views import ApiErrorView
 from awx.api.serializers import ResourceAccessListElementSerializer, CopySerializer

@@ -79,7 +81,14 @@ analytics_logger = logging.getLogger('awx.analytics.performance')


 class LoggedLoginView(auth_views.LoginView):

     def get(self, request, *args, **kwargs):
+        if is_proxied_request():
+            next = request.GET.get('next', "")
+            if next:
+                next = f"?next={next}"
+            return redirect(f"/{next}")
+
         # The django.auth.contrib login form doesn't perform the content
         # negotiation we've come to expect from DRF; add in code to catch
         # situations where Accept != text/html (or */*) and reply with

@@ -95,6 +104,15 @@ class LoggedLoginView(auth_views.LoginView):
         return super(LoggedLoginView, self).get(request, *args, **kwargs)

     def post(self, request, *args, **kwargs):
+        if is_proxied_request():
+            # Return a message telling the user to log in via Platform Authentication
+            return JsonResponse(
+                {
+                    'detail': _('Please log in via Platform Authentication.'),
+                },
+                status=status.HTTP_401_UNAUTHORIZED,
+            )
+
         ret = super(LoggedLoginView, self).post(request, *args, **kwargs)
         ip = get_remote_host(request)  # request.META.get('REMOTE_ADDR', None)
         if request.user.is_authenticated:

@@ -113,10 +131,15 @@ class LoggedLoginView(auth_views.LoginView):


 class LoggedLogoutView(auth_views.LogoutView):

     success_url_allowed_hosts = set(settings.LOGOUT_ALLOWED_HOSTS.split(",")) if settings.LOGOUT_ALLOWED_HOSTS else set()

     def dispatch(self, request, *args, **kwargs):
+        if is_proxied_request():
+            # 1) We intentionally don't obey ?next= here, just always redirect to platform login
+            # 2) Hack to prevent rewrites of Location header
+            qs = "?__gateway_no_rewrite__=1&next=/"
+            return redirect(f"/api/gateway/v1/logout/{qs}")
+
         original_user = getattr(request, 'user', None)
         ret = super(LoggedLogoutView, self).dispatch(request, *args, **kwargs)
         current_user = getattr(request, 'user', None)

@@ -138,10 +161,10 @@ def get_view_description(view, html=False):


 def get_default_schema():
-    if settings.SETTINGS_MODULE == 'awx.settings.development':
-        from awx.api.swagger import AutoSchema
+    if settings.DYNACONF.is_development_mode:
+        from awx.api.swagger import schema_view

-        return AutoSchema()
+        return schema_view
     else:
         return views.APIView.schema

@@ -244,7 +267,8 @@ class APIView(views.APIView):
         if hasattr(self, '__init_request_error__'):
             response = self.handle_exception(self.__init_request_error__)
             if response.status_code == 401:
-                response.data['detail'] += _(' To establish a login session, visit') + ' /api/login/.'
+                if response.data and 'detail' in response.data:
+                    response.data['detail'] += _(' To establish a login session, visit') + ' /api/login/.'
                 logger.info(status_msg)
             else:
                 logger.warning(status_msg)

@@ -253,7 +277,7 @@ class APIView(views.APIView):
         time_started = getattr(self, 'time_started', None)
         if request.user.is_authenticated:
             response['X-API-Product-Version'] = get_awx_version()
-            response['X-API-Product-Name'] = server_product_name()
+            response['X-API-Product-Name'] = detect_server_product_name()

         response['X-API-Node'] = settings.CLUSTER_HOST_ID
         if time_started:

@@ -350,12 +374,6 @@ class APIView(views.APIView):
             kwargs.pop('version')
         return super(APIView, self).dispatch(request, *args, **kwargs)

-    def check_permissions(self, request):
-        if request.method not in ('GET', 'OPTIONS', 'HEAD'):
-            if 'write' not in getattr(request.user, 'oauth_scopes', ['write']):
-                raise PermissionDenied()
-        return super(APIView, self).check_permissions(request)
-

 class GenericAPIView(generics.GenericAPIView, APIView):
     # Base class for all model-based views.

@@ -826,7 +844,7 @@ class ResourceAccessList(ParentMixin, ListAPIView):
         if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
             ancestors = set(RoleEvaluation.objects.filter(content_type_id=content_type.id, object_id=obj.id).values_list('role_id', flat=True))
             qs = User.objects.filter(has_roles__in=ancestors) | User.objects.filter(is_superuser=True)
-            auditor_role = RoleDefinition.objects.filter(name="System Auditor").first()
+            auditor_role = RoleDefinition.objects.filter(name="Platform Auditor").first()
             if auditor_role:
                 qs |= User.objects.filter(role_assignments__role_definition=auditor_role)
         return qs.distinct()
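Reading the `is_proxied_request()` branches above, the observable behavior on a gateway-proxied install can be checked from a shell; a sketch (the hostname is illustrative, and this only applies when requests arrive through the platform gateway):

```sh
# GET /api/login/ now redirects to the platform login, preserving ?next=
curl -i http://awx.example.org/api/login/?next=/api/v2/me/
# expect: HTTP 302 with Location: /?next=/api/v2/me/

# POSTing credentials directly is rejected with a JSON hint
curl -i -X POST http://awx.example.org/api/login/ \
     -d 'username=admin&password=secret'
# expect: HTTP 401, {"detail": "Please log in via Platform Authentication."}
```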
awx/api/permissions.py

@@ -10,7 +10,7 @@ from rest_framework import permissions

 # AWX
 from awx.main.access import check_user_access
-from awx.main.models import Inventory, UnifiedJob
+from awx.main.models import Inventory, UnifiedJob, Organization
 from awx.main.utils import get_object_or_400

 logger = logging.getLogger('awx.api.permissions')

@@ -228,12 +228,19 @@ class InventoryInventorySourcesUpdatePermission(ModelAccessPermission):
 class UserPermission(ModelAccessPermission):
     def check_post_permissions(self, request, view, obj=None):
         if not request.data:
-            return request.user.admin_of_organizations.exists()
+            return Organization.access_qs(request.user, 'change').exists()
         elif request.user.is_superuser:
             return True
         raise PermissionDenied()


+class IsSystemAdmin(permissions.BasePermission):
+    def has_permission(self, request, view):
+        if not (request.user and request.user.is_authenticated):
+            return False
+        return request.user.is_superuser
+
+
 class IsSystemAdminOrAuditor(permissions.BasePermission):
     """
     Allows write access only to system admin users.
awx/api/serializers.py

@@ -6,14 +6,12 @@ import copy
 import json
 import logging
 import re
 import yaml
+import urllib.parse

 from collections import Counter, OrderedDict
 from datetime import timedelta
 from uuid import uuid4

-# OAuth2
-from oauthlib import oauth2
-from oauthlib.common import generate_token
-
 # Jinja
 from jinja2 import sandbox, StrictUndefined
 from jinja2.exceptions import TemplateSyntaxError, UndefinedError, SecurityError

@@ -50,7 +48,7 @@ from ansible_base.rbac import permission_registry

 # AWX
 from awx.main.access import get_user_capabilities
-from awx.main.constants import ACTIVE_STATES, CENSOR_VALUE, org_role_to_permission
+from awx.main.constants import ACTIVE_STATES, org_role_to_permission
 from awx.main.models import (
     ActivityStream,
     AdHocCommand,

@@ -79,14 +77,11 @@ from awx.main.models import (
     Label,
     Notification,
     NotificationTemplate,
-    OAuth2AccessToken,
-    OAuth2Application,
     Organization,
     Project,
     ProjectUpdate,
     ProjectUpdateEvent,
     ReceptorAddress,
-    RefreshToken,
     Role,
     Schedule,
     SystemJob,

@@ -102,7 +97,6 @@ from awx.main.models import (
     WorkflowJobTemplate,
     WorkflowJobTemplateNode,
     StdoutMaxBytesExceeded,
-    CLOUD_INVENTORY_SOURCES,
 )
 from awx.main.models.base import VERBOSITY_CHOICES, NEW_JOB_TYPE_CHOICES
 from awx.main.models.rbac import role_summary_fields_generator, give_creator_permissions, get_role_codenames, to_permissions, get_role_from_object_role

@@ -119,8 +113,11 @@ from awx.main.utils import (
     truncate_stdout,
     get_licenser,
 )

 from awx.main.utils.filters import SmartFilter
+from awx.main.utils.plugins import load_combined_inventory_source_options
 from awx.main.utils.named_url_graph import reset_counters
+from awx.main.utils.inventory_vars import update_group_variables
 from awx.main.scheduler.task_manager_models import TaskManagerModels
 from awx.main.redact import UriCleaner, REPLACE_STR
 from awx.main.signals import update_inventory_computed_fields

@@ -134,8 +131,6 @@ from awx.api.fields import BooleanNullField, CharNullField, ChoiceNullField, Ver
 # AWX Utils
 from awx.api.validators import HostnameRegexValidator

-from awx.sso.common import get_external_account
-
 logger = logging.getLogger('awx.api.serializers')

 # Fields that should be summarized regardless of object type.
@@ -634,15 +629,41 @@ class BaseSerializer(serializers.ModelSerializer, metaclass=BaseSerializerMetacl
         return exclusions

     def validate(self, attrs):
+        """
+        Apply serializer validation. Called by DRF.
+
+        Can be extended by subclasses; alternatively, consider overriding
+        `validate_with_obj` in subclasses, which provides access to the model
+        object and exception handling for field validation.
+
+        :param dict attrs: The names and values of the model form fields.
+        :raise rest_framework.exceptions.ValidationError: If the validation
+            fails.
+
+            The exception must contain a dict with the names of the form fields
+            which failed validation as keys, and a list of error messages as
+            values. This ensures that the error messages are rendered near the
+            relevant fields.
+        :return: The names and values from the model form fields, possibly
+            modified by the validations.
+        :rtype: dict
+        """
         attrs = super(BaseSerializer, self).validate(attrs)
+        # Create/update a model instance and run its full_clean() method to
+        # do any validation implemented on the model class.
+        exclusions = self.get_validation_exclusions(self.instance)
+        # Create a new model instance or take the existing one if it exists,
+        # and update its attributes with the respective field values from
+        # attrs.
+        obj = self.instance or self.Meta.model()
+        for k, v in attrs.items():
+            if k not in exclusions and k != 'canonical_address_port':
+                setattr(obj, k, v)
         try:
-            # Create/update a model instance and run its full_clean() method to
-            # do any validation implemented on the model class.
-            exclusions = self.get_validation_exclusions(self.instance)
-            obj = self.instance or self.Meta.model()
-            for k, v in attrs.items():
-                if k not in exclusions and k != 'canonical_address_port':
-                    setattr(obj, k, v)
+            # Run serializer validators which need the model object for
+            # validation.
+            self.validate_with_obj(attrs, obj)
+            # Apply any validations implemented on the model class.
             obj.full_clean(exclude=exclusions)
             # full_clean may modify values on the instance; copy those changes
             # back to attrs so they are saved.

@@ -671,6 +692,32 @@ class BaseSerializer(serializers.ModelSerializer, metaclass=BaseSerializerMetacl
             raise ValidationError(d)
         return attrs

+    def validate_with_obj(self, attrs, obj):
+        """
+        Override this if you need the model instance for your validation.
+
+        :param dict attrs: The names and values of the model form fields.
+        :param obj: An instance of the class's meta model.
+
+            If the serializer runs on a newly created object, obj contains only
+            the attrs from its serializer. If the serializer runs because an
+            object has been edited, obj is the existing model instance with all
+            attributes and values available.
+        :raise django.core.exceptions.ValidationError: Raise this if your
+            validation fails.
+
+            To make the error appear at the respective form field, instantiate
+            the Exception with a dict containing the field name as key and the
+            error message as value.
+
+            Example: ``ValidationError({"password": "Not good enough!"})``
+
+            If the exception contains just a string, the message cannot be
+            related to a field and is rendered at the top of the model form.
+        :return: None
+        """
+        return
+
     def reverse(self, *args, **kwargs):
         kwargs['request'] = self.context.get('request')
         return reverse(*args, **kwargs)
@@ -687,7 +734,22 @@ class EmptySerializer(serializers.Serializer):
     pass


-class UnifiedJobTemplateSerializer(BaseSerializer):
+class OpaQueryPathMixin(serializers.Serializer):
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+
+    def validate_opa_query_path(self, value):
+        # Decode the URL and re-encode it
+        decoded_value = urllib.parse.unquote(value)
+        re_encoded_value = urllib.parse.quote(decoded_value, safe='/')
+
+        if value != re_encoded_value:
+            raise serializers.ValidationError(_("The URL must be properly encoded."))
+
+        return value
+
+
+class UnifiedJobTemplateSerializer(BaseSerializer, OpaQueryPathMixin):
     # As a base serializer, the capabilities prefetch is not used directly,
     # instead they are derived from the Workflow Job Template Serializer and the Job Template Serializer, respectively.
     capabilities_prefetch = []
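The mixin's check is a round trip: decode the value, re-encode it, and require the result to match the input, which rejects paths containing characters that should have been percent-encoded. The same round trip can be tried outside the serializer; a sketch with illustrative values:

```sh
python - <<'EOF'
import urllib.parse

# 'aw x' fails: re-encoding turns the space into %20, so it differs from the input
for value in ('v1/data/awx/allow', 'v1/data/aw x', 'v1/data/aw%20x'):
    decoded = urllib.parse.unquote(value)
    ok = urllib.parse.quote(decoded, safe='/') == value
    print(value, '->', 'ok' if ok else 'must be properly encoded')
EOF
```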
@@ -961,8 +1023,6 @@ class UnifiedJobStdoutSerializer(UnifiedJobSerializer):

 class UserSerializer(BaseSerializer):
     password = serializers.CharField(required=False, default='', help_text=_('Field used to change the password.'))
-    ldap_dn = serializers.CharField(source='profile.ldap_dn', read_only=True)
     external_account = serializers.SerializerMethodField(help_text=_('Set if the account is managed by an external service'))
     is_system_auditor = serializers.BooleanField(default=False)
     show_capabilities = ['edit', 'delete']

@@ -979,22 +1039,13 @@ class UserSerializer(BaseSerializer):
             'is_superuser',
             'is_system_auditor',
             'password',
-            'ldap_dn',
             'last_login',
             'external_account',
         )
         extra_kwargs = {'last_login': {'read_only': True}}

     def to_representation(self, obj):
         ret = super(UserSerializer, self).to_representation(obj)
-        if self.get_external_account(obj):
-            # If this is an external account it shouldn't have a password field
-            ret.pop('password', None)
-        else:
-            # If its an internal account lets assume there is a password and return $encrypted$ to the user
-            ret['password'] = '$encrypted$'
-        if obj and type(self) is UserSerializer:
-            ret['auth'] = obj.social_auth.values('provider', 'uid')
+        ret['password'] = '$encrypted$'
         return ret

     def get_validation_exclusions(self, obj=None):
@@ -1003,7 +1054,6 @@ class UserSerializer(BaseSerializer):
         return ret

     def validate_password(self, value):
-        django_validate_password(value)
         if not self.instance and value in (None, ''):
             raise serializers.ValidationError(_('Password required for new User.'))
@@ -1026,11 +1076,52 @@ class UserSerializer(BaseSerializer):

         return value

+    def validate_with_obj(self, attrs, obj):
+        """
+        Validate the password with the Django password validators
+
+        To enable the Django password validators, configure
+        `settings.AUTH_PASSWORD_VALIDATORS` as described in the [Django
+        docs](https://docs.djangoproject.com/en/5.1/topics/auth/passwords/#enabling-password-validation)
+
+        :param dict attrs: The User form field names and their values as a dict.
+            Example::
+
+                {
+                    'username': 'TestUsername', 'first_name': 'FirstName',
+                    'last_name': 'LastName', 'email': 'First.Last@my.org',
+                    'is_superuser': False, 'is_system_auditor': False,
+                    'password': 'secret123'
+                }
+
+        :param obj: The User model instance.
+        :raises django.core.exceptions.ValidationError: Raise this if at least
+            one Django password validator fails.
+
+            The exception contains a dict ``{"password": <error-message>}``
+            which indicates that the password field has failed validation, and
+            the reason for failure.
+        :return: None.
+        """
+        # We must do this here instead of in `validate_password` because some
+        # django password validators need access to other model instance fields,
+        # e.g. ``username`` for the ``UserAttributeSimilarityValidator``.
+        password = attrs.get("password")
+        # Skip validation if no password has been entered. This may happen when
+        # an existing User is edited.
+        if password and password != '$encrypted$':
+            # Apply validators from settings.AUTH_PASSWORD_VALIDATORS. This may
+            # raise ValidationError.
+            #
+            # If the validation fails, re-raise the exception with adjusted
+            # content to make the error appear near the password field.
+            try:
+                django_validate_password(password, user=obj)
+            except DjangoValidationError as exc:
+                raise DjangoValidationError({"password": exc.messages})
+
     def _update_password(self, obj, new_password):
         # For now we're not raising an error, just not saving password for
         # users managed by LDAP who already have an unusable password set.
-        # Get external password will return something like ldap or enterprise or None if the user isn't external. We only want to allow a password update for a None option
-        if new_password and new_password != '$encrypted$' and not self.get_external_account(obj):
+        if new_password and new_password != '$encrypted$':
             obj.set_password(new_password)
             obj.save(update_fields=['password'])
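The serializer now defers entirely to Django's pluggable password validation. The same machinery can be exercised outside AWX; a self-contained sketch (the validator configuration here is illustrative, not AWX's settings):

```sh
python - <<'EOF'
import django
from django.conf import settings

# Minimal standalone settings with one built-in validator enabled
settings.configure(
    AUTH_PASSWORD_VALIDATORS=[
        {'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
         'OPTIONS': {'min_length': 9}},
    ],
)
django.setup()

from django.core.exceptions import ValidationError
from django.contrib.auth.password_validation import validate_password

try:
    validate_password('short')
except ValidationError as exc:
    # e.g. ['This password is too short. It must contain at least 9 characters.']
    print(exc.messages)
EOF
```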
@@ -1045,9 +1136,6 @@ class UserSerializer(BaseSerializer):
             obj.set_unusable_password()
             obj.save(update_fields=['password'])

-    def get_external_account(self, obj):
-        return get_external_account(obj)
-
     def create(self, validated_data):
         new_password = validated_data.pop('password', None)
         is_system_auditor = validated_data.pop('is_system_auditor', None)
@@ -1078,44 +1166,10 @@ class UserSerializer(BaseSerializer):
|
||||
                roles=self.reverse('api:user_roles_list', kwargs={'pk': obj.pk}),
                activity_stream=self.reverse('api:user_activity_stream_list', kwargs={'pk': obj.pk}),
                access_list=self.reverse('api:user_access_list', kwargs={'pk': obj.pk}),
                tokens=self.reverse('api:o_auth2_token_list', kwargs={'pk': obj.pk}),
                authorized_tokens=self.reverse('api:user_authorized_token_list', kwargs={'pk': obj.pk}),
                personal_tokens=self.reverse('api:user_personal_token_list', kwargs={'pk': obj.pk}),
            )
        )
        return res

    def _validate_ldap_managed_field(self, value, field_name):
        if not getattr(settings, 'AUTH_LDAP_SERVER_URI', None):
            return value
        try:
            is_ldap_user = bool(self.instance and self.instance.profile.ldap_dn)
        except AttributeError:
            is_ldap_user = False
        if is_ldap_user:
            ldap_managed_fields = ['username']
            ldap_managed_fields.extend(getattr(settings, 'AUTH_LDAP_USER_ATTR_MAP', {}).keys())
            ldap_managed_fields.extend(getattr(settings, 'AUTH_LDAP_USER_FLAGS_BY_GROUP', {}).keys())
            if field_name in ldap_managed_fields:
                if value != getattr(self.instance, field_name):
                    raise serializers.ValidationError(_('Unable to change %s on user managed by LDAP.') % field_name)
        return value

    def validate_username(self, value):
        return self._validate_ldap_managed_field(value, 'username')

    def validate_first_name(self, value):
        return self._validate_ldap_managed_field(value, 'first_name')

    def validate_last_name(self, value):
        return self._validate_ldap_managed_field(value, 'last_name')

    def validate_email(self, value):
        return self._validate_ldap_managed_field(value, 'email')

    def validate_is_superuser(self, value):
        return self._validate_ldap_managed_field(value, 'is_superuser')


class UserActivityStreamSerializer(UserSerializer):
    """Changes to system auditor status are shown as separate entries,

@@ -1128,205 +1182,12 @@ class UserActivityStreamSerializer(UserSerializer):
        fields = ('*', '-is_system_auditor')


class BaseOAuth2TokenSerializer(BaseSerializer):
    refresh_token = serializers.SerializerMethodField()
    token = serializers.SerializerMethodField()
    ALLOWED_SCOPES = ['read', 'write']

    class Meta:
        model = OAuth2AccessToken
        fields = ('*', '-name', 'description', 'user', 'token', 'refresh_token', 'application', 'expires', 'scope')
        read_only_fields = ('user', 'token', 'expires', 'refresh_token')
        extra_kwargs = {'scope': {'allow_null': False, 'required': False}, 'user': {'allow_null': False, 'required': True}}

    def get_token(self, obj):
        request = self.context.get('request', None)
        try:
            if request.method == 'POST':
                return obj.token
            else:
                return CENSOR_VALUE
        except ObjectDoesNotExist:
            return ''

    def get_refresh_token(self, obj):
        request = self.context.get('request', None)
        try:
            if not obj.refresh_token:
                return None
            elif request.method == 'POST':
                return getattr(obj.refresh_token, 'token', '')
            else:
                return CENSOR_VALUE
        except ObjectDoesNotExist:
            return None

    def get_related(self, obj):
        ret = super(BaseOAuth2TokenSerializer, self).get_related(obj)
        if obj.user:
            ret['user'] = self.reverse('api:user_detail', kwargs={'pk': obj.user.pk})
        if obj.application:
            ret['application'] = self.reverse('api:o_auth2_application_detail', kwargs={'pk': obj.application.pk})
        ret['activity_stream'] = self.reverse('api:o_auth2_token_activity_stream_list', kwargs={'pk': obj.pk})
        return ret

    def _is_valid_scope(self, value):
        if not value or (not isinstance(value, str)):
            return False
        words = value.split()
        for word in words:
            if words.count(word) > 1:
                return False  # do not allow duplicates
            if word not in self.ALLOWED_SCOPES:
                return False
        return True

    def validate_scope(self, value):
        if not self._is_valid_scope(value):
            raise serializers.ValidationError(_('Must be a simple space-separated string with allowed scopes {}.').format(self.ALLOWED_SCOPES))
        return value

    def create(self, validated_data):
        validated_data['user'] = self.context['request'].user
        try:
            return super(BaseOAuth2TokenSerializer, self).create(validated_data)
        except oauth2.AccessDeniedError as e:
            raise PermissionDenied(str(e))


class UserAuthorizedTokenSerializer(BaseOAuth2TokenSerializer):
    class Meta:
        extra_kwargs = {
            'scope': {'allow_null': False, 'required': False},
            'user': {'allow_null': False, 'required': True},
            'application': {'allow_null': False, 'required': True},
        }

    def create(self, validated_data):
        current_user = self.context['request'].user
        validated_data['token'] = generate_token()
        validated_data['expires'] = now() + timedelta(seconds=settings.OAUTH2_PROVIDER['ACCESS_TOKEN_EXPIRE_SECONDS'])
        obj = super(UserAuthorizedTokenSerializer, self).create(validated_data)
        obj.save()
        if obj.application:
            RefreshToken.objects.create(user=current_user, token=generate_token(), application=obj.application, access_token=obj)
        return obj


class OAuth2TokenSerializer(BaseOAuth2TokenSerializer):
    def create(self, validated_data):
        current_user = self.context['request'].user
        validated_data['token'] = generate_token()
        validated_data['expires'] = now() + timedelta(seconds=settings.OAUTH2_PROVIDER['ACCESS_TOKEN_EXPIRE_SECONDS'])
        obj = super(OAuth2TokenSerializer, self).create(validated_data)
        if obj.application and obj.application.user:
            obj.user = obj.application.user
        obj.save()
        if obj.application:
            RefreshToken.objects.create(user=current_user, token=generate_token(), application=obj.application, access_token=obj)
        return obj


class OAuth2TokenDetailSerializer(OAuth2TokenSerializer):
    class Meta:
        read_only_fields = ('*', 'user', 'application')


class UserPersonalTokenSerializer(BaseOAuth2TokenSerializer):
    class Meta:
        read_only_fields = ('user', 'token', 'expires', 'application')

    def create(self, validated_data):
        validated_data['token'] = generate_token()
        validated_data['expires'] = now() + timedelta(seconds=settings.OAUTH2_PROVIDER['ACCESS_TOKEN_EXPIRE_SECONDS'])
        validated_data['application'] = None
        obj = super(UserPersonalTokenSerializer, self).create(validated_data)
        obj.save()
        return obj


class OAuth2ApplicationSerializer(BaseSerializer):
    show_capabilities = ['edit', 'delete']

    class Meta:
        model = OAuth2Application
        fields = (
            '*',
            'description',
            '-user',
            'client_id',
            'client_secret',
            'client_type',
            'redirect_uris',
            'authorization_grant_type',
            'skip_authorization',
            'organization',
        )
        read_only_fields = ('client_id', 'client_secret')
        read_only_on_update_fields = ('user', 'authorization_grant_type')
        extra_kwargs = {
            'user': {'allow_null': True, 'required': False},
            'organization': {'allow_null': False},
            'authorization_grant_type': {'allow_null': False, 'label': _('Authorization Grant Type')},
            'client_secret': {'label': _('Client Secret')},
            'client_type': {'label': _('Client Type')},
            'redirect_uris': {'label': _('Redirect URIs')},
            'skip_authorization': {'label': _('Skip Authorization')},
        }

    def to_representation(self, obj):
        ret = super(OAuth2ApplicationSerializer, self).to_representation(obj)
        request = self.context.get('request', None)
        if request.method != 'POST' and obj.client_type == 'confidential':
            ret['client_secret'] = CENSOR_VALUE
        if obj.client_type == 'public':
            ret.pop('client_secret', None)
        return ret

    def get_related(self, obj):
        res = super(OAuth2ApplicationSerializer, self).get_related(obj)
        res.update(
            dict(
                tokens=self.reverse('api:o_auth2_application_token_list', kwargs={'pk': obj.pk}),
                activity_stream=self.reverse('api:o_auth2_application_activity_stream_list', kwargs={'pk': obj.pk}),
            )
        )
        if obj.organization_id:
            res.update(
                dict(
                    organization=self.reverse('api:organization_detail', kwargs={'pk': obj.organization_id}),
                )
            )
        return res

    def get_modified(self, obj):
        if obj is None:
            return None
        return obj.updated

    def _summary_field_tokens(self, obj):
        token_list = [{'id': x.pk, 'token': CENSOR_VALUE, 'scope': x.scope} for x in obj.oauth2accesstoken_set.all()[:10]]
        if has_model_field_prefetched(obj, 'oauth2accesstoken_set'):
            token_count = len(obj.oauth2accesstoken_set.all())
        else:
            if len(token_list) < 10:
                token_count = len(token_list)
            else:
                token_count = obj.oauth2accesstoken_set.count()
        return {'count': token_count, 'results': token_list}

    def get_summary_fields(self, obj):
        ret = super(OAuth2ApplicationSerializer, self).get_summary_fields(obj)
        ret['tokens'] = self._summary_field_tokens(obj)
        return ret


class OrganizationSerializer(BaseSerializer, OpaQueryPathMixin):
    show_capabilities = ['edit', 'delete']

    class Meta:
        model = Organization
        fields = ('*', 'max_hosts', 'custom_virtualenv', 'default_environment', 'opa_query_path')
        read_only_fields = ('*', 'custom_virtualenv')

    def get_related(self, obj):
@@ -1341,7 +1202,6 @@ class OrganizationSerializer(BaseSerializer):
                admins=self.reverse('api:organization_admins_list', kwargs={'pk': obj.pk}),
                teams=self.reverse('api:organization_teams_list', kwargs={'pk': obj.pk}),
                credentials=self.reverse('api:organization_credential_list', kwargs={'pk': obj.pk}),
                applications=self.reverse('api:organization_applications_list', kwargs={'pk': obj.pk}),
                activity_stream=self.reverse('api:organization_activity_stream_list', kwargs={'pk': obj.pk}),
                notification_templates=self.reverse('api:organization_notification_templates_list', kwargs={'pk': obj.pk}),
                notification_templates_started=self.reverse('api:organization_notification_templates_started_list', kwargs={'pk': obj.pk}),

@@ -1681,7 +1541,7 @@ class LabelsListMixin(object):
        return res


class InventorySerializer(LabelsListMixin, BaseSerializerWithVariables, OpaQueryPathMixin):
    show_capabilities = ['edit', 'delete', 'adhoc', 'copy']
    capabilities_prefetch = ['admin', 'adhoc', {'copy': 'organization.inventory_admin'}]

@@ -1702,6 +1562,7 @@ class InventorySerializer(LabelsListMixin, BaseSerializerWithVariables):
            'inventory_sources_with_failures',
            'pending_deletion',
            'prevent_instance_group_fallback',
            'opa_query_path',
        )

    def get_related(self, obj):
@@ -1771,8 +1632,68 @@ class InventorySerializer(LabelsListMixin, BaseSerializerWithVariables):

        if kind == 'smart' and not host_filter:
            raise serializers.ValidationError({'host_filter': _('Smart inventories must specify host_filter')})

        return super(InventorySerializer, self).validate(attrs)

    @staticmethod
    def _update_variables(variables, inventory_id):
        """
        Update the inventory variables of the 'all'-group.

        The variables field contains vars from the inventory dialog, hence
        representing the "all"-group variables.

        Since this is not an update from an inventory source, we update the
        variables when the inventory details form is saved.

        A user edit on the inventory variables is considered a reset of the
        variables update history. In particular, if the user removes a variable
        by editing the inventory variables field, the variable is not supposed
        to reappear with a value from a previous inventory source update.

        We achieve this by forcing `reset=True` on such an update.

        As a side effect, variables which have been set by source updates and
        have survived a user edit (i.e. they have not been deleted from the
        variables field) will be assumed to originate from the user edit and are
        thus no longer deleted from the inventory when they are removed from
        their original source!

        Note that we use the inventory source id -1 for user-edit updates
        because a regular inventory source cannot have an id of -1, since
        PostgreSQL assigns pk's starting from 1 (if this assumption doesn't hold
        true, we have to assign another special value for invsrc_id).

        :param str variables: The variables as plain text in yaml or json
            format.
        :param int inventory_id: The primary key of the related inventory
            object.
        """
        variables_dict = parse_yaml_or_json(variables, silent_failure=False)
        logger.debug(f"InventorySerializer._update_variables: {inventory_id=} {variables_dict=}, {variables=}")
        update_group_variables(
            group_id=None,  # `None` denotes the 'all' group (which doesn't have a pk).
            newvars=variables_dict,
            dbvars=None,
            invsrc_id=-1,
            inventory_id=inventory_id,
            reset=True,
        )

    def create(self, validated_data):
        """Called when a new inventory has to be created."""
        logger.debug(f"InventorySerializer.create({validated_data=}) >>>>")
        obj = super().create(validated_data)
        self._update_variables(validated_data.get("variables") or "", obj.id)
        return obj

    def update(self, obj, validated_data):
        """Called when an existing inventory is updated."""
        logger.debug(f"InventorySerializer.update({validated_data=}) >>>>")
        obj = super().update(obj, validated_data)
        self._update_variables(validated_data.get("variables") or "", obj.id)
        return obj


class ConstructedFieldMixin(serializers.Field):
    def get_attribute(self, instance):

@@ -1814,7 +1735,7 @@ class ConstructedInventorySerializer(InventorySerializer):
        required=False,
        allow_null=True,
        min_value=0,
        max_value=5,
        default=None,
        help_text=_('The verbosity level for the related auto-created inventory source, special to constructed inventory'),
    )
@@ -2062,10 +1983,12 @@ class GroupSerializer(BaseSerializerWithVariables):
        return res

    def validate(self, attrs):
        # Do not allow the group name to conflict with an existing host name.
        name = force_str(attrs.get('name', self.instance and self.instance.name or ''))
        inventory = attrs.get('inventory', self.instance and self.instance.inventory or '')
        if Host.objects.filter(name=name, inventory=inventory).exists():
            raise serializers.ValidationError(_('A Host with that name already exists.'))
        return super(GroupSerializer, self).validate(attrs)

    def validate_name(self, value):
@@ -2350,6 +2273,7 @@ class GroupVariableDataSerializer(BaseVariableDataSerializer):

class InventorySourceOptionsSerializer(BaseSerializer):
    credential = DeprecatedCredentialField(help_text=_('Cloud credential to use for inventory updates.'))
    source = serializers.ChoiceField(choices=[])

    class Meta:
        fields = (

@@ -2371,6 +2295,14 @@ class InventorySourceOptionsSerializer(BaseSerializer):
        )
        read_only_fields = ('*', 'custom_virtualenv')

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

        if 'source' in self.fields:
            source_options = load_combined_inventory_source_options()

            self.fields['source'].choices = [(plugin, description) for plugin, description in source_options.items()]

    def get_related(self, obj):
        res = super(InventorySourceOptionsSerializer, self).get_related(obj)
        if obj.credential:  # TODO: remove when 'credential' field is removed
@@ -2907,7 +2839,7 @@ class ResourceAccessListElementSerializer(UserSerializer):
                {
                    "role": {
                        "id": None,
                        "name": _("Platform Auditor"),
                        "description": _("Can view all aspects of the system"),
                        "user_capabilities": {"unattach": False},
                    },
@@ -3095,11 +3027,6 @@ class CredentialSerializer(BaseSerializer):
                ret.remove(field)
        return ret

    def validate_organization(self, org):
        if self.instance and (not self.instance.managed) and self.instance.credential_type.kind == 'galaxy' and org is None:
            raise serializers.ValidationError(_("Galaxy credentials must be owned by an Organization."))
        return org

    def validate_credential_type(self, credential_type):
        if self.instance and credential_type.pk != self.instance.credential_type.pk:
            for related_objects in (

@@ -3175,9 +3102,6 @@ class CredentialSerializerCreate(CredentialSerializer):
        if attrs.get('team'):
            attrs['organization'] = attrs['team'].organization

        if 'credential_type' in attrs and attrs['credential_type'].kind == 'galaxy' and list(owner_fields) != ['organization']:
            raise serializers.ValidationError({"organization": _("Galaxy credentials must be owned by an Organization.")})

        return super(CredentialSerializerCreate, self).validate(attrs)

    def create(self, validated_data):
@@ -3395,6 +3319,7 @@ class JobTemplateSerializer(JobTemplateMixin, UnifiedJobTemplateSerializer, JobO
            'webhook_service',
            'webhook_credential',
            'prevent_instance_group_fallback',
            'opa_query_path',
        )
        read_only_fields = ('*', 'custom_virtualenv')

@@ -3596,11 +3521,17 @@ class JobRelaunchSerializer(BaseSerializer):
        choices=[('all', _('No change to job limit')), ('failed', _('All failed and unreachable hosts'))],
        write_only=True,
    )
    job_type = serializers.ChoiceField(
        required=False,
        allow_null=True,
        choices=NEW_JOB_TYPE_CHOICES,
        write_only=True,
    )
    credential_passwords = VerbatimField(required=True, write_only=True)

    class Meta:
        model = Job
        fields = ('passwords_needed_to_start', 'retry_counts', 'hosts', 'job_type', 'credential_passwords')

    def validate_credential_passwords(self, value):
        pnts = self.instance.passwords_needed_to_start
@@ -5550,7 +5481,7 @@ class ScheduleSerializer(LaunchConfigurationBaseSerializer, SchedulePreviewSeria
        return summary_fields

    def validate_unified_job_template(self, value):
        if type(value) == InventorySource and value.source not in load_combined_inventory_source_options():
            raise serializers.ValidationError(_('Inventory Source must be a cloud resource.'))
        elif type(value) == Project and value.scm_type == '':
            raise serializers.ValidationError(_('Manual Project cannot have a schedule set.'))
@@ -6059,6 +5990,34 @@ class InstanceGroupSerializer(BaseSerializer):
            raise serializers.ValidationError(_('Only Kubernetes credentials can be associated with an Instance Group'))
        return value

    def validate_pod_spec_override(self, value):
        if not value:
            return value

        # value should be empty for non-container groups
        if self.instance and not self.instance.is_container_group:
            raise serializers.ValidationError(_('pod_spec_override is only valid for container groups'))

        pod_spec_override_json = {}
        # Detect whether the value is YAML or JSON; if it is YAML, convert it to a dict.
        try:
            # convert yaml to json
            pod_spec_override_json = yaml.safe_load(value)
        except yaml.YAMLError:
            try:
                pod_spec_override_json = json.loads(value)
            except json.JSONDecodeError:
                raise serializers.ValidationError(_('pod_spec_override must be valid yaml or json'))

        # Validate that the pod spec does not enable automountServiceAccountToken.
        spec = pod_spec_override_json.get('spec', {})
        automount_service_account_token = spec.get('automountServiceAccountToken', False)

        if automount_service_account_token:
            raise serializers.ValidationError(_('automountServiceAccountToken is not allowed for security reasons'))

        return value

    def validate(self, attrs):
        attrs = super(InstanceGroupSerializer, self).validate(attrs)

@@ -6124,8 +6083,6 @@ class ActivityStreamSerializer(BaseSerializer):
        ('workflow_job_template_node', ('id', 'unified_job_template_id')),
        ('label', ('id', 'name', 'organization_id')),
        ('notification', ('id', 'status', 'notification_type', 'notification_template_id')),
        ('o_auth2_access_token', ('id', 'user_id', 'description', 'application_id', 'scope')),
        ('o_auth2_application', ('id', 'name', 'description')),
        ('credential_type', ('id', 'name', 'description', 'kind', 'managed')),
        ('ad_hoc_command', ('id', 'name', 'status', 'limit')),
        ('workflow_approval', ('id', 'name', 'unified_job_id')),

@@ -1,62 +1,54 @@
import warnings

from rest_framework.permissions import AllowAny

from drf_yasg import openapi
from drf_yasg.inspectors import SwaggerAutoSchema
from drf_yasg.views import get_schema_view


class CustomSwaggerAutoSchema(SwaggerAutoSchema):
    """Custom SwaggerAutoSchema to add swagger_topic to tags."""

    def get_tags(self, operation_keys=None):
        tags = []
        try:
            if hasattr(self.view, 'get_serializer'):
                serializer = self.view.get_serializer()
            else:
                serializer = None
        except Exception:
            serializer = None
            warnings.warn(
                '{}.get_serializer() raised an exception during '
                'schema generation. Serializer fields will not be '
                'generated for {}.'.format(self.view.__class__.__name__, operation_keys)
            )

        # auto-generate a topic/tag for the serializer based on its model
        if hasattr(self.view, 'swagger_topic'):
            tags.append(str(self.view.swagger_topic).title())
        elif serializer and hasattr(serializer, 'Meta'):
            tags.append(str(serializer.Meta.model._meta.verbose_name_plural).title())
        elif hasattr(self.view, 'model'):
            tags.append(str(self.view.model._meta.verbose_name_plural).title())
        else:
            tags = ['api']  # Fallback to default value

        if not tags:
            warnings.warn(f'Could not determine tags for {self.view.__class__.__name__}')
        return tags

    def is_deprecated(self):
        """Return `True` if this operation is to be marked as deprecated."""
        return getattr(self.view, 'deprecated', False)


schema_view = get_schema_view(
    openapi.Info(
        title='AWX API',
        default_version='v2',
        description='AWX API Documentation',
        terms_of_service='https://www.google.com/policies/terms/',
        contact=openapi.Contact(email='contact@snippets.local'),
        license=openapi.License(name='Apache License'),
    ),
    public=True,
    permission_classes=[AllowAny],
@@ -1,114 +0,0 @@
# Token Handling using OAuth2

This page lists the OAuth 2 utility endpoints used for authorization, token refresh, and revocation.
Note that endpoints other than `/api/o/authorize/` are not meant to be used in browsers and do not
support HTTP GET. The endpoints here strictly follow the
[RFC specification for OAuth 2](https://tools.ietf.org/html/rfc6749), so please use that for detailed
reference. Note that the AWX net location defaults to `http://localhost:8013` in these examples:


## Create Token for an Application using Authorization code grant type
Given an application "AuthCodeApp" of grant type `authorization-code`,
from the client app, the user makes a GET to the Authorize endpoint with

* `response_type`
* `client_id`
* `redirect_uris`
* `scope`

AWX will respond with the authorization `code` and `state`
to the redirect_uri specified in the application. The client application will then make a POST to the
`api/o/token/` endpoint on AWX with

* `code`
* `client_id`
* `client_secret`
* `grant_type`
* `redirect_uri`

AWX will respond with the `access_token`, `token_type`, `refresh_token`, and `expires_in`. For more
information on testing this flow, refer to [django-oauth-toolkit](http://django-oauth-toolkit.readthedocs.io/en/latest/tutorial/tutorial_01.html#test-your-authorization-server).
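
For illustration, here is a minimal sketch of the two steps with `curl`. The `<client_id>`, `<client_secret>`, `<code>`, and `<redirect_uri>` values are placeholders, and the authorize request is normally opened in a browser so the user can log in and approve access:

```bash
# Step 1: request an authorization code (interactive, in the user's browser).
curl -X GET \
  "http://localhost:8013/api/o/authorize/?response_type=code&client_id=<client_id>&scope=read" -i

# Step 2: exchange the returned code for an access token and refresh token.
curl -X POST \
  -H "Content-Type: application/x-www-form-urlencoded" \
  -d "grant_type=authorization_code&code=<code>&redirect_uri=<redirect_uri>" \
  -u "<client_id>:<client_secret>" \
  http://localhost:8013/api/o/token/ -i
```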

## Create Token for an Application using Password grant type

Logging in is not required for the `password` grant type, so a simple `curl` can be used to acquire a personal access token
via `/api/o/token/` with

* `grant_type`: Required to be "password"
* `username`
* `password`
* `client_id`: Associated application must have grant_type "password"
* `client_secret`

For example:

```bash
curl -X POST \
  -H "Content-Type: application/x-www-form-urlencoded" \
  -d "grant_type=password&username=<username>&password=<password>&scope=read" \
  -u "gwSPoasWSdNkMDtBN3Hu2WYQpPWCO9SwUEsKK22l:fI6ZpfocHYBGfm1tP92r0yIgCyfRdDQt0Tos9L8a4fNsJjQQMwp9569eIaUBsaVDgt2eiwOGe0bg5m5vCSstClZmtdy359RVx2rQK5YlIWyPlrolpt2LEpVeKXWaiybo" \
  http://localhost:8013/api/o/token/ -i
```

In the above POST request, the parameters `username` and `password` are the username and password of the related
AWX user of the underlying application, and the authentication information is of the format
`<client_id>:<client_secret>`, where `client_id` and `client_secret` are the corresponding fields of the
underlying application.

Upon success, the access token, refresh token, and other information are given in the response body in JSON
format:

```text
{
    "access_token": "9epHOqHhnXUcgYK8QanOmUQPSgX92g",
    "token_type": "Bearer",
    "expires_in": 31536000000,
    "refresh_token": "jMRX6QvzOTf046KHee3TU5mT3nyXsz",
    "scope": "read"
}
```
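
The returned `access_token` can then be used to authenticate subsequent API requests. A minimal sketch, reusing the token value from the example response above:

```bash
curl -H "Authorization: Bearer 9epHOqHhnXUcgYK8QanOmUQPSgX92g" \
  http://localhost:8013/api/v2/me/ -i
```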

## Refresh an existing access token

The `/api/o/token/` endpoint is used for refreshing the access token:
```bash
curl -X POST \
  -H "Content-Type: application/x-www-form-urlencoded" \
  -d "grant_type=refresh_token&refresh_token=AL0NK9TTpv0qp54dGbC4VUZtsZ9r8z" \
  -u "gwSPoasWSdNkMDtBN3Hu2WYQpPWCO9SwUEsKK22l:fI6ZpfocHYBGfm1tP92r0yIgCyfRdDQt0Tos9L8a4fNsJjQQMwp9569eIaUBsaVDgt2eiwOGe0bg5m5vCSstClZmtdy359RVx2rQK5YlIWyPlrolpt2LEpVeKXWaiybo" \
  http://localhost:8013/api/o/token/ -i
```
In the above POST request, `refresh_token` is provided by the `refresh_token` field of the access token
above. The authentication information is of the format `<client_id>:<client_secret>`, where `client_id`
and `client_secret` are the corresponding fields of the underlying related application of the access token.

Upon success, the new (refreshed) access token with the same scope information as the previous one is
given in the response body in JSON format:
```text
{
    "access_token": "NDInWxGJI4iZgqpsreujjbvzCfJqgR",
    "token_type": "Bearer",
    "expires_in": 31536000000,
    "refresh_token": "DqOrmz8bx3srlHkZNKmDpqA86bnQkT",
    "scope": "read write"
}
```
Internally, the refresh operation deletes the existing token, and a new token is created immediately
afterwards with information such as scope and related application identical to the original one. You can
verify this by checking that the new token is present at the `/api/v2/tokens/` endpoint.
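
For example, a quick check with `curl` (a sketch; substitute any valid access token you hold):

```bash
curl -H "Authorization: Bearer <access_token>" \
  http://localhost:8013/api/v2/tokens/ -i
```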

## Revoke an access token
Revoking an access token is the same as deleting the token resource object.
Revoking is done by POSTing to `/api/o/revoke_token/` with the token to revoke as a parameter:

```bash
curl -X POST -d "token=rQONsve372fQwuc2pn76k3IHDCYpi7" \
  -H "Content-Type: application/x-www-form-urlencoded" \
  -u "gwSPoasWSdNkMDtBN3Hu2WYQpPWCO9SwUEsKK22l:fI6ZpfocHYBGfm1tP92r0yIgCyfRdDQt0Tos9L8a4fNsJjQQMwp9569eIaUBsaVDgt2eiwOGe0bg5m5vCSstClZmtdy359RVx2rQK5YlIWyPlrolpt2LEpVeKXWaiybo" \
  http://localhost:8013/api/o/revoke_token/ -i
```
`200 OK` means a successful delete.
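
As a quick sanity check (a sketch, assuming the token from the example above), a request made with the revoked token should now be rejected with `401 Unauthorized`:

```bash
curl -H "Authorization: Bearer rQONsve372fQwuc2pn76k3IHDCYpi7" \
  http://localhost:8013/api/v2/me/ -i
```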

@@ -1,27 +0,0 @@
# Copyright (c) 2017 Ansible, Inc.
# All Rights Reserved.

from django.urls import re_path

from awx.api.views import (
    OAuth2ApplicationList,
    OAuth2ApplicationDetail,
    ApplicationOAuth2TokenList,
    OAuth2ApplicationActivityStreamList,
    OAuth2TokenList,
    OAuth2TokenDetail,
    OAuth2TokenActivityStreamList,
)


urls = [
    re_path(r'^applications/$', OAuth2ApplicationList.as_view(), name='o_auth2_application_list'),
    re_path(r'^applications/(?P<pk>[0-9]+)/$', OAuth2ApplicationDetail.as_view(), name='o_auth2_application_detail'),
    re_path(r'^applications/(?P<pk>[0-9]+)/tokens/$', ApplicationOAuth2TokenList.as_view(), name='o_auth2_application_token_list'),
    re_path(r'^applications/(?P<pk>[0-9]+)/activity_stream/$', OAuth2ApplicationActivityStreamList.as_view(), name='o_auth2_application_activity_stream_list'),
    re_path(r'^tokens/$', OAuth2TokenList.as_view(), name='o_auth2_token_list'),
    re_path(r'^tokens/(?P<pk>[0-9]+)/$', OAuth2TokenDetail.as_view(), name='o_auth2_token_detail'),
    re_path(r'^tokens/(?P<pk>[0-9]+)/activity_stream/$', OAuth2TokenActivityStreamList.as_view(), name='o_auth2_token_activity_stream_list'),
]

__all__ = ['urls']
@@ -1,45 +0,0 @@
# Copyright (c) 2017 Ansible, Inc.
# All Rights Reserved.
from datetime import timedelta

from django.utils.timezone import now
from django.conf import settings
from django.urls import re_path

from oauthlib import oauth2
from oauth2_provider import views

from awx.main.models import RefreshToken
from awx.api.views.root import ApiOAuthAuthorizationRootView


class TokenView(views.TokenView):
    def create_token_response(self, request):
        # Django OAuth2 Toolkit has a bug whereby refresh tokens are *never*
        # properly expired (ugh):
        #
        # https://github.com/jazzband/django-oauth-toolkit/issues/746
        #
        # This code detects and auto-expires them on refresh grant
        # requests.
        if request.POST.get('grant_type') == 'refresh_token' and 'refresh_token' in request.POST:
            refresh_token = RefreshToken.objects.filter(token=request.POST['refresh_token']).first()
            if refresh_token:
                expire_seconds = settings.OAUTH2_PROVIDER.get('REFRESH_TOKEN_EXPIRE_SECONDS', 0)
                if refresh_token.created + timedelta(seconds=expire_seconds) < now():
                    return request.build_absolute_uri(), {}, 'The refresh token has expired.', '403'
        try:
            return super(TokenView, self).create_token_response(request)
        except oauth2.AccessDeniedError as e:
            return request.build_absolute_uri(), {}, str(e), '403'


urls = [
    re_path(r'^$', ApiOAuthAuthorizationRootView.as_view(), name='oauth_authorization_root_view'),
    re_path(r"^authorize/$", views.AuthorizationView.as_view(), name="authorize"),
    re_path(r"^token/$", TokenView.as_view(), name="token"),
    re_path(r"^revoke_token/$", views.RevokeTokenView.as_view(), name="revoke-token"),
]


__all__ = ['urls']
@@ -25,7 +25,7 @@ from awx.api.views.organization import (
    OrganizationObjectRolesList,
    OrganizationAccessList,
)
from awx.api.views import OrganizationCredentialList


urls = [
@@ -66,7 +66,6 @@ urls = [
    re_path(r'^(?P<pk>[0-9]+)/galaxy_credentials/$', OrganizationGalaxyCredentialsList.as_view(), name='organization_galaxy_credentials_list'),
    re_path(r'^(?P<pk>[0-9]+)/object_roles/$', OrganizationObjectRolesList.as_view(), name='organization_object_roles_list'),
    re_path(r'^(?P<pk>[0-9]+)/access_list/$', OrganizationAccessList.as_view(), name='organization_access_list'),
    re_path(r'^(?P<pk>[0-9]+)/applications/$', OrganizationApplicationList.as_view(), name='organization_applications_list'),
]

__all__ = ['urls']

@@ -3,7 +3,7 @@

from django.urls import re_path

from awx.api.views import RoleList, RoleDetail, RoleUsersList, RoleTeamsList


urls = [
@@ -11,8 +11,6 @@ urls = [
    re_path(r'^(?P<pk>[0-9]+)/$', RoleDetail.as_view(), name='role_detail'),
    re_path(r'^(?P<pk>[0-9]+)/users/$', RoleUsersList.as_view(), name='role_users_list'),
    re_path(r'^(?P<pk>[0-9]+)/teams/$', RoleTeamsList.as_view(), name='role_teams_list'),
    re_path(r'^(?P<pk>[0-9]+)/parents/$', RoleParentsList.as_view(), name='role_parents_list'),
    re_path(r'^(?P<pk>[0-9]+)/children/$', RoleChildrenList.as_view(), name='role_children_list'),
]

__all__ = ['urls']

@@ -15,7 +15,6 @@ from awx.api.views.root import (
    ApiV2AttachView,
)
from awx.api.views import (
    AuthView,
    UserMeList,
    DashboardView,
    DashboardJobsGraphView,
@@ -26,10 +25,6 @@ from awx.api.views import (
    JobTemplateCredentialsList,
    SchedulePreview,
    ScheduleZoneInfo,
    OAuth2ApplicationList,
    OAuth2TokenList,
    ApplicationOAuth2TokenList,
    OAuth2ApplicationDetail,
    HostMetricSummaryMonthlyList,
)

@@ -80,8 +75,6 @@ from .schedule import urls as schedule_urls
from .activity_stream import urls as activity_stream_urls
from .instance import urls as instance_urls
from .instance_group import urls as instance_group_urls
from .oauth2 import urls as oauth2_urls
from .oauth2_root import urls as oauth2_root_urls
from .workflow_approval_template import urls as workflow_approval_template_urls
from .workflow_approval import urls as workflow_approval_urls
from .analytics import urls as analytics_urls
@@ -96,17 +89,11 @@ v2_urls = [
    re_path(r'^job_templates/(?P<pk>[0-9]+)/credentials/$', JobTemplateCredentialsList.as_view(), name='job_template_credentials_list'),
    re_path(r'^schedules/preview/$', SchedulePreview.as_view(), name='schedule_rrule'),
    re_path(r'^schedules/zoneinfo/$', ScheduleZoneInfo.as_view(), name='schedule_zoneinfo'),
    re_path(r'^applications/$', OAuth2ApplicationList.as_view(), name='o_auth2_application_list'),
    re_path(r'^applications/(?P<pk>[0-9]+)/$', OAuth2ApplicationDetail.as_view(), name='o_auth2_application_detail'),
    re_path(r'^applications/(?P<pk>[0-9]+)/tokens/$', ApplicationOAuth2TokenList.as_view(), name='application_o_auth2_token_list'),
    re_path(r'^tokens/$', OAuth2TokenList.as_view(), name='o_auth2_token_list'),
    re_path(r'^', include(oauth2_urls)),
    re_path(r'^metrics/$', MetricsView.as_view(), name='metrics_view'),
    re_path(r'^ping/$', ApiV2PingView.as_view(), name='api_v2_ping_view'),
    re_path(r'^config/$', ApiV2ConfigView.as_view(), name='api_v2_config_view'),
    re_path(r'^config/subscriptions/$', ApiV2SubscriptionView.as_view(), name='api_v2_subscription_view'),
    re_path(r'^config/attach/$', ApiV2AttachView.as_view(), name='api_v2_attach_view'),
    re_path(r'^auth/$', AuthView.as_view()),
    re_path(r'^me/$', UserMeList.as_view(), name='user_me_list'),
    re_path(r'^dashboard/$', DashboardView.as_view(), name='dashboard_view'),
    re_path(r'^dashboard/graphs/jobs/$', DashboardJobsGraphView.as_view(), name='dashboard_jobs_graph_view'),
@@ -166,7 +153,6 @@ urlpatterns = [
    re_path(r'^(?P<version>(v2))/', include(v2_urls)),
    re_path(r'^login/$', LoggedLoginView.as_view(template_name='rest_framework/login.html', extra_context={'inside_login_context': True}), name='login'),
    re_path(r'^logout/$', LoggedLogoutView.as_view(next_page='/api/', redirect_field_name='next'), name='logout'),
    re_path(r'^o/', include(oauth2_root_urls)),
]
if MODE == 'development':
    # Only include these if we are in the development environment

@@ -14,10 +14,6 @@ from awx.api.views import (
    UserRolesList,
    UserActivityStreamList,
    UserAccessList,
    OAuth2ApplicationList,
    OAuth2UserTokenList,
    UserPersonalTokenList,
    UserAuthorizedTokenList,
)

urls = [
@@ -31,10 +27,6 @@ urls = [
    re_path(r'^(?P<pk>[0-9]+)/roles/$', UserRolesList.as_view(), name='user_roles_list'),
    re_path(r'^(?P<pk>[0-9]+)/activity_stream/$', UserActivityStreamList.as_view(), name='user_activity_stream_list'),
    re_path(r'^(?P<pk>[0-9]+)/access_list/$', UserAccessList.as_view(), name='user_access_list'),
    re_path(r'^(?P<pk>[0-9]+)/applications/$', OAuth2ApplicationList.as_view(), name='o_auth2_application_list'),
    re_path(r'^(?P<pk>[0-9]+)/tokens/$', OAuth2UserTokenList.as_view(), name='o_auth2_token_list'),
    re_path(r'^(?P<pk>[0-9]+)/authorized_tokens/$', UserAuthorizedTokenList.as_view(), name='user_authorized_token_list'),
    re_path(r'^(?P<pk>[0-9]+)/personal_tokens/$', UserPersonalTokenList.as_view(), name='user_personal_token_list'),
]

__all__ = ['urls']

@@ -36,7 +36,7 @@ from django.utils.translation import gettext_lazy as _
# Django REST Framework
from rest_framework.exceptions import APIException, PermissionDenied, ParseError, NotFound
from rest_framework.parsers import FormParser
from rest_framework.permissions import IsAuthenticated
from rest_framework.renderers import JSONRenderer, StaticHTMLRenderer
from rest_framework.response import Response
from rest_framework.settings import api_settings
@@ -50,19 +50,12 @@ from rest_framework_yaml.renderers import YAMLRenderer
# ansi2html
from ansi2html import Ansi2HTMLConverter

# Python Social Auth
from social_core.backends.utils import load_backends

# Django OAuth Toolkit
from oauth2_provider.models import get_access_token_model

import pytz
from wsgiref.util import FileWrapper

# django-ansible-base
from ansible_base.lib.utils.requests import get_remote_hosts
from ansible_base.resource_registry.shared_types import OrganizationType, TeamType, UserType
from ansible_base.rbac.models import RoleEvaluation

# AWX
from awx.main.tasks.system import send_notifications, update_inventory_computed_fields
@@ -91,7 +84,6 @@ from awx.api.generics import (
from awx.api.views.labels import LabelSubListCreateAttachDetachView
from awx.api.versioning import reverse
from awx.main import models
from awx.main.models.rbac import get_role_definition
from awx.main.utils import (
    camelcase_to_underscore,
    extract_ansible_vars,
@@ -103,6 +95,7 @@ from awx.main.utils import (
)
from awx.main.utils.encryption import encrypt_value
from awx.main.utils.filters import SmartFilter
from awx.main.utils.plugins import compute_cloud_inventory_sources
from awx.main.redact import UriCleaner
from awx.api.permissions import (
    JobTemplateCallbackPermission,
@@ -676,116 +669,16 @@ class ScheduleUnifiedJobsList(SubListAPIView):
    name = _('Schedule Jobs List')


class AuthView(APIView):
    '''List enabled single-sign-on endpoints'''

    authentication_classes = []
    permission_classes = (AllowAny,)
    swagger_topic = 'System Configuration'

    def get(self, request):
        from rest_framework.reverse import reverse

        data = OrderedDict()
        err_backend, err_message = request.session.get('social_auth_error', (None, None))
        auth_backends = list(load_backends(settings.AUTHENTICATION_BACKENDS, force_load=True).items())
        # Return auth backends in consistent order: Google, GitHub, SAML.
        auth_backends.sort(key=lambda x: 'g' if x[0] == 'google-oauth2' else x[0])
        for name, backend in auth_backends:
            login_url = reverse('social:begin', args=(name,))
            complete_url = request.build_absolute_uri(reverse('social:complete', args=(name,)))
            backend_data = {'login_url': login_url, 'complete_url': complete_url}
            if name == 'saml':
                backend_data['metadata_url'] = reverse('sso:saml_metadata')
                for idp in sorted(settings.SOCIAL_AUTH_SAML_ENABLED_IDPS.keys()):
                    saml_backend_data = dict(backend_data.items())
                    saml_backend_data['login_url'] = '%s?idp=%s' % (login_url, idp)
                    full_backend_name = '%s:%s' % (name, idp)
                    if (err_backend == full_backend_name or err_backend == name) and err_message:
                        saml_backend_data['error'] = err_message
                    data[full_backend_name] = saml_backend_data
            else:
                if err_backend == name and err_message:
                    backend_data['error'] = err_message
                data[name] = backend_data
        return Response(data)


def immutablesharedfields(cls):
    '''
    Class decorator to prevent modifying shared resources when ALLOW_LOCAL_RESOURCE_MANAGEMENT setting is set to False.

    Works by overriding these view methods:
    - create
    - delete
    - perform_update
    create and delete are overridden to raise a PermissionDenied exception.
    perform_update is overridden to check if any shared fields are being modified,
    and raise a PermissionDenied exception if so.
    '''
    # create instead of perform_create because some of our views
    # override create instead of perform_create
    if hasattr(cls, 'create'):
        cls.original_create = cls.create

        @functools.wraps(cls.create)
        def create_wrapper(*args, **kwargs):
            if settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT:
                return cls.original_create(*args, **kwargs)
            raise PermissionDenied({'detail': _('Creation of this resource is not allowed. Create this resource via the platform ingress.')})

        cls.create = create_wrapper

    if hasattr(cls, 'delete'):
        cls.original_delete = cls.delete

        @functools.wraps(cls.delete)
        def delete_wrapper(*args, **kwargs):
            if settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT:
                return cls.original_delete(*args, **kwargs)
            raise PermissionDenied({'detail': _('Deletion of this resource is not allowed. Delete this resource via the platform ingress.')})

        cls.delete = delete_wrapper

    if hasattr(cls, 'perform_update'):
        cls.original_perform_update = cls.perform_update

        @functools.wraps(cls.perform_update)
        def update_wrapper(*args, **kwargs):
            if not settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT:
                view, serializer = args
                instance = view.get_object()
                if instance:
                    if isinstance(instance, models.Organization):
                        shared_fields = OrganizationType._declared_fields.keys()
                    elif isinstance(instance, models.User):
                        shared_fields = UserType._declared_fields.keys()
                    elif isinstance(instance, models.Team):
                        shared_fields = TeamType._declared_fields.keys()
                    attrs = serializer.validated_data
                    for field in shared_fields:
                        if field in attrs and getattr(instance, field) != attrs[field]:
                            raise PermissionDenied({field: _(f"Cannot change shared field '{field}'. Alter this field via the platform ingress.")})
            return cls.original_perform_update(*args, **kwargs)

        cls.perform_update = update_wrapper

    return cls


@immutablesharedfields
class TeamList(ListCreateAPIView):
    model = models.Team
    serializer_class = serializers.TeamSerializer


@immutablesharedfields
class TeamDetail(RetrieveUpdateDestroyAPIView):
    model = models.Team
    serializer_class = serializers.TeamSerializer


@immutablesharedfields
class TeamUsersList(BaseUsersList):
    model = models.User
    serializer_class = serializers.UserSerializer
@@ -827,9 +720,19 @@ class TeamRolesList(SubListAttachDetachAPIView):
        team = get_object_or_404(models.Team, pk=self.kwargs['pk'])
        credential_content_type = ContentType.objects.get_for_model(models.Credential)
        if role.content_type == credential_content_type:
            if not role.content_object.organization:
                data = dict(
                    msg=_("You cannot grant access to a credential that is not assigned to an organization (private credentials cannot be assigned to teams)")
                )
                return Response(data, status=status.HTTP_400_BAD_REQUEST)
            elif role.content_object.organization.id != team.organization.id:
                if not request.user.is_superuser:
                    data = dict(
                        msg=_(
                            "You cannot grant a team access to a credential in a different organization. Only superusers can grant cross-organization credential access to teams"
                        )
                    )
                    return Response(data, status=status.HTTP_400_BAD_REQUEST)

        return super(TeamRolesList, self).post(request, *args, **kwargs)

@@ -856,17 +759,9 @@ class TeamProjectsList(SubListAPIView):
    def get_queryset(self):
        team = self.get_parent_object()
        self.check_parent_access(team)
        my_qs = self.model.accessible_objects(self.request.user, 'read_role')
        team_qs = models.Project.accessible_objects(team, 'read_role')
        return my_qs & team_qs


class TeamActivityStreamList(SubListAPIView):
@@ -981,13 +876,23 @@ class ProjectTeamsList(ListAPIView):
    serializer_class = serializers.TeamSerializer

    def get_queryset(self):
        parent = get_object_or_404(models.Project, pk=self.kwargs['pk'])
        if not self.request.user.can_access(models.Project, 'read', parent):
            raise PermissionDenied()

        project_ct = ContentType.objects.get_for_model(parent)
        team_ct = ContentType.objects.get_for_model(self.model)

        roles_on_project = models.Role.objects.filter(
            content_type=project_ct,
            object_id=parent.pk,
        )

        team_member_parent_roles = models.Role.objects.filter(children__in=roles_on_project, role_field='member_role', content_type=team_ct).distinct()

        team_ids = team_member_parent_roles.values_list('object_id', flat=True)
        my_qs = self.model.accessible_objects(self.request.user, 'read_role').filter(pk__in=team_ids)
        return my_qs


class ProjectSchedulesList(SubListCreateAPIView):
@@ -1167,7 +1072,6 @@ class ProjectCopy(CopyAPIView):
    copy_return_serializer_class = serializers.ProjectSerializer


@immutablesharedfields
class UserList(ListCreateAPIView):
    model = models.User
    serializer_class = serializers.UserSerializer

@@ -1185,121 +1089,6 @@ class UserMeList(ListAPIView):
        return self.model.objects.filter(pk=self.request.user.pk)


class OAuth2ApplicationList(ListCreateAPIView):
    name = _("OAuth 2 Applications")

    model = models.OAuth2Application
    serializer_class = serializers.OAuth2ApplicationSerializer
    swagger_topic = 'Authentication'


class OAuth2ApplicationDetail(RetrieveUpdateDestroyAPIView):
    name = _("OAuth 2 Application Detail")

    model = models.OAuth2Application
    serializer_class = serializers.OAuth2ApplicationSerializer
    swagger_topic = 'Authentication'

    def update_raw_data(self, data):
        data.pop('client_secret', None)
        return super(OAuth2ApplicationDetail, self).update_raw_data(data)


class ApplicationOAuth2TokenList(SubListCreateAPIView):
    name = _("OAuth 2 Application Tokens")

    model = models.OAuth2AccessToken
    serializer_class = serializers.OAuth2TokenSerializer
    parent_model = models.OAuth2Application
    relationship = 'oauth2accesstoken_set'
    parent_key = 'application'
    swagger_topic = 'Authentication'


class OAuth2ApplicationActivityStreamList(SubListAPIView):
    model = models.ActivityStream
    serializer_class = serializers.ActivityStreamSerializer
    parent_model = models.OAuth2Application
    relationship = 'activitystream_set'
    swagger_topic = 'Authentication'
    search_fields = ('changes',)


class OAuth2TokenList(ListCreateAPIView):
    name = _("OAuth2 Tokens")

    model = models.OAuth2AccessToken
    serializer_class = serializers.OAuth2TokenSerializer
    swagger_topic = 'Authentication'


class OAuth2UserTokenList(SubListCreateAPIView):
    name = _("OAuth2 User Tokens")

    model = models.OAuth2AccessToken
    serializer_class = serializers.OAuth2TokenSerializer
    parent_model = models.User
    relationship = 'main_oauth2accesstoken'
    parent_key = 'user'
    swagger_topic = 'Authentication'


class UserAuthorizedTokenList(SubListCreateAPIView):
    name = _("OAuth2 User Authorized Access Tokens")

    model = models.OAuth2AccessToken
    serializer_class = serializers.UserAuthorizedTokenSerializer
    parent_model = models.User
    relationship = 'oauth2accesstoken_set'
    parent_key = 'user'
    swagger_topic = 'Authentication'

    def get_queryset(self):
        return get_access_token_model().objects.filter(application__isnull=False, user=self.request.user)


class OrganizationApplicationList(SubListCreateAPIView):
    name = _("Organization OAuth2 Applications")

    model = models.OAuth2Application
    serializer_class = serializers.OAuth2ApplicationSerializer
    parent_model = models.Organization
    relationship = 'applications'
    parent_key = 'organization'
    swagger_topic = 'Authentication'


class UserPersonalTokenList(SubListCreateAPIView):
    name = _("OAuth2 Personal Access Tokens")

    model = models.OAuth2AccessToken
    serializer_class = serializers.UserPersonalTokenSerializer
    parent_model = models.User
    relationship = 'main_oauth2accesstoken'
    parent_key = 'user'
    swagger_topic = 'Authentication'

    def get_queryset(self):
        return get_access_token_model().objects.filter(application__isnull=True, user=self.request.user)


class OAuth2TokenDetail(RetrieveUpdateDestroyAPIView):
    name = _("OAuth Token Detail")

    model = models.OAuth2AccessToken
    serializer_class = serializers.OAuth2TokenDetailSerializer
    swagger_topic = 'Authentication'


class OAuth2TokenActivityStreamList(SubListAPIView):
    model = models.ActivityStream
    serializer_class = serializers.ActivityStreamSerializer
    parent_model = models.OAuth2AccessToken
    relationship = 'activitystream_set'
    swagger_topic = 'Authentication'
    search_fields = ('changes',)


class UserTeamsList(SubListAPIView):
    model = models.Team
    serializer_class = serializers.TeamSerializer

@@ -1339,14 +1128,6 @@ class UserRolesList(SubListAttachDetachAPIView):
        role = get_object_or_400(models.Role, pk=sub_id)

        content_types = ContentType.objects.get_for_models(models.Organization, models.Team, models.Credential)  # dict of {model: content_type}
        # Prevent the user from being associated with a team/org when ALLOW_LOCAL_RESOURCE_MANAGEMENT is False
        if not settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT:
            for model in [models.Organization, models.Team]:
                ct = content_types[model]
                if role.content_type == ct and role.role_field in ['member_role', 'admin_role']:
                    data = dict(msg=_(f"Cannot directly modify user membership to {ct.model}. Direct shared resource management disabled"))
                    return Response(data, status=status.HTTP_403_FORBIDDEN)

        credential_content_type = content_types[models.Credential]
        if role.content_type == credential_content_type:
            if 'disassociate' not in request.data and role.content_object.organization and user not in role.content_object.organization.member_role:
@@ -1381,7 +1162,6 @@ class UserOrganizationsList(OrganizationCountsMixin, SubListAPIView):
|
||||
model = models.Organization
|
||||
serializer_class = serializers.OrganizationSerializer
|
||||
parent_model = models.User
|
||||
relationship = 'organizations'
|
||||
|
||||
def get_queryset(self):
|
||||
parent = self.get_parent_object()
|
||||
@@ -1395,7 +1175,6 @@ class UserAdminOfOrganizationsList(OrganizationCountsMixin, SubListAPIView):
|
||||
model = models.Organization
|
||||
serializer_class = serializers.OrganizationSerializer
|
||||
parent_model = models.User
|
||||
relationship = 'admin_of_organizations'
|
||||
|
||||
def get_queryset(self):
|
||||
parent = self.get_parent_object()
|
||||
@@ -1419,7 +1198,6 @@ class UserActivityStreamList(SubListAPIView):
|
||||
return qs.filter(Q(actor=parent) | Q(user__in=[parent]))
|
||||
|
||||
|
||||
@immutablesharedfields
|
||||
class UserDetail(RetrieveUpdateDestroyAPIView):
|
||||
model = models.User
|
||||
serializer_class = serializers.UserSerializer
|
||||
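A minimal sketch of the membership guard in the UserRolesList hunk above; the helper and its arguments are illustrative stand-ins, not AWX API.

def membership_change_allowed(allow_local_resource_management, content_model_name, role_field):
    # When local resource management is off, organization/team member_role and
    # admin_role assignments are owned by the shared resource service, so
    # direct changes through this endpoint are rejected with HTTP 403.
    if allow_local_resource_management:
        return True
    return not (content_model_name in ('organization', 'team') and role_field in ('member_role', 'admin_role'))


# e.g. membership_change_allowed(False, 'team', 'member_role') -> False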
@@ -2234,9 +2012,9 @@ class InventorySourceNotificationTemplatesAnyList(SubListCreateAttachDetachAPIView):

    def post(self, request, *args, **kwargs):
        parent = self.get_parent_object()
        if parent.source not in models.CLOUD_INVENTORY_SOURCES:
        if parent.source not in compute_cloud_inventory_sources():
            return Response(
                dict(msg=_("Notification Templates can only be assigned when source is one of {}.").format(models.CLOUD_INVENTORY_SOURCES, parent.source)),
                dict(msg=_("Notification Templates can only be assigned when source is one of {}.").format(compute_cloud_inventory_sources(), parent.source)),
                status=status.HTTP_400_BAD_REQUEST,
            )
        return super(InventorySourceNotificationTemplatesAnyList, self).post(request, *args, **kwargs)
@@ -3590,6 +3368,7 @@ class JobRelaunch(RetrieveAPIView):

        copy_kwargs = {}
        retry_hosts = serializer.validated_data.get('hosts', None)
        job_type = serializer.validated_data.get('job_type', None)
        if retry_hosts and retry_hosts != 'all':
            if obj.status in ACTIVE_STATES:
                return Response(
@@ -3610,6 +3389,8 @@ class JobRelaunch(RetrieveAPIView):
                )
            copy_kwargs['limit'] = ','.join(retry_host_list)

        if job_type:
            copy_kwargs['job_type'] = job_type
        new_job = obj.copy_unified_job(**copy_kwargs)
        result = new_job.signal_start(**serializer.validated_data['credential_passwords'])
        if not result:
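A minimal sketch of the relaunch change above: an optional job_type from the relaunch request is copied onto the new job next to any retry-host limit. The helper is hypothetical; AWX does this inline in JobRelaunch.post.

def build_copy_kwargs(validated_data, retry_host_list=None):
    copy_kwargs = {}
    if retry_host_list:
        # Failed-host retries become a comma-separated limit expression.
        copy_kwargs['limit'] = ','.join(retry_host_list)
    job_type = validated_data.get('job_type')
    if job_type:
        copy_kwargs['job_type'] = job_type
    return copy_kwargs


# e.g. build_copy_kwargs({'job_type': 'check'}, ['host1', 'host2'])
# -> {'limit': 'host1,host2', 'job_type': 'check'}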
@@ -4391,13 +4172,6 @@ class RoleUsersList(SubListAttachDetachAPIView):
        role = self.get_parent_object()

        content_types = ContentType.objects.get_for_models(models.Organization, models.Team, models.Credential)  # dict of {model: content_type}
        if not settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT:
            for model in [models.Organization, models.Team]:
                ct = content_types[model]
                if role.content_type == ct and role.role_field in ['member_role', 'admin_role']:
                    data = dict(msg=_(f"Cannot directly modify user membership to {ct.model}. Direct shared resource management disabled"))
                    return Response(data, status=status.HTTP_403_FORBIDDEN)

        credential_content_type = content_types[models.Credential]
        if role.content_type == credential_content_type:
            if 'disassociate' not in request.data and role.content_object.organization and user not in role.content_object.organization.member_role:
@@ -4439,9 +4213,21 @@ class RoleTeamsList(SubListAttachDetachAPIView):

        credential_content_type = ContentType.objects.get_for_model(models.Credential)
        if role.content_type == credential_content_type:
            if not role.content_object.organization or role.content_object.organization.id != team.organization.id:
                data = dict(msg=_("You cannot grant credential access to a team when the Organization field isn't set, or belongs to a different organization"))
            # Private credentials (no organization) are never allowed for teams
            if not role.content_object.organization:
                data = dict(
                    msg=_("You cannot grant access to a credential that is not assigned to an organization (private credentials cannot be assigned to teams)")
                )
                return Response(data, status=status.HTTP_400_BAD_REQUEST)
            # Cross-organization credentials are only allowed for superusers
            elif role.content_object.organization.id != team.organization.id:
                if not request.user.is_superuser:
                    data = dict(
                        msg=_(
                            "You cannot grant a team access to a credential in a different organization. Only superusers can grant cross-organization credential access to teams"
                        )
                    )
                    return Response(data, status=status.HTTP_400_BAD_REQUEST)

        action = 'attach'
        if request.data.get('disassociate', None):
@@ -4461,34 +4247,6 @@ class RoleTeamsList(SubListAttachDetachAPIView):
        return Response(status=status.HTTP_204_NO_CONTENT)


class RoleParentsList(SubListAPIView):
    deprecated = True
    model = models.Role
    serializer_class = serializers.RoleSerializer
    parent_model = models.Role
    relationship = 'parents'
    permission_classes = (IsAuthenticated,)
    search_fields = ('role_field', 'content_type__model')

    def get_queryset(self):
        role = models.Role.objects.get(pk=self.kwargs['pk'])
        return models.Role.filter_visible_roles(self.request.user, role.parents.all())


class RoleChildrenList(SubListAPIView):
    deprecated = True
    model = models.Role
    serializer_class = serializers.RoleSerializer
    parent_model = models.Role
    relationship = 'children'
    permission_classes = (IsAuthenticated,)
    search_fields = ('role_field', 'content_type__model')

    def get_queryset(self):
        role = models.Role.objects.get(pk=self.kwargs['pk'])
        return models.Role.filter_visible_roles(self.request.user, role.children.all())


# Create view functions for all of the class-based views to simplify inclusion
# in URL patterns and reverse URL lookups, converting CamelCase names to
# lowercase_with_underscore (e.g. MyView.as_view() becomes my_view).
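A minimal sketch of the team-credential rules enforced in the RoleTeamsList hunk above; the function and its arguments are illustrative stand-ins for the role/team objects involved.

def can_grant_credential_to_team(credential_org_id, team_org_id, is_superuser):
    if credential_org_id is None:
        # Private credentials (no organization) can never be granted to teams.
        return False
    if credential_org_id != team_org_id:
        # Cross-organization grants are reserved for superusers.
        return is_superuser
    return True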
@@ -10,6 +10,7 @@ from awx.api.generics import APIView, Response
from awx.api.permissions import AnalyticsPermission
from awx.api.versioning import reverse
from awx.main.utils import get_awx_version
from awx.main.utils.analytics_proxy import OIDCClient
from rest_framework import status

from collections import OrderedDict
@@ -179,33 +180,59 @@ class AnalyticsGenericView(APIView):

        return Response(response.content, status=response.status_code)

    @staticmethod
    def _base_auth_request(request: requests.Request, method: str, url: str, user: str, pw: str, headers: dict[str, str]) -> requests.Response:
        response = requests.request(
            method,
            url,
            auth=(user, pw),
            verify=settings.INSIGHTS_CERT_PATH,
            params=getattr(request, 'query_params', {}),
            headers=headers,
            json=getattr(request, 'data', {}),
            timeout=(31, 31),
        )
        return response

    def _send_to_analytics(self, request, method):
        try:
            headers = self._request_headers(request)

            self._get_setting('INSIGHTS_TRACKING_STATE', False, ERROR_UPLOAD_NOT_ENABLED)
            url = self._get_analytics_url(request.path)
            rh_user = self._get_setting('REDHAT_USERNAME', None, ERROR_MISSING_USER)
            rh_password = self._get_setting('REDHAT_PASSWORD', None, ERROR_MISSING_PASSWORD)

            if method not in ["GET", "POST", "OPTIONS"]:
                return self._error_response(ERROR_UNSUPPORTED_METHOD, method, remote=False, status_code=status.HTTP_500_INTERNAL_SERVER_ERROR)
            else:
                response = requests.request(
            url = self._get_analytics_url(request.path)
            using_subscriptions_credentials = False
            try:
                rh_user = getattr(settings, 'REDHAT_USERNAME', None)
                rh_password = getattr(settings, 'REDHAT_PASSWORD', None)
                if not (rh_user and rh_password):
                    rh_user = self._get_setting('SUBSCRIPTIONS_CLIENT_ID', None, ERROR_MISSING_USER)
                    rh_password = self._get_setting('SUBSCRIPTIONS_CLIENT_SECRET', None, ERROR_MISSING_PASSWORD)
                    using_subscriptions_credentials = True

                client = OIDCClient(rh_user, rh_password)
                response = client.make_request(
                    method,
                    url,
                    auth=(rh_user, rh_password),
                    verify=settings.INSIGHTS_CERT_PATH,
                    params=request.query_params,
                    headers=headers,
                    json=request.data,
                    verify=settings.INSIGHTS_CERT_PATH,
                    params=getattr(request, 'query_params', {}),
                    json=getattr(request, 'data', {}),
                    timeout=(31, 31),
                )
            except requests.RequestException:
                # subscriptions credentials are not valid for basic auth, so just return 401
                if using_subscriptions_credentials:
                    response = Response(status=status.HTTP_401_UNAUTHORIZED)
                else:
                    logger.error("Automation Analytics API request failed, trying base auth method")
                    response = self._base_auth_request(request, method, url, rh_user, rh_password, headers)
            #
            # Missing or wrong user/pass
            #
            if response.status_code == status.HTTP_401_UNAUTHORIZED:
                text = (response.text or '').rstrip("\n")
                text = response.get('text', '').rstrip("\n")
                return self._error_response(ERROR_UNAUTHORIZED, text, remote=True, remote_status_code=response.status_code)
            #
            # Not found, No entitlement or No data in Analytics

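A minimal sketch of the credential-selection and fallback flow the hunk above implements, assuming OIDCClient.make_request accepts requests-style keyword arguments (it is imported from awx.main.utils.analytics_proxy); fetch_credentials and send_with_fallback are hypothetical helpers.

import requests


def fetch_credentials(settings):
    # Legacy REDHAT_* basic-auth credentials win; otherwise fall back to the
    # SUBSCRIPTIONS_* service-account pair and remember which kind was used.
    user = getattr(settings, 'REDHAT_USERNAME', None)
    pw = getattr(settings, 'REDHAT_PASSWORD', None)
    if user and pw:
        return user, pw, False
    return settings.SUBSCRIPTIONS_CLIENT_ID, settings.SUBSCRIPTIONS_CLIENT_SECRET, True


def send_with_fallback(client, method, url, user, pw, using_subscriptions, **kwargs):
    try:
        return client.make_request(method, url, **kwargs)
    except requests.RequestException:
        if using_subscriptions:
            # Service-account credentials are not valid for basic auth.
            raise
        return requests.request(method, url, auth=(user, pw), **kwargs)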
@@ -12,7 +12,7 @@ import re
import asn1
from awx.api import serializers
from awx.api.generics import GenericAPIView, Response
from awx.api.permissions import IsSystemAdminOrAuditor
from awx.api.permissions import IsSystemAdmin
from awx.main import models
from cryptography import x509
from cryptography.hazmat.primitives import hashes, serialization
@@ -48,7 +48,7 @@ class InstanceInstallBundle(GenericAPIView):
    name = _('Install Bundle')
    model = models.Instance
    serializer_class = serializers.InstanceSerializer
    permission_classes = (IsSystemAdminOrAuditor,)
    permission_classes = (IsSystemAdmin,)

    def get(self, request, *args, **kwargs):
        instance_obj = self.get_object()

@@ -15,6 +15,7 @@ from rest_framework.response import Response
from rest_framework import status

from awx.main.constants import ACTIVE_STATES
from awx.main.models import Organization
from awx.main.utils import get_object_or_400
from awx.main.models.ha import Instance, InstanceGroup, schedule_policy_task
from awx.main.models.organization import Team
@@ -60,6 +61,21 @@ class UnifiedJobDeletionMixin(object):
        return Response(status=status.HTTP_204_NO_CONTENT)


class OrganizationInstanceGroupMembershipMixin(object):
    """
    This mixin overloads attach/detach so that it calls Organization.save(),
    to ensure instance group updates are persisted
    """

    def unattach(self, request, *args, **kwargs):
        with transaction.atomic():
            organization_queryset = Organization.objects.select_for_update()
            organization = organization_queryset.get(pk=self.get_parent_object().id)
            response = super(OrganizationInstanceGroupMembershipMixin, self).unattach(request, *args, **kwargs)
            organization.save()
        return response


class InstanceGroupMembershipMixin(object):
    """
    This mixin overloads attach/detach so that it calls InstanceGroup.save(),

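A minimal sketch of the lock-and-save pattern the new mixin above relies on, assuming a Django model; parent_model, parent_pk and do_unattach are illustrative.

from django.db import transaction


def detach_and_persist(parent_model, parent_pk, do_unattach):
    with transaction.atomic():
        # Row-level lock so the detach and the save land in one transaction.
        parent = parent_model.objects.select_for_update().get(pk=parent_pk)
        response = do_unattach()
        parent.save()
    return response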
@@ -52,19 +52,16 @@ from awx.api.serializers import (
    WorkflowJobTemplateSerializer,
    CredentialSerializer,
)
from awx.api.views.mixin import RelatedJobsPreventDeleteMixin, OrganizationCountsMixin
from awx.api.views import immutablesharedfields
from awx.api.views.mixin import RelatedJobsPreventDeleteMixin, OrganizationCountsMixin, OrganizationInstanceGroupMembershipMixin

logger = logging.getLogger('awx.api.views.organization')


@immutablesharedfields
class OrganizationList(OrganizationCountsMixin, ListCreateAPIView):
    model = Organization
    serializer_class = OrganizationSerializer


@immutablesharedfields
class OrganizationDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView):
    model = Organization
    serializer_class = OrganizationSerializer
@@ -107,7 +104,6 @@ class OrganizationInventoriesList(SubListAPIView):
    relationship = 'inventories'


@immutablesharedfields
class OrganizationUsersList(BaseUsersList):
    model = User
    serializer_class = UserSerializer
@@ -116,7 +112,6 @@ class OrganizationUsersList(BaseUsersList):
    ordering = ('username',)


@immutablesharedfields
class OrganizationAdminsList(BaseUsersList):
    model = User
    serializer_class = UserSerializer
@@ -155,7 +150,6 @@ class OrganizationWorkflowJobTemplatesList(SubListCreateAPIView):
    parent_key = 'organization'


@immutablesharedfields
class OrganizationTeamsList(SubListCreateAttachDetachAPIView):
    model = Team
    serializer_class = TeamSerializer
@@ -202,7 +196,7 @@ class OrganizationNotificationTemplatesApprovalList(OrganizationNotificationTemplatesAnyList):
    relationship = 'notification_templates_approvals'


class OrganizationInstanceGroupsList(SubListAttachDetachAPIView):
class OrganizationInstanceGroupsList(OrganizationInstanceGroupMembershipMixin, SubListAttachDetachAPIView):
    model = InstanceGroup
    serializer_class = InstanceGroupSerializer
    parent_model = Organization

@@ -8,6 +8,8 @@ import operator
from collections import OrderedDict

from django.conf import settings
from django.core.cache import cache
from django.db import connection
from django.utils.encoding import smart_str
from django.utils.decorators import method_decorator
from django.views.decorators.csrf import ensure_csrf_cookie
@@ -26,12 +28,14 @@ from awx.api.generics import APIView
from awx.conf.registry import settings_registry
from awx.main.analytics import all_collectors
from awx.main.ha import is_ha_environment
from awx.main.tasks.system import clear_setting_cache
from awx.main.utils import get_awx_version, get_custom_venv_choices
from awx.main.utils.licensing import validate_entitlement_manifest
from awx.api.versioning import URLPathVersioning, is_optional_api_urlpattern_prefix_request, reverse, drf_reverse
from awx.api.versioning import URLPathVersioning, reverse, drf_reverse
from awx.main.constants import PRIVILEGE_ESCALATION_METHODS
from awx.main.models import Project, Organization, Instance, InstanceGroup, JobTemplate
from awx.main.utils import set_environ
from awx.main.utils.analytics_proxy import TokenError
from awx.main.utils.licensing import get_licenser

logger = logging.getLogger('awx.api.views.root')
@@ -51,8 +55,6 @@ class ApiRootView(APIView):
        data['description'] = _('AWX REST API')
        data['current_version'] = v2
        data['available_versions'] = dict(v2=v2)
        if not is_optional_api_urlpattern_prefix_request(request):
            data['oauth2'] = drf_reverse('api:oauth_authorization_root_view')
        data['custom_logo'] = settings.CUSTOM_LOGO
        data['custom_login_info'] = settings.CUSTOM_LOGIN_INFO
        data['login_redirect_override'] = settings.LOGIN_REDIRECT_OVERRIDE
@@ -61,20 +63,6 @@ class ApiRootView(APIView):
        return Response(data)


class ApiOAuthAuthorizationRootView(APIView):
    permission_classes = (AllowAny,)
    name = _("API OAuth 2 Authorization Root")
    versioning_class = None
    swagger_topic = 'Authentication'

    def get(self, request, format=None):
        data = OrderedDict()
        data['authorize'] = drf_reverse('api:authorize')
        data['token'] = drf_reverse('api:token')
        data['revoke_token'] = drf_reverse('api:revoke-token')
        return Response(data)


class ApiVersionRootView(APIView):
    permission_classes = (AllowAny,)
    swagger_topic = 'Versioning'
@@ -99,8 +87,6 @@ class ApiVersionRootView(APIView):
        data['credentials'] = reverse('api:credential_list', request=request)
        data['credential_types'] = reverse('api:credential_type_list', request=request)
        data['credential_input_sources'] = reverse('api:credential_input_source_list', request=request)
        data['applications'] = reverse('api:o_auth2_application_list', request=request)
        data['tokens'] = reverse('api:o_auth2_token_list', request=request)
        data['metrics'] = reverse('api:metrics_view', request=request)
        data['inventory'] = reverse('api:inventory_list', request=request)
        data['constructed_inventory'] = reverse('api:constructed_inventory_list', request=request)
@@ -194,19 +180,52 @@ class ApiV2SubscriptionView(APIView):

    def post(self, request):
        data = request.data.copy()
        if data.get('subscriptions_password') == '$encrypted$':
            data['subscriptions_password'] = settings.SUBSCRIPTIONS_PASSWORD

        try:
            user, pw = data.get('subscriptions_username'), data.get('subscriptions_password')
        user = None
        pw = None
        basic_auth = False
        # determine if the credentials are for basic auth or not
        if data.get('subscriptions_client_id'):
            user, pw = data.get('subscriptions_client_id'), data.get('subscriptions_client_secret')
            if pw == '$encrypted$':
                pw = settings.SUBSCRIPTIONS_CLIENT_SECRET
        elif data.get('subscriptions_username'):
            user, pw = data.get('subscriptions_username'), data.get('subscriptions_password')
            if pw == '$encrypted$':
                pw = settings.SUBSCRIPTIONS_PASSWORD
            basic_auth = True

        if not user or not pw:
            return Response({"error": _("Missing subscription credentials")}, status=status.HTTP_400_BAD_REQUEST)

            with set_environ(**settings.AWX_TASK_ENV):
                validated = get_licenser().validate_rh(user, pw)
            if user:
                settings.SUBSCRIPTIONS_USERNAME = data['subscriptions_username']
            if pw:
                settings.SUBSCRIPTIONS_PASSWORD = data['subscriptions_password']
                validated = get_licenser().validate_rh(user, pw, basic_auth)

            # update settings if the credentials were valid
            if basic_auth:
                if user:
                    settings.SUBSCRIPTIONS_USERNAME = user
                if pw:
                    settings.SUBSCRIPTIONS_PASSWORD = pw
                # mutual exclusion for basic auth and service account
                # only one should be set at a given time so that
                # config/attach/ knows which credentials to use
                settings.SUBSCRIPTIONS_CLIENT_ID = ""
                settings.SUBSCRIPTIONS_CLIENT_SECRET = ""
            else:
                if user:
                    settings.SUBSCRIPTIONS_CLIENT_ID = user
                if pw:
                    settings.SUBSCRIPTIONS_CLIENT_SECRET = pw
                # mutual exclusion for basic auth and service account
                settings.SUBSCRIPTIONS_USERNAME = ""
                settings.SUBSCRIPTIONS_PASSWORD = ""
        except Exception as exc:
            msg = _("Invalid Subscription")
            if isinstance(exc, requests.exceptions.HTTPError) and getattr(getattr(exc, 'response', None), 'status_code', None) == 401:
            if isinstance(exc, TokenError) or (
                isinstance(exc, requests.exceptions.HTTPError) and getattr(getattr(exc, 'response', None), 'status_code', None) == 401
            ):
                msg = _("The provided credentials are invalid (HTTP 401).")
            elif isinstance(exc, requests.exceptions.ProxyError):
                msg = _("Unable to connect to proxy server.")
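A minimal sketch of the mutual-exclusion rule applied above when valid credentials are stored: only one pair, basic auth or service account, is kept so config/attach/ knows which to use. The settings object stands in for AWX's live settings.

def store_subscription_credentials(settings, user, pw, basic_auth):
    if basic_auth:
        settings.SUBSCRIPTIONS_USERNAME = user
        settings.SUBSCRIPTIONS_PASSWORD = pw
        settings.SUBSCRIPTIONS_CLIENT_ID = ""
        settings.SUBSCRIPTIONS_CLIENT_SECRET = ""
    else:
        settings.SUBSCRIPTIONS_CLIENT_ID = user
        settings.SUBSCRIPTIONS_CLIENT_SECRET = pw
        settings.SUBSCRIPTIONS_USERNAME = ""
        settings.SUBSCRIPTIONS_PASSWORD = ""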
@@ -233,16 +252,25 @@ class ApiV2AttachView(APIView):

    def post(self, request):
        data = request.data.copy()
        pool_id = data.get('pool_id', None)
        if not pool_id:
            return Response({"error": _("No subscription pool ID provided.")}, status=status.HTTP_400_BAD_REQUEST)
        user = getattr(settings, 'SUBSCRIPTIONS_USERNAME', None)
        pw = getattr(settings, 'SUBSCRIPTIONS_PASSWORD', None)
        if pool_id and user and pw:
        subscription_id = data.get('subscription_id', None)
        if not subscription_id:
            return Response({"error": _("No subscription ID provided.")}, status=status.HTTP_400_BAD_REQUEST)
        # Ensure we always use the latest subscription credentials
        cache.delete_many(['SUBSCRIPTIONS_CLIENT_ID', 'SUBSCRIPTIONS_CLIENT_SECRET', 'SUBSCRIPTIONS_USERNAME', 'SUBSCRIPTIONS_PASSWORD'])
        user = getattr(settings, 'SUBSCRIPTIONS_CLIENT_ID', None)
        pw = getattr(settings, 'SUBSCRIPTIONS_CLIENT_SECRET', None)
        basic_auth = False
        if not (user and pw):
            user = getattr(settings, 'SUBSCRIPTIONS_USERNAME', None)
            pw = getattr(settings, 'SUBSCRIPTIONS_PASSWORD', None)
            basic_auth = True
        if not (user and pw):
            return Response({"error": _("Missing subscription credentials")}, status=status.HTTP_400_BAD_REQUEST)
        if subscription_id and user and pw:
            data = request.data.copy()
            try:
                with set_environ(**settings.AWX_TASK_ENV):
                    validated = get_licenser().validate_rh(user, pw)
                    validated = get_licenser().validate_rh(user, pw, basic_auth)
            except Exception as exc:
                msg = _("Invalid Subscription")
                if isinstance(exc, requests.exceptions.HTTPError) and getattr(getattr(exc, 'response', None), 'status_code', None) == 401:
@@ -256,10 +284,12 @@ class ApiV2AttachView(APIView):
            else:
                logger.exception(smart_str(u"Invalid subscription submitted."), extra=dict(actor=request.user.username))
                return Response({"error": msg}, status=status.HTTP_400_BAD_REQUEST)

            for sub in validated:
                if sub['pool_id'] == pool_id:
                if sub['subscription_id'] == subscription_id:
                    sub['valid_key'] = True
                    settings.LICENSE = sub
                    connection.on_commit(lambda: clear_setting_cache.delay(['LICENSE']))
                    return Response(sub)

        return Response({"error": _("Error processing subscription metadata.")}, status=status.HTTP_400_BAD_REQUEST)
@@ -279,7 +309,6 @@ class ApiV2ConfigView(APIView):
        '''Return various sitewide configuration settings'''

        license_data = get_licenser().validate()

        if not license_data.get('valid_key', False):
            license_data = {}

@@ -295,15 +324,6 @@ class ApiV2ConfigView(APIView):
            become_methods=PRIVILEGE_ESCALATION_METHODS,
        )

        # If LDAP is enabled, user_ldap_fields will return a list of field
        # names that are managed by LDAP and should be read-only for users with
        # a non-empty ldap_dn attribute.
        if getattr(settings, 'AUTH_LDAP_SERVER_URI', None):
            user_ldap_fields = ['username', 'password']
            user_ldap_fields.extend(getattr(settings, 'AUTH_LDAP_USER_ATTR_MAP', {}).keys())
            user_ldap_fields.extend(getattr(settings, 'AUTH_LDAP_USER_FLAGS_BY_GROUP', {}).keys())
            data['user_ldap_fields'] = user_ldap_fields

        if (
            request.user.is_superuser
            or request.user.is_system_auditor
@@ -352,6 +372,7 @@ class ApiV2ConfigView(APIView):

        try:
            license_data_validated = get_licenser().license_from_manifest(license_data)
            connection.on_commit(lambda: clear_setting_cache.delay(['LICENSE']))
        except Exception:
            logger.warning(smart_str(u"Invalid subscription submitted."), extra=dict(actor=request.user.username))
            return Response({"error": _("Invalid License")}, status=status.HTTP_400_BAD_REQUEST)
@@ -370,6 +391,7 @@ class ApiV2ConfigView(APIView):
    def delete(self, request):
        try:
            settings.LICENSE = {}
            connection.on_commit(lambda: clear_setting_cache.delay(['LICENSE']))
            return Response(status=status.HTTP_204_NO_CONTENT)
        except Exception:
            # FIX: Log

@@ -10,7 +10,7 @@ from django.core.validators import URLValidator, _lazy_re_compile
from django.utils.translation import gettext_lazy as _

# Django REST Framework
from rest_framework.fields import BooleanField, CharField, ChoiceField, DictField, DateTimeField, EmailField, IntegerField, ListField  # noqa
from rest_framework.fields import BooleanField, CharField, ChoiceField, DictField, DateTimeField, EmailField, IntegerField, ListField, FloatField  # noqa
from rest_framework.serializers import PrimaryKeyRelatedField  # noqa

# AWX
@@ -207,7 +207,8 @@ class URLField(CharField):
        if self.allow_plain_hostname:
            try:
                url_parts = urlparse.urlsplit(value)
                if url_parts.hostname and '.' not in url_parts.hostname:
                looks_like_ipv6 = bool(url_parts.netloc and url_parts.netloc.startswith('[') and url_parts.netloc.endswith(']'))
                if not looks_like_ipv6 and url_parts.hostname and '.' not in url_parts.hostname:
                    netloc = '{}.local'.format(url_parts.hostname)
                    if url_parts.port:
                        netloc = '{}:{}'.format(netloc, url_parts.port)

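A minimal sketch of the bracketed-IPv6 guard added above: hosts like [2001:db8::1] contain no dots, so without the netloc check the plain-hostname branch would rewrite them to '<host>.local'.

from urllib import parse as urlparse


def looks_like_ipv6(value):
    parts = urlparse.urlsplit(value)
    return bool(parts.netloc and parts.netloc.startswith('[') and parts.netloc.endswith(']'))


assert looks_like_ipv6('https://[2001:db8::1]')
assert not looks_like_ipv6('https://myhost')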
@@ -1,13 +1,11 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

# AWX
from awx.conf.migrations._ldap_group_type import fill_ldap_group_type_params

from django.db import migrations


class Migration(migrations.Migration):
    dependencies = [('conf', '0005_v330_rename_two_session_settings')]

    operations = [migrations.RunPython(fill_ldap_group_type_params)]
    # this migration is doing nothing, and is here to preserve migrations files integrity
    operations = []

awx/conf/migrations/0011_remove_ldap_auth_conf.py (new file, 115 lines)
@@ -0,0 +1,115 @@
from django.db import migrations

LDAP_AUTH_CONF_KEYS = [
    'AUTH_LDAP_SERVER_URI',
    'AUTH_LDAP_BIND_DN',
    'AUTH_LDAP_BIND_PASSWORD',
    'AUTH_LDAP_START_TLS',
    'AUTH_LDAP_CONNECTION_OPTIONS',
    'AUTH_LDAP_USER_SEARCH',
    'AUTH_LDAP_USER_DN_TEMPLATE',
    'AUTH_LDAP_USER_ATTR_MAP',
    'AUTH_LDAP_GROUP_SEARCH',
    'AUTH_LDAP_GROUP_TYPE',
    'AUTH_LDAP_GROUP_TYPE_PARAMS',
    'AUTH_LDAP_REQUIRE_GROUP',
    'AUTH_LDAP_DENY_GROUP',
    'AUTH_LDAP_USER_FLAGS_BY_GROUP',
    'AUTH_LDAP_ORGANIZATION_MAP',
    'AUTH_LDAP_TEAM_MAP',
    'AUTH_LDAP_1_SERVER_URI',
    'AUTH_LDAP_1_BIND_DN',
    'AUTH_LDAP_1_BIND_PASSWORD',
    'AUTH_LDAP_1_START_TLS',
    'AUTH_LDAP_1_CONNECTION_OPTIONS',
    'AUTH_LDAP_1_USER_SEARCH',
    'AUTH_LDAP_1_USER_DN_TEMPLATE',
    'AUTH_LDAP_1_USER_ATTR_MAP',
    'AUTH_LDAP_1_GROUP_SEARCH',
    'AUTH_LDAP_1_GROUP_TYPE',
    'AUTH_LDAP_1_GROUP_TYPE_PARAMS',
    'AUTH_LDAP_1_REQUIRE_GROUP',
    'AUTH_LDAP_1_DENY_GROUP',
    'AUTH_LDAP_1_USER_FLAGS_BY_GROUP',
    'AUTH_LDAP_1_ORGANIZATION_MAP',
    'AUTH_LDAP_1_TEAM_MAP',
    'AUTH_LDAP_2_SERVER_URI',
    'AUTH_LDAP_2_BIND_DN',
    'AUTH_LDAP_2_BIND_PASSWORD',
    'AUTH_LDAP_2_START_TLS',
    'AUTH_LDAP_2_CONNECTION_OPTIONS',
    'AUTH_LDAP_2_USER_SEARCH',
    'AUTH_LDAP_2_USER_DN_TEMPLATE',
    'AUTH_LDAP_2_USER_ATTR_MAP',
    'AUTH_LDAP_2_GROUP_SEARCH',
    'AUTH_LDAP_2_GROUP_TYPE',
    'AUTH_LDAP_2_GROUP_TYPE_PARAMS',
    'AUTH_LDAP_2_REQUIRE_GROUP',
    'AUTH_LDAP_2_DENY_GROUP',
    'AUTH_LDAP_2_USER_FLAGS_BY_GROUP',
    'AUTH_LDAP_2_ORGANIZATION_MAP',
    'AUTH_LDAP_2_TEAM_MAP',
    'AUTH_LDAP_3_SERVER_URI',
    'AUTH_LDAP_3_BIND_DN',
    'AUTH_LDAP_3_BIND_PASSWORD',
    'AUTH_LDAP_3_START_TLS',
    'AUTH_LDAP_3_CONNECTION_OPTIONS',
    'AUTH_LDAP_3_USER_SEARCH',
    'AUTH_LDAP_3_USER_DN_TEMPLATE',
    'AUTH_LDAP_3_USER_ATTR_MAP',
    'AUTH_LDAP_3_GROUP_SEARCH',
    'AUTH_LDAP_3_GROUP_TYPE',
    'AUTH_LDAP_3_GROUP_TYPE_PARAMS',
    'AUTH_LDAP_3_REQUIRE_GROUP',
    'AUTH_LDAP_3_DENY_GROUP',
    'AUTH_LDAP_3_USER_FLAGS_BY_GROUP',
    'AUTH_LDAP_3_ORGANIZATION_MAP',
    'AUTH_LDAP_3_TEAM_MAP',
    'AUTH_LDAP_4_SERVER_URI',
    'AUTH_LDAP_4_BIND_DN',
    'AUTH_LDAP_4_BIND_PASSWORD',
    'AUTH_LDAP_4_START_TLS',
    'AUTH_LDAP_4_CONNECTION_OPTIONS',
    'AUTH_LDAP_4_USER_SEARCH',
    'AUTH_LDAP_4_USER_DN_TEMPLATE',
    'AUTH_LDAP_4_USER_ATTR_MAP',
    'AUTH_LDAP_4_GROUP_SEARCH',
    'AUTH_LDAP_4_GROUP_TYPE',
    'AUTH_LDAP_4_GROUP_TYPE_PARAMS',
    'AUTH_LDAP_4_REQUIRE_GROUP',
    'AUTH_LDAP_4_DENY_GROUP',
    'AUTH_LDAP_4_USER_FLAGS_BY_GROUP',
    'AUTH_LDAP_4_ORGANIZATION_MAP',
    'AUTH_LDAP_4_TEAM_MAP',
    'AUTH_LDAP_5_SERVER_URI',
    'AUTH_LDAP_5_BIND_DN',
    'AUTH_LDAP_5_BIND_PASSWORD',
    'AUTH_LDAP_5_START_TLS',
    'AUTH_LDAP_5_CONNECTION_OPTIONS',
    'AUTH_LDAP_5_USER_SEARCH',
    'AUTH_LDAP_5_USER_DN_TEMPLATE',
    'AUTH_LDAP_5_USER_ATTR_MAP',
    'AUTH_LDAP_5_GROUP_SEARCH',
    'AUTH_LDAP_5_GROUP_TYPE',
    'AUTH_LDAP_5_GROUP_TYPE_PARAMS',
    'AUTH_LDAP_5_REQUIRE_GROUP',
    'AUTH_LDAP_5_DENY_GROUP',
    'AUTH_LDAP_5_USER_FLAGS_BY_GROUP',
    'AUTH_LDAP_5_ORGANIZATION_MAP',
    'AUTH_LDAP_5_TEAM_MAP',
]


def remove_ldap_auth_conf(apps, scheme_editor):
    setting = apps.get_model('conf', 'Setting')
    setting.objects.filter(key__in=LDAP_AUTH_CONF_KEYS).delete()


class Migration(migrations.Migration):
    dependencies = [
        ('conf', '0010_change_to_JSONField'),
    ]

    operations = [
        migrations.RunPython(remove_ldap_auth_conf),
    ]
awx/conf/migrations/0012_remove_oidc_auth_conf.py (new file, 20 lines)
@@ -0,0 +1,20 @@
# Generated by Django 4.2.10 on 2024-08-27 19:31

from django.db import migrations

OIDC_AUTH_CONF_KEYS = ['SOCIAL_AUTH_OIDC_KEY', 'SOCIAL_AUTH_OIDC_SECRET', 'SOCIAL_AUTH_OIDC_OIDC_ENDPOINT', 'SOCIAL_AUTH_OIDC_VERIFY_SSL']


def remove_oidc_auth_conf(apps, scheme_editor):
    setting = apps.get_model('conf', 'Setting')
    setting.objects.filter(key__in=OIDC_AUTH_CONF_KEYS).delete()


class Migration(migrations.Migration):
    dependencies = [
        ('conf', '0011_remove_ldap_auth_conf'),
    ]

    operations = [
        migrations.RunPython(remove_oidc_auth_conf),
    ]
awx/conf/migrations/0013_remove_radius_auth_conf.py (new file, 22 lines)
@@ -0,0 +1,22 @@
from django.db import migrations

RADIUS_AUTH_CONF_KEYS = [
    'RADIUS_SERVER',
    'RADIUS_PORT',
    'RADIUS_SECRET',
]


def remove_radius_auth_conf(apps, scheme_editor):
    setting = apps.get_model('conf', 'Setting')
    setting.objects.filter(key__in=RADIUS_AUTH_CONF_KEYS).delete()


class Migration(migrations.Migration):
    dependencies = [
        ('conf', '0012_remove_oidc_auth_conf'),
    ]

    operations = [
        migrations.RunPython(remove_radius_auth_conf),
    ]
awx/conf/migrations/0014_remove_saml_auth_conf.py (new file, 39 lines)
@@ -0,0 +1,39 @@
# Generated by Django 4.2.10 on 2024-08-27 14:20

from django.db import migrations

SAML_AUTH_CONF_KEYS = [
    'SAML_AUTO_CREATE_OBJECTS',
    'SOCIAL_AUTH_SAML_CALLBACK_URL',
    'SOCIAL_AUTH_SAML_METADATA_URL',
    'SOCIAL_AUTH_SAML_SP_ENTITY_ID',
    'SOCIAL_AUTH_SAML_SP_PUBLIC_CERT',
    'SOCIAL_AUTH_SAML_SP_PRIVATE_KEY',
    'SOCIAL_AUTH_SAML_ORG_INFO',
    'SOCIAL_AUTH_SAML_TECHNICAL_CONTACT',
    'SOCIAL_AUTH_SAML_SUPPORT_CONTACT',
    'SOCIAL_AUTH_SAML_ENABLED_IDPS',
    'SOCIAL_AUTH_SAML_SECURITY_CONFIG',
    'SOCIAL_AUTH_SAML_SP_EXTRA',
    'SOCIAL_AUTH_SAML_EXTRA_DATA',
    'SOCIAL_AUTH_SAML_ORGANIZATION_MAP',
    'SOCIAL_AUTH_SAML_TEAM_MAP',
    'SOCIAL_AUTH_SAML_ORGANIZATION_ATTR',
    'SOCIAL_AUTH_SAML_TEAM_ATTR',
    'SOCIAL_AUTH_SAML_USER_FLAGS_BY_ATTR',
]


def remove_saml_auth_conf(apps, scheme_editor):
    setting = apps.get_model('conf', 'Setting')
    setting.objects.filter(key__in=SAML_AUTH_CONF_KEYS).delete()


class Migration(migrations.Migration):
    dependencies = [
        ('conf', '0013_remove_radius_auth_conf'),
    ]

    operations = [
        migrations.RunPython(remove_saml_auth_conf),
    ]
awx/conf/migrations/0015_remove_social_oauth_conf.py (new file, 81 lines)
@@ -0,0 +1,81 @@
# Generated by Django 4.2.10 on 2024-08-13 11:14

from django.db import migrations

SOCIAL_OAUTH_CONF_KEYS = [
    # MICROSOFT AZURE ACTIVE DIRECTORY SETTINGS
    'SOCIAL_AUTH_AZUREAD_OAUTH2_CALLBACK_URL',
    'SOCIAL_AUTH_AZUREAD_OAUTH2_KEY',
    'SOCIAL_AUTH_AZUREAD_OAUTH2_SECRET',
    'SOCIAL_AUTH_AZUREAD_OAUTH2_ORGANIZATION_MAP',
    'SOCIAL_AUTH_AZUREAD_OAUTH2_TEAM_MAP',
    # GOOGLE OAUTH2 AUTHENTICATION SETTINGS
    'SOCIAL_AUTH_GOOGLE_OAUTH2_CALLBACK_URL',
    'SOCIAL_AUTH_GOOGLE_OAUTH2_KEY',
    'SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET',
    'SOCIAL_AUTH_GOOGLE_OAUTH2_WHITELISTED_DOMAINS',
    'SOCIAL_AUTH_GOOGLE_OAUTH2_AUTH_EXTRA_ARGUMENTS',
    'SOCIAL_AUTH_GOOGLE_OAUTH2_ORGANIZATION_MAP',
    'SOCIAL_AUTH_GOOGLE_OAUTH2_TEAM_MAP',
    # GITHUB OAUTH2 AUTHENTICATION SETTINGS
    'SOCIAL_AUTH_GITHUB_CALLBACK_URL',
    'SOCIAL_AUTH_GITHUB_KEY',
    'SOCIAL_AUTH_GITHUB_SECRET',
    'SOCIAL_AUTH_GITHUB_ORGANIZATION_MAP',
    'SOCIAL_AUTH_GITHUB_TEAM_MAP',
    # GITHUB ORG OAUTH2 AUTHENTICATION SETTINGS
    'SOCIAL_AUTH_GITHUB_ORG_CALLBACK_URL',
    'SOCIAL_AUTH_GITHUB_ORG_KEY',
    'SOCIAL_AUTH_GITHUB_ORG_SECRET',
    'SOCIAL_AUTH_GITHUB_ORG_NAME',
    'SOCIAL_AUTH_GITHUB_ORG_ORGANIZATION_MAP',
    'SOCIAL_AUTH_GITHUB_ORG_TEAM_MAP',
    # GITHUB TEAM OAUTH2 AUTHENTICATION SETTINGS
    'SOCIAL_AUTH_GITHUB_TEAM_CALLBACK_URL',
    'SOCIAL_AUTH_GITHUB_TEAM_KEY',
    'SOCIAL_AUTH_GITHUB_TEAM_SECRET',
    'SOCIAL_AUTH_GITHUB_TEAM_ID',
    'SOCIAL_AUTH_GITHUB_TEAM_ORGANIZATION_MAP',
    'SOCIAL_AUTH_GITHUB_TEAM_TEAM_MAP',
    # GITHUB ENTERPRISE OAUTH2 AUTHENTICATION SETTINGS
    'SOCIAL_AUTH_GITHUB_ENTERPRISE_CALLBACK_URL',
    'SOCIAL_AUTH_GITHUB_ENTERPRISE_URL',
    'SOCIAL_AUTH_GITHUB_ENTERPRISE_API_URL',
    'SOCIAL_AUTH_GITHUB_ENTERPRISE_KEY',
    'SOCIAL_AUTH_GITHUB_ENTERPRISE_SECRET',
    'SOCIAL_AUTH_GITHUB_ENTERPRISE_ORGANIZATION_MAP',
    'SOCIAL_AUTH_GITHUB_ENTERPRISE_TEAM_MAP',
    # GITHUB ENTERPRISE ORG OAUTH2 AUTHENTICATION SETTINGS
    'SOCIAL_AUTH_GITHUB_ENTERPRISE_ORG_CALLBACK_URL',
    'SOCIAL_AUTH_GITHUB_ENTERPRISE_ORG_URL',
    'SOCIAL_AUTH_GITHUB_ENTERPRISE_ORG_API_URL',
    'SOCIAL_AUTH_GITHUB_ENTERPRISE_ORG_KEY',
    'SOCIAL_AUTH_GITHUB_ENTERPRISE_ORG_SECRET',
    'SOCIAL_AUTH_GITHUB_ENTERPRISE_ORG_NAME',
    'SOCIAL_AUTH_GITHUB_ENTERPRISE_ORG_ORGANIZATION_MAP',
    'SOCIAL_AUTH_GITHUB_ENTERPRISE_ORG_TEAM_MAP',
    # GITHUB ENTERPRISE TEAM OAUTH2 AUTHENTICATION SETTINGS
    'SOCIAL_AUTH_GITHUB_ENTERPRISE_TEAM_CALLBACK_URL',
    'SOCIAL_AUTH_GITHUB_ENTERPRISE_TEAM_URL',
    'SOCIAL_AUTH_GITHUB_ENTERPRISE_TEAM_API_URL',
    'SOCIAL_AUTH_GITHUB_ENTERPRISE_TEAM_KEY',
    'SOCIAL_AUTH_GITHUB_ENTERPRISE_TEAM_SECRET',
    'SOCIAL_AUTH_GITHUB_ENTERPRISE_TEAM_ID',
    'SOCIAL_AUTH_GITHUB_ENTERPRISE_TEAM_ORGANIZATION_MAP',
    'SOCIAL_AUTH_GITHUB_ENTERPRISE_TEAM_TEAM_MAP',
]


def remove_social_oauth_conf(apps, scheme_editor):
    setting = apps.get_model('conf', 'Setting')
    setting.objects.filter(key__in=SOCIAL_OAUTH_CONF_KEYS).delete()


class Migration(migrations.Migration):
    dependencies = [
        ('conf', '0014_remove_saml_auth_conf'),
    ]

    operations = [
        migrations.RunPython(remove_social_oauth_conf),
    ]
awx/conf/migrations/0016_remove_tacacs_plus_auth_conf.py (new file, 25 lines)
@@ -0,0 +1,25 @@
from django.db import migrations

TACACS_PLUS_AUTH_CONF_KEYS = [
    'TACACSPLUS_HOST',
    'TACACSPLUS_PORT',
    'TACACSPLUS_SECRET',
    'TACACSPLUS_SESSION_TIMEOUT',
    'TACACSPLUS_AUTH_PROTOCOL',
    'TACACSPLUS_REM_ADDR',
]


def remove_tacacs_plus_auth_conf(apps, scheme_editor):
    setting = apps.get_model('conf', 'Setting')
    setting.objects.filter(key__in=TACACS_PLUS_AUTH_CONF_KEYS).delete()


class Migration(migrations.Migration):
    dependencies = [
        ('conf', '0015_remove_social_oauth_conf'),
    ]

    operations = [
        migrations.RunPython(remove_tacacs_plus_auth_conf),
    ]
@@ -1,31 +0,0 @@
import inspect

from django.conf import settings

import logging


logger = logging.getLogger('awx.conf.migrations')


def fill_ldap_group_type_params(apps, schema_editor):
    group_type = getattr(settings, 'AUTH_LDAP_GROUP_TYPE', None)
    Setting = apps.get_model('conf', 'Setting')

    group_type_params = {'name_attr': 'cn', 'member_attr': 'member'}
    qs = Setting.objects.filter(key='AUTH_LDAP_GROUP_TYPE_PARAMS')
    entry = None
    if qs.exists():
        entry = qs[0]
        group_type_params = entry.value
    else:
        return  # for new installs we prefer to use the default value

    init_attrs = set(inspect.getfullargspec(group_type.__init__).args[1:])
    for k in list(group_type_params.keys()):
        if k not in init_attrs:
            del group_type_params[k]

    entry.value = group_type_params
    logger.warning(f'Migration updating AUTH_LDAP_GROUP_TYPE_PARAMS with value {entry.value}')
    entry.save()
@@ -27,5 +27,5 @@ def _migrate_setting(apps, old_key, new_key, encrypted=False):


def prefill_rh_credentials(apps, schema_editor):
    _migrate_setting(apps, 'REDHAT_USERNAME', 'SUBSCRIPTIONS_USERNAME', encrypted=False)
    _migrate_setting(apps, 'REDHAT_PASSWORD', 'SUBSCRIPTIONS_PASSWORD', encrypted=True)
    _migrate_setting(apps, 'REDHAT_USERNAME', 'SUBSCRIPTIONS_CLIENT_ID', encrypted=False)
    _migrate_setting(apps, 'REDHAT_PASSWORD', 'SUBSCRIPTIONS_CLIENT_SECRET', encrypted=True)

@@ -38,6 +38,7 @@ class SettingsRegistry(object):
        if setting in self._registry:
            raise ImproperlyConfigured('Setting "{}" is already registered.'.format(setting))
        category = kwargs.setdefault('category', None)
        kwargs.setdefault('required', False)  # No setting is ordinarily required
        category_slug = kwargs.setdefault('category_slug', slugify(category or '') or None)
        if category_slug in {'all', 'changed', 'user-defaults'}:
            raise ImproperlyConfigured('"{}" is a reserved category slug.'.format(category_slug))

@@ -97,10 +97,13 @@ def _ctit_db_wrapper(trans_safe=False):
    except DatabaseError as e:
        if trans_safe:
            cause = e.__cause__
            if cause and hasattr(cause, 'sqlstate'):
            sqlstate = getattr(cause, 'sqlstate', None)
            if cause and sqlstate:
                sqlstate = cause.sqlstate
                sqlstate_str = psycopg.errors.lookup(sqlstate)
                logger.error('SQL Error state: {} - {}'.format(sqlstate, sqlstate_str))
            else:
                logger.error(f'Error reading something related to database settings: {str(e)}.')
        else:
            logger.exception('Error modifying something related to database settings.')
    finally:

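A minimal sketch of the safer sqlstate handling above: the attribute is read once with getattr, so an error whose cause lacks sqlstate (or carries None) falls through to the generic message; psycopg here is psycopg 3.

import psycopg


def describe_db_error(exc):
    cause = exc.__cause__
    sqlstate = getattr(cause, 'sqlstate', None)
    if cause and sqlstate:
        return 'SQL Error state: {} - {}'.format(sqlstate, psycopg.errors.lookup(sqlstate))
    return 'Error reading something related to database settings: {}.'.format(exc)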
@@ -61,18 +61,3 @@ def on_post_delete_setting(sender, **kwargs):
    key = getattr(instance, '_saved_key_', None)
    if key:
        handle_setting_change(key, True)


@receiver(setting_changed)
def disable_local_auth(**kwargs):
    if (kwargs['setting'], kwargs['value']) == ('DISABLE_LOCAL_AUTH', True):
        from django.contrib.auth.models import User
        from oauth2_provider.models import RefreshToken
        from awx.main.models.oauth import OAuth2AccessToken
        from awx.main.management.commands.revoke_oauth2_tokens import revoke_tokens

        logger.warning("Triggering token invalidation for local users.")

        qs = User.objects.filter(profile__ldap_dn='', enterprise_auth__isnull=True, social_auth__isnull=True)
        revoke_tokens(RefreshToken.objects.filter(revoked=None, user__in=qs))
        revoke_tokens(OAuth2AccessToken.objects.filter(user__in=qs))

@@ -8,7 +8,6 @@ from awx.main.utils.encryption import decrypt_field
from awx.conf import fields
from awx.conf.registry import settings_registry
from awx.conf.models import Setting
from awx.sso import fields as sso_fields


@pytest.fixture
@@ -103,24 +102,6 @@ def test_setting_singleton_update(api_request, dummy_setting):
    assert response.data['FOO_BAR'] == 4


@pytest.mark.django_db
def test_setting_singleton_update_hybriddictfield_with_forbidden(api_request, dummy_setting):
    # Some HybridDictField subclasses have a child of _Forbidden,
    # indicating that only the defined fields can be filled in. Make
    # sure that the _Forbidden validator doesn't get used for the
    # fields. See also https://github.com/ansible/awx/issues/4099.
    with dummy_setting('FOO_BAR', field_class=sso_fields.SAMLOrgAttrField, category='FooBar', category_slug='foobar'), mock.patch(
        'awx.conf.views.clear_setting_cache'
    ):
        api_request(
            'patch',
            reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}),
            data={'FOO_BAR': {'saml_admin_attr': 'Admins', 'saml_attr': 'Orgs'}},
        )
        response = api_request('get', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}))
        assert response.data['FOO_BAR'] == {'saml_admin_attr': 'Admins', 'saml_attr': 'Orgs'}


@pytest.mark.django_db
def test_setting_singleton_update_dont_change_readonly_fields(api_request, dummy_setting):
    with dummy_setting('FOO_BAR', field_class=fields.IntegerField, read_only=True, default=4, category='FooBar', category_slug='foobar'), mock.patch(

@@ -1,25 +0,0 @@
import pytest

from awx.conf.migrations._ldap_group_type import fill_ldap_group_type_params
from awx.conf.models import Setting

from django.apps import apps


@pytest.mark.django_db
def test_fill_group_type_params_no_op():
    fill_ldap_group_type_params(apps, 'dont-use-me')
    assert Setting.objects.count() == 0


@pytest.mark.django_db
def test_keep_old_setting_with_default_value():
    Setting.objects.create(key='AUTH_LDAP_GROUP_TYPE', value={'name_attr': 'cn', 'member_attr': 'member'})
    fill_ldap_group_type_params(apps, 'dont-use-me')
    assert Setting.objects.count() == 1
    s = Setting.objects.first()
    assert s.value == {'name_attr': 'cn', 'member_attr': 'member'}


# NOTE: would be good to test the removal of attributes by migration
# but this requires fighting with the validator and is not done here
@@ -111,7 +111,6 @@ class TestURLField:
    @pytest.mark.parametrize(
        "url,schemes,regex, allow_numbers_in_top_level_domain, expect_no_error",
        [
            ("ldap://www.example.org42", "ldap", None, True, True),
            ("https://www.example.org42", "https", None, False, False),
            ("https://www.example.org", None, regex, None, True),
            ("https://www.example3.org", None, regex, None, False),
@@ -129,3 +128,41 @@ class TestURLField:
        else:
            with pytest.raises(ValidationError):
                field.run_validators(url)

    @pytest.mark.parametrize(
        "url, expect_error",
        [
            ("https://[1:2:3]", True),
            ("http://[1:2:3]", True),
            ("https://[2001:db8:3333:4444:5555:6666:7777:8888", True),
            ("https://2001:db8:3333:4444:5555:6666:7777:8888", True),
            ("https://[2001:db8:3333:4444:5555:6666:7777:8888]", False),
            ("https://[::1]", False),
            ("https://[::]", False),
            ("https://[2001:db8::1]", False),
            ("https://[2001:db8:0:0:0:0:1:1]", False),
            ("https://[fe80::2%eth0]", True),  # ipv6 scope identifier
            ("https://[fe80:0:0:0:200:f8ff:fe21:67cf]", False),
            ("https://[::ffff:192.168.1.10]", False),
            ("https://[0:0:0:0:0:ffff:c000:0201]", False),
            ("https://[2001:0db8:000a:0001:0000:0000:0000:0000]", False),
            ("https://[2001:db8:a:1::]", False),
            ("https://[ff02::1]", False),
            ("https://[ff02:0:0:0:0:0:0:1]", False),
            ("https://[fc00::1]", False),
            ("https://[fd12:3456:789a:1::1]", False),
            ("https://[2001:db8::abcd:ef12:3456:7890]", False),
            ("https://[2001:db8:0000:abcd:0000:ef12:0000:3456]", False),
            ("https://[::ffff:10.0.0.1]", False),
            ("https://[2001:db8:cafe::]", False),
            ("https://[2001:db8:cafe:0:0:0:0:0]", False),
            ("https://[fe80::210:f3ff:fedf:4567%3]", True),  # ipv6 scope identifier, numerical interface
        ],
    )
    def test_ipv6_urls(self, url, expect_error):
        field = URLField()
        if expect_error:
            with pytest.raises(ValidationError, match="Enter a valid URL"):
                field.run_validators(url)
        else:
            field.run_validators(url)

@@ -17,9 +17,6 @@ from django.core.exceptions import ObjectDoesNotExist, FieldDoesNotExist
# Django REST Framework
from rest_framework.exceptions import ParseError, PermissionDenied

# Django OAuth Toolkit
from awx.main.models.oauth import OAuth2Application, OAuth2AccessToken

# django-ansible-base
from ansible_base.lib.utils.validation import to_python_boolean
from ansible_base.rbac.models import RoleEvaluation
@@ -441,10 +438,7 @@ class BaseAccess(object):

        # Actions not possible for reason unrelated to RBAC
        # Cannot copy with validation errors, or update a manual group/project
        if 'write' not in getattr(self.user, 'oauth_scopes', ['write']):
            user_capabilities[display_method] = False  # Read tokens cannot take any actions
            continue
        elif display_method in ['copy', 'start', 'schedule'] and isinstance(obj, JobTemplate):
        if display_method in ['copy', 'start', 'schedule'] and isinstance(obj, JobTemplate):
            if obj.validation_errors:
                user_capabilities[display_method] = False
                continue
@@ -642,13 +636,12 @@ class UserAccess(BaseAccess):
    """

    model = User
    prefetch_related = (
        'profile',
        'resource',
    )
    prefetch_related = ('resource',)

    def filtered_queryset(self):
        if settings.ORG_ADMINS_CAN_SEE_ALL_USERS and (self.user.admin_of_organizations.exists() or self.user.auditor_of_organizations.exists()):
        if settings.ORG_ADMINS_CAN_SEE_ALL_USERS and (
            Organization.access_qs(self.user, 'change').exists() or Organization.access_qs(self.user, 'audit').exists()
        ):
            qs = User.objects.all()
        else:
            qs = (
@@ -756,82 +749,6 @@ class UserAccess(BaseAccess):
        return False


class OAuth2ApplicationAccess(BaseAccess):
    """
    I can read, change or delete OAuth 2 applications when:
    - I am a superuser.
    - I am the admin of the organization of the user of the application.
    - I am a user in the organization of the application.
    I can create OAuth 2 applications when:
    - I am a superuser.
    - I am the admin of the organization of the application.
    """

    model = OAuth2Application
    select_related = ('user',)
    prefetch_related = ('organization', 'oauth2accesstoken_set')

    def filtered_queryset(self):
        org_access_qs = Organization.access_qs(self.user, 'member')
        return self.model.objects.filter(organization__in=org_access_qs)

    def can_change(self, obj, data):
        return self.user.is_superuser or self.check_related('organization', Organization, data, obj=obj, role_field='admin_role', mandatory=True)

    def can_delete(self, obj):
        return self.user.is_superuser or obj.organization in self.user.admin_of_organizations

    def can_add(self, data):
        if self.user.is_superuser:
            return True
        if not data:
            return Organization.access_qs(self.user, 'change').exists()
        return self.check_related('organization', Organization, data, role_field='admin_role', mandatory=True)


class OAuth2TokenAccess(BaseAccess):
    """
    I can read, change or delete an app token when:
    - I am a superuser.
    - I am the admin of the organization of the application of the token.
    - I am the user of the token.
    I can create an OAuth2 app token when:
    - I have the read permission of the related application.
    I can read, change or delete a personal token when:
    - I am the user of the token
    - I am the superuser
    I can create an OAuth2 Personal Access Token when:
    - I am a user. But I can only create a PAT for myself.
    """

    model = OAuth2AccessToken

    select_related = ('user', 'application')
    prefetch_related = ('refresh_token',)

    def filtered_queryset(self):
        org_access_qs = Organization.objects.filter(Q(admin_role__members=self.user) | Q(auditor_role__members=self.user))
        return self.model.objects.filter(application__organization__in=org_access_qs) | self.model.objects.filter(user__id=self.user.pk)

    def can_delete(self, obj):
        if (self.user.is_superuser) | (obj.user == self.user):
            return True
        elif not obj.application:
            return False
        return self.user in obj.application.organization.admin_role

    def can_change(self, obj, data):
        return self.can_delete(obj)

    def can_add(self, data):
        if 'application' in data:
            app = get_object_from_data('application', OAuth2Application, data)
            if app is None:
                return True
            return OAuth2ApplicationAccess(self.user).can_read(app)
        return True


class OrganizationAccess(NotificationAttachMixin, BaseAccess):
    """
    I can see organizations when:
@@ -1309,7 +1226,9 @@ class TeamAccess(BaseAccess):
        )

    def filtered_queryset(self):
        if settings.ORG_ADMINS_CAN_SEE_ALL_USERS and (self.user.admin_of_organizations.exists() or self.user.auditor_of_organizations.exists()):
        if settings.ORG_ADMINS_CAN_SEE_ALL_USERS and (
            Organization.access_qs(self.user, 'change').exists() or Organization.access_qs(self.user, 'audit').exists()
        ):
            return self.model.objects.all()
        return self.model.objects.filter(
            Q(organization__in=Organization.accessible_pk_qs(self.user, 'member_role')) | Q(pk__in=self.model.accessible_pk_qs(self.user, 'read_role'))
@@ -1861,6 +1780,11 @@ class SystemJobAccess(BaseAccess):

    model = SystemJob

    def filtered_queryset(self):
        if self.user.is_superuser or self.user.is_system_auditor:
            return self.model.objects.all()
        return self.model.objects.none()

    def can_start(self, obj, validate_license=True):
        return False  # no relaunching of system jobs

@@ -2178,7 +2102,7 @@ class WorkflowJobAccess(BaseAccess):
    def filtered_queryset(self):
        return WorkflowJob.objects.filter(
            Q(unified_job_template__in=UnifiedJobTemplate.accessible_pk_qs(self.user, 'read_role'))
            | Q(organization__in=Organization.objects.filter(Q(admin_role__members=self.user)), is_bulk_job=True)
            | Q(organization__in=Organization.accessible_pk_qs(self.user, 'auditor_role'))
        )

    def can_read(self, obj):
@@ -2576,12 +2500,11 @@ class UnifiedJobAccess(BaseAccess):

    def filtered_queryset(self):
        inv_pk_qs = Inventory._accessible_pk_qs(Inventory, self.user, 'read_role')
        org_auditor_qs = Organization.objects.filter(Q(admin_role__members=self.user) | Q(auditor_role__members=self.user))
        qs = self.model.objects.filter(
            Q(unified_job_template_id__in=UnifiedJobTemplate.accessible_pk_qs(self.user, 'read_role'))
            | Q(inventoryupdate__inventory_source__inventory__id__in=inv_pk_qs)
            | Q(adhoccommand__inventory__id__in=inv_pk_qs)
            | Q(organization__in=org_auditor_qs)
            | Q(organization__in=Organization.accessible_pk_qs(self.user, 'auditor_role'))
        )
        return qs

@@ -2645,7 +2568,7 @@ class NotificationTemplateAccess(BaseAccess):
        if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
            return self.model.access_qs(self.user, 'view')
        return self.model.objects.filter(
            Q(organization__in=Organization.access_qs(self.user, 'add_notificationtemplate')) | Q(organization__in=self.user.auditor_of_organizations)
            Q(organization__in=Organization.access_qs(self.user, 'add_notificationtemplate')) | Q(organization__in=Organization.access_qs(self.user, 'audit'))
        ).distinct()

    @check_superuser
@@ -2680,7 +2603,7 @@ class NotificationAccess(BaseAccess):
    def filtered_queryset(self):
        return self.model.objects.filter(
            Q(notification_template__organization__in=Organization.access_qs(self.user, 'add_notificationtemplate'))
            | Q(notification_template__organization__in=self.user.auditor_of_organizations)
            | Q(notification_template__organization__in=Organization.access_qs(self.user, 'audit'))
        ).distinct()

    def can_delete(self, obj):
@@ -2739,8 +2662,6 @@ class ActivityStreamAccess(BaseAccess):
        'credential_type',
        'team',
        'ad_hoc_command',
        'o_auth2_application',
        'o_auth2_access_token',
        'notification_template',
        'notification',
        'label',
@@ -2826,14 +2747,6 @@ class ActivityStreamAccess(BaseAccess):
        if team_set:
            q |= Q(team__in=team_set)

        app_set = OAuth2ApplicationAccess(self.user).filtered_queryset()
        if app_set:
            q |= Q(o_auth2_application__in=app_set)

        token_set = OAuth2TokenAccess(self.user).filtered_queryset()
        if token_set:
            q |= Q(o_auth2_access_token__in=token_set)

        return qs.filter(q).distinct()

    def can_add(self, data):

@@ -3,13 +3,13 @@ import logging

# AWX
from awx.main.analytics.subsystem_metrics import DispatcherMetrics, CallbackReceiverMetrics
from awx.main.dispatch.publish import task
from awx.main.dispatch.publish import task as task_awx
from awx.main.dispatch import get_task_queuename

logger = logging.getLogger('awx.main.scheduler')


@task(queue=get_task_queuename)
@task_awx(queue=get_task_queuename)
def send_subsystem_metrics():
    DispatcherMetrics().send_metrics()
    CallbackReceiverMetrics().send_metrics()

@@ -142,7 +142,7 @@ def config(since, **kwargs):
    return {
        'platform': {
            'system': platform.system(),
            'dist': distro.linux_distribution(),
            'dist': (distro.name(), distro.version(), distro.codename()),
            'release': platform.release(),
            'type': install_type,
        },
@@ -444,11 +444,6 @@ def _events_table(since, full_path, until, tbl, where_column, project_job_created
    return _copy_table(table='events', query=query(fr"replace({tbl}.event_data, '\u', '\u005cu')::jsonb"), path=full_path)


@register('events_table', '1.5', format='csv', description=_('Automation task records'), expensive=four_hour_slicing)
def events_table_unpartitioned(since, full_path, until, **kwargs):
    return _events_table(since, full_path, until, '_unpartitioned_main_jobevent', 'created', **kwargs)


@register('events_table', '1.5', format='csv', description=_('Automation task records'), expensive=four_hour_slicing)
def events_table_partitioned_modified(since, full_path, until, **kwargs):
    return _events_table(since, full_path, until, 'main_jobevent', 'modified', project_job_created=True, **kwargs)

@@ -16,10 +16,13 @@ from rest_framework.exceptions import PermissionDenied
import requests

from awx.conf.license import get_license

from ansible_base.lib.utils.db import advisory_lock

from awx.main.models import Job
from awx.main.access import access_registry
from awx.main.utils import get_awx_http_client_headers, set_environ, datetime_hook
from awx.main.utils.pglock import advisory_lock
from awx.main.utils.analytics_proxy import OIDCClient

__all__ = ['register', 'gather', 'ship']
@@ -181,7 +184,10 @@ def gather(dest=None, module=None, subset=None, since=None, until=None, collecti
        logger.log(log_level, "Automation Analytics not enabled. Use --dry-run to gather locally without sending.")
        return None

    if not (settings.AUTOMATION_ANALYTICS_URL and settings.REDHAT_USERNAME and settings.REDHAT_PASSWORD):
    if not (
        settings.AUTOMATION_ANALYTICS_URL
        and ((settings.REDHAT_USERNAME and settings.REDHAT_PASSWORD) or (settings.SUBSCRIPTIONS_CLIENT_ID and settings.SUBSCRIPTIONS_CLIENT_SECRET))
    ):
        logger.log(log_level, "Not gathering analytics, configuration is invalid. Use --dry-run to gather locally without sending.")
        return None
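The new guard accepts either credential pair: the legacy REDHAT_USERNAME/REDHAT_PASSWORD combination or the SUBSCRIPTIONS_CLIENT_ID/SUBSCRIPTIONS_CLIENT_SECRET service-account pair. A minimal sketch of that check in isolation (resolve_analytics_credentials is a hypothetical helper, not AWX source; the legacy pair is tried first, mirroring ship() further below):

def resolve_analytics_credentials(settings):
    # Prefer the legacy Red Hat username/password pair when both are set...
    if settings.REDHAT_USERNAME and settings.REDHAT_PASSWORD:
        return (settings.REDHAT_USERNAME, settings.REDHAT_PASSWORD)
    # ...otherwise fall back to the subscriptions client-credential pair.
    if settings.SUBSCRIPTIONS_CLIENT_ID and settings.SUBSCRIPTIONS_CLIENT_SECRET:
        return (settings.SUBSCRIPTIONS_CLIENT_ID, settings.SUBSCRIPTIONS_CLIENT_SECRET)
    # Neither pair is complete: analytics gathering is skipped.
    return None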
@@ -318,10 +324,10 @@ def gather(dest=None, module=None, subset=None, since=None, until=None, collecti
    settings.AUTOMATION_ANALYTICS_LAST_ENTRIES = json.dumps(last_entries, cls=DjangoJSONEncoder)

    if collection_type != 'dry-run':
        if succeeded:
            for fpath in tarfiles:
                if os.path.exists(fpath):
                    os.remove(fpath)
        for fpath in tarfiles:
            if os.path.exists(fpath):
                os.remove(fpath)

        with disable_activity_stream():
            if not settings.AUTOMATION_ANALYTICS_LAST_GATHER or until > settings.AUTOMATION_ANALYTICS_LAST_GATHER:
                # `AUTOMATION_ANALYTICS_LAST_GATHER` is set whether collection succeeds or fails;
@@ -361,21 +367,35 @@ def ship(path):
    if not url:
        logger.error('AUTOMATION_ANALYTICS_URL is not set')
        return False
    rh_user = getattr(settings, 'REDHAT_USERNAME', None)
    rh_password = getattr(settings, 'REDHAT_PASSWORD', None)
    if not rh_user:
        logger.error('REDHAT_USERNAME is not set')

    rh_id = getattr(settings, 'REDHAT_USERNAME', None)
    rh_secret = getattr(settings, 'REDHAT_PASSWORD', None)

    if not (rh_id and rh_secret):
        rh_id = getattr(settings, 'SUBSCRIPTIONS_CLIENT_ID', None)
        rh_secret = getattr(settings, 'SUBSCRIPTIONS_CLIENT_SECRET', None)

    if not rh_id:
        logger.error('Neither REDHAT_USERNAME nor SUBSCRIPTIONS_CLIENT_ID are set')
        return False
    if not rh_password:
        logger.error('REDHAT_PASSWORD is not set')

    if not rh_secret:
        logger.error('Neither REDHAT_PASSWORD nor SUBSCRIPTIONS_CLIENT_SECRET are set')
        return False

    with open(path, 'rb') as f:
        files = {'file': (os.path.basename(path), f, settings.INSIGHTS_AGENT_MIME)}
        s = requests.Session()
        s.headers = get_awx_http_client_headers()
        s.headers.pop('Content-Type')
        with set_environ(**settings.AWX_TASK_ENV):
            response = s.post(url, files=files, verify=settings.INSIGHTS_CERT_PATH, auth=(rh_user, rh_password), headers=s.headers, timeout=(31, 31))
        try:
            client = OIDCClient(rh_id, rh_secret)
            response = client.make_request("POST", url, headers=s.headers, files=files, verify=settings.INSIGHTS_CERT_PATH, timeout=(31, 31))
        except requests.RequestException:
            logger.error("Automation Analytics API request failed, trying base auth method")
            response = s.post(url, files=files, verify=settings.INSIGHTS_CERT_PATH, auth=(rh_id, rh_secret), headers=s.headers, timeout=(31, 31))

        # Accept 2XX status_codes
        if response.status_code >= 300:
            logger.error('Upload failed with status {}, {}'.format(response.status_code, response.text))
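The upload now tries the OIDC client first and falls back to HTTP basic auth when the token-based request fails at the transport level. A standalone sketch of that shape (post_with_token is a hypothetical stand-in for the OIDC client call; only requests is real):

import requests

def post_with_fallback(url, files, post_with_token, basic_auth, timeout=(31, 31)):
    # Any requests.RequestException from the token-based call (connection
    # errors, timeouts, etc.) triggers the basic-auth retry below.
    try:
        return post_with_token(url, files=files, timeout=timeout)
    except requests.RequestException:
        return requests.post(url, files=files, auth=basic_auth, timeout=timeout)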
@@ -128,6 +128,7 @@ def metrics():
        registry=REGISTRY,
    )

    LICENSE_EXPIRY = Gauge('awx_license_expiry', 'Time before license expires', registry=REGISTRY)
    LICENSE_INSTANCE_TOTAL = Gauge('awx_license_instance_total', 'Total number of managed hosts provided by your license', registry=REGISTRY)
    LICENSE_INSTANCE_FREE = Gauge('awx_license_instance_free', 'Number of remaining managed hosts provided by your license', registry=REGISTRY)

@@ -148,6 +149,7 @@ def metrics():
        }
    )

    LICENSE_EXPIRY.set(str(license_info.get('time_remaining', 0)))
    LICENSE_INSTANCE_TOTAL.set(str(license_info.get('instance_count', 0)))
    LICENSE_INSTANCE_FREE.set(str(license_info.get('free_instances', 0)))


@@ -9,6 +9,7 @@ from prometheus_client.core import GaugeMetricFamily, HistogramMetricFamily
from prometheus_client.registry import CollectorRegistry
from django.conf import settings
from django.http import HttpRequest
import redis.exceptions
from rest_framework.request import Request

from awx.main.consumers import emit_channel_notification
@@ -43,11 +44,12 @@ class MetricsServer(MetricsServerSettings):


class BaseM:
    def __init__(self, field, help_text):
    def __init__(self, field, help_text, labels=None):
        self.field = field
        self.help_text = help_text
        self.current_value = 0
        self.metric_has_changed = False
        self.labels = labels or {}

    def reset_value(self, conn):
        conn.hset(root_key, self.field, 0)
@@ -68,12 +70,16 @@ class BaseM:
        value = conn.hget(root_key, self.field)
        return self.decode_value(value)

    def to_prometheus(self, instance_data):
    def to_prometheus(self, instance_data, namespace=None):
        output_text = f"# HELP {self.field} {self.help_text}\n# TYPE {self.field} gauge\n"
        for instance in instance_data:
            if self.field in instance_data[instance]:
                # Build label string
                labels = f'node="{instance}"'
                if namespace:
                    labels += f',subsystem="{namespace}"'
                # on upgrade, if there are stale instances, we can end up with issues where new metrics are not present
                output_text += f'{self.field}{{node="{instance}"}} {instance_data[instance][self.field]}\n'
                output_text += f'{self.field}{{{labels}}} {instance_data[instance][self.field]}\n'
        return output_text
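to_prometheus() writes the Prometheus text exposition format by hand, and the new namespace argument appends an optional subsystem label to each sample. A self-contained sketch of the same label construction (gauge_line is illustrative, not AWX source):

def gauge_line(field, node, value, namespace=None):
    # Exposition format: metric_name{label="value",...} sample_value
    labels = f'node="{node}"'
    if namespace:
        labels += f',subsystem="{namespace}"'
    return f'{field}{{{labels}}} {value}\n'

# gauge_line('subsystem_metrics_pipe_execute_calls', 'node1', 7, 'dispatcher')
# -> 'subsystem_metrics_pipe_execute_calls{node="node1",subsystem="dispatcher"} 7\n'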
@@ -166,14 +172,17 @@ class HistogramM(BaseM):
        self.sum.store_value(conn)
        self.inf.store_value(conn)

    def to_prometheus(self, instance_data):
    def to_prometheus(self, instance_data, namespace=None):
        output_text = f"# HELP {self.field} {self.help_text}\n# TYPE {self.field} histogram\n"
        for instance in instance_data:
            # Build label string
            node_label = f'node="{instance}"'
            subsystem_label = f',subsystem="{namespace}"' if namespace else ''
            for i, b in enumerate(self.buckets):
                output_text += f'{self.field}_bucket{{le="{b}",node="{instance}"}} {sum(instance_data[instance][self.field]["counts"][0:i+1])}\n'
            output_text += f'{self.field}_bucket{{le="+Inf",node="{instance}"}} {instance_data[instance][self.field]["inf"]}\n'
            output_text += f'{self.field}_count{{node="{instance}"}} {instance_data[instance][self.field]["inf"]}\n'
            output_text += f'{self.field}_sum{{node="{instance}"}} {instance_data[instance][self.field]["sum"]}\n'
                output_text += f'{self.field}_bucket{{le="{b}",{node_label}{subsystem_label}}} {sum(instance_data[instance][self.field]["counts"][0:i+1])}\n'
            output_text += f'{self.field}_bucket{{le="+Inf",{node_label}{subsystem_label}}} {instance_data[instance][self.field]["inf"]}\n'
            output_text += f'{self.field}_count{{{node_label}{subsystem_label}}} {instance_data[instance][self.field]["inf"]}\n'
            output_text += f'{self.field}_sum{{{node_label}{subsystem_label}}} {instance_data[instance][self.field]["sum"]}\n'
        return output_text
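Prometheus histogram buckets are cumulative, which is why each _bucket sample sums the raw per-bucket counts up to and including its upper bound, and the +Inf bucket equals the total observation count. A small sketch of that accumulation, independent of the class above (histogram_lines is illustrative):

def histogram_lines(field, node, bounds, counts, total, total_sum):
    # counts[i] holds the observations that landed in bucket i; the exposition
    # format wants the running total per upper bound ("le" = less-or-equal).
    out = []
    for i, bound in enumerate(bounds):
        out.append(f'{field}_bucket{{le="{bound}",node="{node}"}} {sum(counts[: i + 1])}')
    out.append(f'{field}_bucket{{le="+Inf",node="{node}"}} {total}')
    out.append(f'{field}_count{{node="{node}"}} {total}')
    out.append(f'{field}_sum{{node="{node}"}} {total_sum}')
    return '\n'.join(out)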
@@ -272,26 +281,32 @@ class Metrics(MetricsNamespace):

    def pipe_execute(self):
        if self.metrics_have_changed is True:
            duration_to_save = time.perf_counter()
            duration_pipe_exec = time.perf_counter()
            for m in self.METRICS:
                self.METRICS[m].store_value(self.pipe)
            self.pipe.execute()
            self.last_pipe_execute = time.time()
            self.metrics_have_changed = False
            duration_to_save = time.perf_counter() - duration_to_save
            self.METRICS['subsystem_metrics_pipe_execute_seconds'].inc(duration_to_save)
            self.METRICS['subsystem_metrics_pipe_execute_calls'].inc(1)
            duration_pipe_exec = time.perf_counter() - duration_pipe_exec

            duration_to_save = time.perf_counter()
            duration_send_metrics = time.perf_counter()
            self.send_metrics()
            duration_to_save = time.perf_counter() - duration_to_save
            self.METRICS['subsystem_metrics_send_metrics_seconds'].inc(duration_to_save)
            duration_send_metrics = time.perf_counter() - duration_send_metrics

            # Increment operational metrics
            self.METRICS['subsystem_metrics_pipe_execute_seconds'].inc(duration_pipe_exec)
            self.METRICS['subsystem_metrics_pipe_execute_calls'].inc(1)
            self.METRICS['subsystem_metrics_send_metrics_seconds'].inc(duration_send_metrics)

    def send_metrics(self):
        # more than one thread could be calling this at the same time, so should
        # acquire redis lock before sending metrics
        lock = self.conn.lock(root_key + '-' + self._namespace + '_lock')
        if not lock.acquire(blocking=False):
        try:
            lock = self.conn.lock(root_key + '-' + self._namespace + '_lock')
            if not lock.acquire(blocking=False):
                return
        except redis.exceptions.ConnectionError as exc:
            logger.warning(f'Connection error in send_metrics: {exc}')
            return
        try:
            current_time = time.time()
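send_metrics() now wraps the non-blocking lock acquisition in a try/except so a Redis outage degrades to a skipped send instead of an unhandled exception. A minimal sketch of the pattern using the real redis-py lock API (send_once and do_send are illustrative names):

import redis

def send_once(conn: redis.Redis, key: str, do_send) -> bool:
    lock = conn.lock(key + '_lock')
    try:
        # blocking=False: if another worker holds the lock, skip this cycle.
        if not lock.acquire(blocking=False):
            return False
    except redis.exceptions.ConnectionError:
        # Redis being unreachable should not crash the caller.
        return False
    try:
        do_send()
        return True
    finally:
        lock.release()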
@@ -347,7 +362,13 @@ class Metrics(MetricsNamespace):
        if instance_data:
            for field in self.METRICS:
                if len(metrics_filter) == 0 or field in metrics_filter:
                    output_text += self.METRICS[field].to_prometheus(instance_data)
                    # Add subsystem label only for operational metrics
                    namespace = (
                        self._namespace
                        if field in ['subsystem_metrics_pipe_execute_seconds', 'subsystem_metrics_pipe_execute_calls', 'subsystem_metrics_send_metrics_seconds']
                        else None
                    )
                    output_text += self.METRICS[field].to_prometheus(instance_data, namespace)
        return output_text


@@ -435,7 +456,10 @@ class CustomToPrometheusMetricsCollector(prometheus_client.registry.Collector):
            logger.debug(f"No metric data found in redis for metric namespace '{self._metrics._namespace}'")
            return None

        host_metrics = instance_data.get(my_hostname)
        if not (host_metrics := instance_data.get(my_hostname)):
            logger.debug(f"Metric data for this node '{my_hostname}' not found in redis for metric namespace '{self._metrics._namespace}'")
            return None

        for _, metric in self._metrics.METRICS.items():
            entry = host_metrics.get(metric.field)
            if not entry:
@@ -452,14 +476,14 @@ class CustomToPrometheusMetricsCollector(prometheus_client.registry.Collector):
class CallbackReceiverMetricsServer(MetricsServer):
    def __init__(self):
        registry = CollectorRegistry(auto_describe=True)
        registry.register(CustomToPrometheusMetricsCollector(DispatcherMetrics(metrics_have_changed=False)))
        registry.register(CustomToPrometheusMetricsCollector(CallbackReceiverMetrics(metrics_have_changed=False)))
        super().__init__(settings.METRICS_SERVICE_CALLBACK_RECEIVER, registry)


class DispatcherMetricsServer(MetricsServer):
    def __init__(self):
        registry = CollectorRegistry(auto_describe=True)
        registry.register(CustomToPrometheusMetricsCollector(CallbackReceiverMetrics(metrics_have_changed=False)))
        registry.register(CustomToPrometheusMetricsCollector(DispatcherMetrics(metrics_have_changed=False)))
        super().__init__(settings.METRICS_SERVICE_DISPATCHER, registry)
@@ -1,8 +1,17 @@
import os

from dispatcherd.config import setup as dispatcher_setup

from django.apps import AppConfig
from django.db import connection
from django.utils.translation import gettext_lazy as _
from awx.main.utils.common import bypass_in_test, load_all_entry_points_for
from awx.main.utils.migration import is_database_synchronized
from awx.main.utils.named_url_graph import _customize_graph, generate_graph
from awx.conf import register, fields

from awx_plugins.interfaces._temporary_private_licensing_api import detect_server_product_name


class MainConfig(AppConfig):
    name = 'awx.main'
@@ -34,7 +43,70 @@ class MainConfig(AppConfig):
            category_slug='named-url',
        )

    def _load_credential_types_feature(self):
        """
        Create CredentialType records for any discovered credentials.

        Note that Django docs advise _against_ interacting with the database using
        the ORM models in the ready() path, specifically during testing.
        However, we explicitly use the @bypass_in_test decorator to avoid calling this
        method during testing.

        Django also advises against this pattern because ready() runs everywhere, i.e.
        for every management command. We use an advisory lock to ensure correctness and
        we will deal with performance if it becomes an issue.
        """
        from awx.main.models.credential import CredentialType

        if is_database_synchronized():
            CredentialType.setup_tower_managed_defaults(app_config=self)

    @bypass_in_test
    def load_credential_types_feature(self):
        from awx.main.models.credential import load_credentials

        load_credentials()
        return self._load_credential_types_feature()

    def load_inventory_plugins(self):
        from awx.main.models.inventory import InventorySourceOptions

        is_awx = detect_server_product_name() == 'AWX'
        extra_entry_point_groups = () if is_awx else ('inventory.supported',)
        entry_points = load_all_entry_points_for(['inventory', *extra_entry_point_groups])

        for entry_point_name, entry_point in entry_points.items():
            cls = entry_point.load()
            InventorySourceOptions.injectors[entry_point_name] = cls

    def configure_dispatcherd(self):
        """This implements the default configuration for dispatcherd.

        If running the tasking service like awx-manage run_dispatcher,
        some additional config will be applied on top of this.
        This configuration provides the minimum needed so that code can submit
        tasks via pg_notify to run those tasks.
        """
        from awx.main.dispatch.config import get_dispatcherd_config

        if connection.vendor != 'postgresql':
            config_dict = get_dispatcherd_config(mock_publish=True)
        else:
            config_dict = get_dispatcherd_config()

        dispatcher_setup(config_dict)

    def ready(self):
        super().ready()

        self.configure_dispatcherd()

        """
        Credential loading triggers database operations. There are cases where we want to call
        awx-manage collectstatic without a database. All management commands invoke the ready() code
        path. Using settings.AWX_SKIP_CREDENTIAL_TYPES_DISCOVER _could_ invoke a database operation.
        """
        if not os.environ.get('AWX_SKIP_CREDENTIAL_TYPES_DISCOVER', None):
            self.load_credential_types_feature()
        self.load_named_url_feature()
        self.load_inventory_plugins()
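ready() gates the database-touching credential discovery behind an environment variable because every management command, including ones that must run without a database such as collectstatic, goes through this hook. A condensed sketch of the same guard for a generic Django app (ExampleConfig and EXAMPLE_SKIP_DB_SETUP are hypothetical):

import os

from django.apps import AppConfig


class ExampleConfig(AppConfig):
    name = 'example'

    def ready(self):
        super().ready()
        # DB-backed setup is opt-out via an environment variable rather than
        # unconditional, so commands that run without a database can still start.
        if not os.environ.get('EXAMPLE_SKIP_DB_SETUP'):
            self.load_db_backed_features()

    def load_db_backed_features(self):
        # Placeholder for ORM work guarded as above.
        pass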
awx/main/conf.py
@@ -12,6 +12,7 @@ from rest_framework import serializers
from awx.conf import fields, register, register_validate
from awx.main.models import ExecutionEnvironment
from awx.main.constants import SUBSCRIPTION_USAGE_MODEL_UNIQUE_HOSTS
from awx.main.tasks.policy import OPA_AUTH_TYPES

logger = logging.getLogger('awx.main.conf')

@@ -46,10 +47,7 @@ register(
    'MANAGE_ORGANIZATION_AUTH',
    field_class=fields.BooleanField,
    label=_('Organization Admins Can Manage Users and Teams'),
    help_text=_(
        'Controls whether any Organization Admin has the privileges to create and manage users and teams. '
        'You may want to disable this ability if you are using an LDAP or SAML integration.'
    ),
    help_text=_('Controls whether any Organization Admin has the privileges to create and manage users and teams.'),
    category=_('System'),
    category_slug='system',
)
@@ -93,7 +91,6 @@ register(
    ),
    category=_('System'),
    category_slug='system',
    required=False,
)

register(
@@ -108,6 +105,7 @@ register(
    ),
    category=_('System'),
    category_slug='system',
    hidden=True,
)

register(
@@ -127,8 +125,8 @@ register(
    allow_blank=True,
    encrypted=False,
    read_only=False,
    label=_('Red Hat customer username'),
    help_text=_('This username is used to send data to Automation Analytics'),
    label=_('Red Hat Client ID for Analytics'),
    help_text=_('Client ID used to send data to Automation Analytics'),
    category=_('System'),
    category_slug='system',
)
@@ -140,8 +138,8 @@ register(
    allow_blank=True,
    encrypted=True,
    read_only=False,
    label=_('Red Hat customer password'),
    help_text=_('This password is used to send data to Automation Analytics'),
    label=_('Red Hat Client Secret for Analytics'),
    help_text=_('Client secret used to send data to Automation Analytics'),
    category=_('System'),
    category_slug='system',
)
@@ -153,10 +151,11 @@ register(
    allow_blank=True,
    encrypted=False,
    read_only=False,
    label=_('Red Hat or Satellite username'),
    help_text=_('This username is used to retrieve subscription and content information'), # noqa
    label=_('Red Hat Username for Subscriptions'),
    help_text=_('Username used to retrieve subscription and content information'), # noqa
    category=_('System'),
    category_slug='system',
    hidden=True,
)

register(
@@ -166,10 +165,40 @@ register(
    allow_blank=True,
    encrypted=True,
    read_only=False,
    label=_('Red Hat or Satellite password'),
    help_text=_('This password is used to retrieve subscription and content information'), # noqa
    label=_('Red Hat Password for Subscriptions'),
    help_text=_('Password used to retrieve subscription and content information'), # noqa
    category=_('System'),
    category_slug='system',
    hidden=True,
)


register(
    'SUBSCRIPTIONS_CLIENT_ID',
    field_class=fields.CharField,
    default='',
    allow_blank=True,
    encrypted=False,
    read_only=False,
    label=_('Red Hat Client ID for Subscriptions'),
    help_text=_('Client ID used to retrieve subscription and content information'), # noqa
    category=_('System'),
    category_slug='system',
    hidden=True,
)

register(
    'SUBSCRIPTIONS_CLIENT_SECRET',
    field_class=fields.CharField,
    default='',
    allow_blank=True,
    encrypted=True,
    read_only=False,
    label=_('Red Hat Client Secret for Subscriptions'),
    help_text=_('Client secret used to retrieve subscription and content information'), # noqa
    category=_('System'),
    category_slug='system',
    hidden=True,
)

register(
@@ -240,7 +269,6 @@ register(
    help_text=_('List of modules allowed to be used by ad-hoc jobs.'),
    category=_('Jobs'),
    category_slug='jobs',
    required=False,
)

register(
@@ -251,7 +279,6 @@ register(
        ('never', _('Never')),
        ('template', _('Only On Job Template Definitions')),
    ],
    required=True,
    label=_('When can extra variables contain Jinja templates?'),
    help_text=_(
        'Ansible allows variable substitution via the Jinja2 templating '
@@ -276,7 +303,6 @@ register(
register(
    'AWX_ISOLATION_SHOW_PATHS',
    field_class=fields.StringListIsolatedPathField,
    required=False,
    label=_('Paths to expose to isolated jobs'),
    help_text=_(
        'List of paths that would otherwise be hidden to expose to isolated jobs. Enter one path per line. '
@@ -442,7 +468,6 @@ register(
register(
    'AWX_ANSIBLE_CALLBACK_PLUGINS',
    field_class=fields.StringListField,
    required=False,
    label=_('Ansible Callback Plugins'),
    help_text=_('List of paths to search for extra callback plugins to be used when running jobs. Enter one path per line.'),
    category=_('Jobs'),
@@ -556,7 +581,6 @@ register(
    help_text=_('Port on Logging Aggregator to send logs to (if required and not provided in Logging Aggregator).'),
    category=_('Logging'),
    category_slug='logging',
    required=False,
)
register(
    'LOG_AGGREGATOR_TYPE',
@@ -578,7 +602,6 @@ register(
    help_text=_('Username for external log aggregator (if required; HTTP/s only).'),
    category=_('Logging'),
    category_slug='logging',
    required=False,
)
register(
    'LOG_AGGREGATOR_PASSWORD',
@@ -590,12 +613,11 @@ register(
    help_text=_('Password or authentication token for external log aggregator (if required; HTTP/s only).'),
    category=_('Logging'),
    category_slug='logging',
    required=False,
)
register(
    'LOG_AGGREGATOR_LOGGERS',
    field_class=fields.StringListField,
    default=['awx', 'activity_stream', 'job_events', 'system_tracking', 'broadcast_websocket'],
    default=['awx', 'activity_stream', 'job_events', 'system_tracking', 'broadcast_websocket', 'job_lifecycle'],
    label=_('Loggers Sending Data to Log Aggregator Form'),
    help_text=_(
        'List of loggers that will send HTTP logs to the collector, these can '
@@ -605,6 +627,7 @@ register(
        'job_events - callback data from Ansible job events\n'
        'system_tracking - facts gathered from scan jobs\n'
        'broadcast_websocket - errors pertaining to websockets broadcast metrics\n'
        'job_lifecycle - logs related to processing of a job\n'
    ),
    category=_('Logging'),
    category_slug='logging',
@@ -776,7 +799,7 @@ register(
    allow_null=True,
    category=_('System'),
    category_slug='system',
    required=False,
    hidden=True,
)
register(
    'AUTOMATION_ANALYTICS_LAST_ENTRIES',
@@ -868,6 +891,7 @@ register(
    allow_null=True,
    category=_('System'),
    category_slug='system',
    hidden=True,
)

register(
@@ -877,6 +901,7 @@ register(
    allow_null=True,
    category=_('System'),
    category_slug='system',
    hidden=True,
)

register(
@@ -979,3 +1004,134 @@ def csrf_trusted_origins_validate(serializer, attrs):


register_validate('system', csrf_trusted_origins_validate)


register(
    'OPA_HOST',
    field_class=fields.CharField,
    label=_('OPA server hostname'),
    default='',
    help_text=_('The hostname used to connect to the OPA server. If empty, policy enforcement will be disabled.'),
    category=('PolicyAsCode'),
    category_slug='policyascode',
    allow_blank=True,
)

register(
    'OPA_PORT',
    field_class=fields.IntegerField,
    label=_('OPA server port'),
    default=8181,
    help_text=_('The port used to connect to the OPA server. Defaults to 8181.'),
    category=('PolicyAsCode'),
    category_slug='policyascode',
)

register(
    'OPA_SSL',
    field_class=fields.BooleanField,
    label=_('Use SSL for OPA connection'),
    default=False,
    help_text=_('Enable or disable the use of SSL to connect to the OPA server. Defaults to false.'),
    category=('PolicyAsCode'),
    category_slug='policyascode',
)

register(
    'OPA_AUTH_TYPE',
    field_class=fields.ChoiceField,
    label=_('OPA authentication type'),
    choices=[OPA_AUTH_TYPES.NONE, OPA_AUTH_TYPES.TOKEN, OPA_AUTH_TYPES.CERTIFICATE],
    default=OPA_AUTH_TYPES.NONE,
    help_text=_('The authentication type that will be used to connect to the OPA server: "None", "Token", or "Certificate".'),
    category=('PolicyAsCode'),
    category_slug='policyascode',
)

register(
    'OPA_AUTH_TOKEN',
    field_class=fields.CharField,
    label=_('OPA authentication token'),
    default='',
    help_text=_(
        'The token for authentication to the OPA server. Required when OPA_AUTH_TYPE is "Token". If an authorization header is defined in OPA_AUTH_CUSTOM_HEADERS, it will be overridden by OPA_AUTH_TOKEN.'
    ),
    category=('PolicyAsCode'),
    category_slug='policyascode',
    allow_blank=True,
    encrypted=True,
)

register(
    'OPA_AUTH_CLIENT_CERT',
    field_class=fields.CharField,
    label=_('OPA client certificate content'),
    default='',
    help_text=_('The content of the client certificate file for mTLS authentication to the OPA server. Required when OPA_AUTH_TYPE is "Certificate".'),
    category=('PolicyAsCode'),
    category_slug='policyascode',
    allow_blank=True,
)

register(
    'OPA_AUTH_CLIENT_KEY',
    field_class=fields.CharField,
    label=_('OPA client key content'),
    default='',
    help_text=_('The content of the client key for mTLS authentication to the OPA server. Required when OPA_AUTH_TYPE is "Certificate".'),
    category=('PolicyAsCode'),
    category_slug='policyascode',
    allow_blank=True,
    encrypted=True,
)

register(
    'OPA_AUTH_CA_CERT',
    field_class=fields.CharField,
    label=_('OPA CA certificate content'),
    default='',
    help_text=_('The content of the CA certificate for mTLS authentication to the OPA server. Required when OPA_AUTH_TYPE is "Certificate".'),
    category=('PolicyAsCode'),
    category_slug='policyascode',
    allow_blank=True,
)

register(
    'OPA_AUTH_CUSTOM_HEADERS',
    field_class=fields.DictField,
    label=_('OPA custom authentication headers'),
    default={},
    help_text=_('Optional custom headers included in requests to the OPA server. Defaults to empty dictionary ({}).'),
    category=('PolicyAsCode'),
    category_slug='policyascode',
)

register(
    'OPA_REQUEST_TIMEOUT',
    field_class=fields.FloatField,
    label=_('OPA request timeout'),
    default=1.5,
    help_text=_('The number of seconds after which the connection to the OPA server will time out. Defaults to 1.5 seconds.'),
    category=('PolicyAsCode'),
    category_slug='policyascode',
)

register(
    'OPA_REQUEST_RETRIES',
    field_class=fields.IntegerField,
    label=_('OPA request retry count'),
    default=2,
    help_text=_('The number of retry attempts for connecting to the OPA server. Default is 2.'),
    category=('PolicyAsCode'),
    category_slug='policyascode',
)


def policy_as_code_validate(serializer, attrs):
    opa_host = attrs.get('OPA_HOST', '')
    if opa_host and (opa_host.startswith('http://') or opa_host.startswith('https://')):
        raise serializers.ValidationError({'OPA_HOST': _("OPA_HOST should not include 'http://' or 'https://' prefixes. Please enter only the hostname.")})
    return attrs


register_validate('policyascode', policy_as_code_validate)
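OPA_HOST is validated to be a bare hostname; the scheme and port come from OPA_SSL and OPA_PORT. A hypothetical helper showing how these settings would compose into a base URL (opa_base_url is illustrative, not AWX source; /v1/data/ is OPA's documented data API prefix):

def opa_base_url(host: str, port: int = 8181, use_ssl: bool = False) -> str:
    # Mirrors the validator above: the host must carry no scheme of its own.
    if host.startswith(('http://', 'https://')):
        raise ValueError("OPA_HOST should not include 'http://' or 'https://'")
    scheme = 'https' if use_ssl else 'http'
    return f'{scheme}://{host}:{port}/v1/data/'

# opa_base_url('opa.example.com', 8181, True) -> 'https://opa.example.com:8181/v1/data/'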
@@ -6,7 +6,6 @@ import re
from django.utils.translation import gettext_lazy as _

__all__ = [
    'CLOUD_PROVIDERS',
    'PRIVILEGE_ESCALATION_METHODS',
    'ANSI_SGR_PATTERN',
    'CAN_CANCEL',
@@ -14,7 +13,6 @@ __all__ = [
    'STANDARD_INVENTORY_UPDATE_ENV',
]

CLOUD_PROVIDERS = ('azure_rm', 'ec2', 'gce', 'vmware', 'openstack', 'rhv', 'satellite6', 'controller', 'insights', 'terraform', 'openshift_virtualization')
PRIVILEGE_ESCALATION_METHODS = [
    ('sudo', _('Sudo')),
    ('su', _('Su')),
@@ -79,6 +77,8 @@ LOGGER_BLOCKLIST = (
    'awx.main.utils.log',
    # loggers that may be called getting logging settings
    'awx.conf',
    # dispatcherd should only use 1 database connection
    'dispatcherd',
)

# Reported version for node seen in receptor mesh but for which capacity check
@@ -1,126 +0,0 @@
from .plugin import CredentialPlugin, CertFiles, raise_for_status

from urllib.parse import quote, urlencode, urljoin

from django.utils.translation import gettext_lazy as _
import requests

aim_inputs = {
    'fields': [
        {
            'id': 'url',
            'label': _('CyberArk CCP URL'),
            'type': 'string',
            'format': 'url',
        },
        {
            'id': 'webservice_id',
            'label': _('Web Service ID'),
            'type': 'string',
            'help_text': _('The CCP Web Service ID. Leave blank to default to AIMWebService.'),
        },
        {
            'id': 'app_id',
            'label': _('Application ID'),
            'type': 'string',
            'secret': True,
        },
        {
            'id': 'client_key',
            'label': _('Client Key'),
            'type': 'string',
            'secret': True,
            'multiline': True,
        },
        {
            'id': 'client_cert',
            'label': _('Client Certificate'),
            'type': 'string',
            'secret': True,
            'multiline': True,
        },
        {
            'id': 'verify',
            'label': _('Verify SSL Certificates'),
            'type': 'boolean',
            'default': True,
        },
    ],
    'metadata': [
        {
            'id': 'object_query',
            'label': _('Object Query'),
            'type': 'string',
            'help_text': _('Lookup query for the object. Ex: Safe=TestSafe;Object=testAccountName123'),
        },
        {'id': 'object_query_format', 'label': _('Object Query Format'), 'type': 'string', 'default': 'Exact', 'choices': ['Exact', 'Regexp']},
        {
            'id': 'object_property',
            'label': _('Object Property'),
            'type': 'string',
            'help_text': _('The property of the object to return. Available properties: Username, Password and Address.'),
        },
        {
            'id': 'reason',
            'label': _('Reason'),
            'type': 'string',
            'help_text': _('Object request reason. This is only needed if it is required by the object\'s policy.'),
        },
    ],
    'required': ['url', 'app_id', 'object_query'],
}


def aim_backend(**kwargs):
    url = kwargs['url']
    client_cert = kwargs.get('client_cert', None)
    client_key = kwargs.get('client_key', None)
    verify = kwargs['verify']
    webservice_id = kwargs.get('webservice_id', '')
    app_id = kwargs['app_id']
    object_query = kwargs['object_query']
    object_query_format = kwargs['object_query_format']
    object_property = kwargs.get('object_property', '')
    reason = kwargs.get('reason', None)
    if webservice_id == '':
        webservice_id = 'AIMWebService'

    query_params = {
        'AppId': app_id,
        'Query': object_query,
        'QueryFormat': object_query_format,
    }
    if reason:
        query_params['reason'] = reason

    request_qs = '?' + urlencode(query_params, quote_via=quote)
    request_url = urljoin(url, '/'.join([webservice_id, 'api', 'Accounts']))

    with CertFiles(client_cert, client_key) as cert:
        res = requests.get(
            request_url + request_qs,
            timeout=30,
            cert=cert,
            verify=verify,
            allow_redirects=False,
        )
    raise_for_status(res)
    # CCP returns the property name capitalized, username is camel case
    # so we need to handle that case
    if object_property == '':
        object_property = 'Content'
    elif object_property.lower() == 'username':
        object_property = 'UserName'
    elif object_property.lower() == 'password':
        object_property = 'Content'
    elif object_property.lower() == 'address':
        object_property = 'Address'
    elif object_property not in res:
        raise KeyError('Property {} not found in object, available properties: Username, Password and Address'.format(object_property))
    else:
        object_property = object_property.capitalize()

    return res.json()[object_property]


aim_plugin = CredentialPlugin('CyberArk Central Credential Provider Lookup', inputs=aim_inputs, backend=aim_backend)
@@ -1,65 +0,0 @@
import boto3
from botocore.exceptions import ClientError

from .plugin import CredentialPlugin
from django.utils.translation import gettext_lazy as _


secrets_manager_inputs = {
    'fields': [
        {
            'id': 'aws_access_key',
            'label': _('AWS Access Key'),
            'type': 'string',
        },
        {
            'id': 'aws_secret_key',
            'label': _('AWS Secret Key'),
            'type': 'string',
            'secret': True,
        },
    ],
    'metadata': [
        {
            'id': 'region_name',
            'label': _('AWS Secrets Manager Region'),
            'type': 'string',
            'help_text': _('Region which the secrets manager is located'),
        },
        {
            'id': 'secret_name',
            'label': _('AWS Secret Name'),
            'type': 'string',
        },
    ],
    'required': ['aws_access_key', 'aws_secret_key', 'region_name', 'secret_name'],
}


def aws_secretsmanager_backend(**kwargs):
    secret_name = kwargs['secret_name']
    region_name = kwargs['region_name']
    aws_secret_access_key = kwargs['aws_secret_key']
    aws_access_key_id = kwargs['aws_access_key']

    session = boto3.session.Session()
    client = session.client(
        service_name='secretsmanager', region_name=region_name, aws_secret_access_key=aws_secret_access_key, aws_access_key_id=aws_access_key_id
    )

    try:
        get_secret_value_response = client.get_secret_value(SecretId=secret_name)
    except ClientError as e:
        raise e
    # Secrets Manager decrypts the secret value using the associated KMS CMK
    # Depending on whether the secret was a string or binary, only one of these fields will be populated
    if 'SecretString' in get_secret_value_response:
        secret = get_secret_value_response['SecretString']

    else:
        secret = get_secret_value_response['SecretBinary']

    return secret


aws_secretmanager_plugin = CredentialPlugin('AWS Secrets Manager lookup', inputs=secrets_manager_inputs, backend=aws_secretsmanager_backend)
@@ -1,63 +0,0 @@
from azure.keyvault.secrets import SecretClient
from azure.identity import ClientSecretCredential
from msrestazure import azure_cloud

from .plugin import CredentialPlugin

from django.utils.translation import gettext_lazy as _


# https://github.com/Azure/msrestazure-for-python/blob/master/msrestazure/azure_cloud.py
clouds = [vars(azure_cloud)[n] for n in dir(azure_cloud) if n.startswith("AZURE_") and n.endswith("_CLOUD")]
default_cloud = vars(azure_cloud)["AZURE_PUBLIC_CLOUD"]


azure_keyvault_inputs = {
    'fields': [
        {
            'id': 'url',
            'label': _('Vault URL (DNS Name)'),
            'type': 'string',
            'format': 'url',
        },
        {'id': 'client', 'label': _('Client ID'), 'type': 'string'},
        {
            'id': 'secret',
            'label': _('Client Secret'),
            'type': 'string',
            'secret': True,
        },
        {'id': 'tenant', 'label': _('Tenant ID'), 'type': 'string'},
        {
            'id': 'cloud_name',
            'label': _('Cloud Environment'),
            'help_text': _('Specify which azure cloud environment to use.'),
            'choices': list(set([default_cloud.name] + [c.name for c in clouds])),
            'default': default_cloud.name,
        },
    ],
    'metadata': [
        {
            'id': 'secret_field',
            'label': _('Secret Name'),
            'type': 'string',
            'help_text': _('The name of the secret to look up.'),
        },
        {
            'id': 'secret_version',
            'label': _('Secret Version'),
            'type': 'string',
            'help_text': _('Used to specify a specific secret version (if left empty, the latest version will be used).'),
        },
    ],
    'required': ['url', 'client', 'secret', 'tenant', 'secret_field'],
}


def azure_keyvault_backend(**kwargs):
    csc = ClientSecretCredential(tenant_id=kwargs['tenant'], client_id=kwargs['client'], client_secret=kwargs['secret'])
    kv = SecretClient(credential=csc, vault_url=kwargs['url'])
    return kv.get_secret(name=kwargs['secret_field'], version=kwargs.get('secret_version', '')).value


azure_keyvault_plugin = CredentialPlugin('Microsoft Azure Key Vault', inputs=azure_keyvault_inputs, backend=azure_keyvault_backend)
@@ -1,115 +0,0 @@
from .plugin import CredentialPlugin, raise_for_status
from django.utils.translation import gettext_lazy as _
from urllib.parse import urljoin
import requests

pas_inputs = {
    'fields': [
        {
            'id': 'url',
            'label': _('Centrify Tenant URL'),
            'type': 'string',
            'help_text': _('Centrify Tenant URL'),
            'format': 'url',
        },
        {
            'id': 'client_id',
            'label': _('Centrify API User'),
            'type': 'string',
            'help_text': _('Centrify API User, having necessary permissions as mentioned in support doc'),
        },
        {
            'id': 'client_password',
            'label': _('Centrify API Password'),
            'type': 'string',
            'help_text': _('Password of Centrify API User with necessary permissions'),
            'secret': True,
        },
        {
            'id': 'oauth_application_id',
            'label': _('OAuth2 Application ID'),
            'type': 'string',
            'help_text': _('Application ID of the configured OAuth2 Client (defaults to \'awx\')'),
            'default': 'awx',
        },
        {
            'id': 'oauth_scope',
            'label': _('OAuth2 Scope'),
            'type': 'string',
            'help_text': _('Scope of the configured OAuth2 Client (defaults to \'awx\')'),
            'default': 'awx',
        },
    ],
    'metadata': [
        {
            'id': 'account-name',
            'label': _('Account Name'),
            'type': 'string',
            'help_text': _('Local system account or Domain account name enrolled in Centrify Vault. eg. (root or DOMAIN/Administrator)'),
        },
        {
            'id': 'system-name',
            'label': _('System Name'),
            'type': 'string',
            'help_text': _('Machine Name enrolled with in Centrify Portal'),
        },
    ],
    'required': ['url', 'account-name', 'system-name', 'client_id', 'client_password'],
}


# generate bearer token to authenticate with PAS portal, Input : Client ID, Client Secret
def handle_auth(**kwargs):
    post_data = {"grant_type": "client_credentials", "scope": kwargs['oauth_scope']}
    response = requests.post(kwargs['endpoint'], data=post_data, auth=(kwargs['client_id'], kwargs['client_password']), verify=True, timeout=(5, 30))
    raise_for_status(response)
    try:
        return response.json()['access_token']
    except KeyError:
        raise RuntimeError('OAuth request to tenant was unsuccessful')


# fetch the ID of system with RedRock query, Input : System Name, Account Name
def get_ID(**kwargs):
    endpoint = urljoin(kwargs['url'], '/Redrock/query')
    name = " Name='{0}' and User='{1}'".format(kwargs['system_name'], kwargs['acc_name'])
    query = 'Select ID from VaultAccount where {0}'.format(name)
    post_headers = {"Authorization": "Bearer " + kwargs['access_token'], "X-CENTRIFY-NATIVE-CLIENT": "true"}
    response = requests.post(endpoint, json={'Script': query}, headers=post_headers, verify=True, timeout=(5, 30))
    raise_for_status(response)
    try:
        result_str = response.json()["Result"]["Results"]
        return result_str[0]["Row"]["ID"]
    except (IndexError, KeyError):
        raise RuntimeError("Error Detected!! Check the Inputs")


# CheckOut Password from Centrify Vault, Input : ID
def get_passwd(**kwargs):
    endpoint = urljoin(kwargs['url'], '/ServerManage/CheckoutPassword')
    post_headers = {"Authorization": "Bearer " + kwargs['access_token'], "X-CENTRIFY-NATIVE-CLIENT": "true"}
    response = requests.post(endpoint, json={'ID': kwargs['acc_id']}, headers=post_headers, verify=True, timeout=(5, 30))
    raise_for_status(response)
    try:
        return response.json()["Result"]["Password"]
    except KeyError:
        raise RuntimeError("Password Not Found")


def centrify_backend(**kwargs):
    url = kwargs.get('url')
    acc_name = kwargs.get('account-name')
    system_name = kwargs.get('system-name')
    client_id = kwargs.get('client_id')
    client_password = kwargs.get('client_password')
    app_id = kwargs.get('oauth_application_id', 'awx')
    endpoint = urljoin(url, f'/oauth2/token/{app_id}')
    endpoint = {'endpoint': endpoint, 'client_id': client_id, 'client_password': client_password, 'oauth_scope': kwargs.get('oauth_scope', 'awx')}
    token = handle_auth(**endpoint)
    get_id_args = {'system_name': system_name, 'acc_name': acc_name, 'url': url, 'access_token': token}
    acc_id = get_ID(**get_id_args)
    get_pwd_args = {'url': url, 'acc_id': acc_id, 'access_token': token}
    return get_passwd(**get_pwd_args)


centrify_plugin = CredentialPlugin('Centrify Vault Credential Provider Lookup', inputs=pas_inputs, backend=centrify_backend)
@@ -1,112 +0,0 @@
from .plugin import CredentialPlugin, CertFiles, raise_for_status

from urllib.parse import urljoin, quote

from django.utils.translation import gettext_lazy as _
import requests
import base64
import binascii


conjur_inputs = {
    'fields': [
        {
            'id': 'url',
            'label': _('Conjur URL'),
            'type': 'string',
            'format': 'url',
        },
        {
            'id': 'api_key',
            'label': _('API Key'),
            'type': 'string',
            'secret': True,
        },
        {
            'id': 'account',
            'label': _('Account'),
            'type': 'string',
        },
        {
            'id': 'username',
            'label': _('Username'),
            'type': 'string',
        },
        {'id': 'cacert', 'label': _('Public Key Certificate'), 'type': 'string', 'multiline': True},
    ],
    'metadata': [
        {
            'id': 'secret_path',
            'label': _('Secret Identifier'),
            'type': 'string',
            'help_text': _('The identifier for the secret e.g., /some/identifier'),
        },
        {
            'id': 'secret_version',
            'label': _('Secret Version'),
            'type': 'string',
            'help_text': _('Used to specify a specific secret version (if left empty, the latest version will be used).'),
        },
    ],
    'required': ['url', 'api_key', 'account', 'username'],
}


def _is_base64(s: str) -> bool:
    try:
        return base64.b64encode(base64.b64decode(s.encode("utf-8"))) == s.encode("utf-8")
    except binascii.Error:
        return False


def conjur_backend(**kwargs):
    url = kwargs['url']
    api_key = kwargs['api_key']
    account = quote(kwargs['account'], safe='')
    username = quote(kwargs['username'], safe='')
    secret_path = quote(kwargs['secret_path'], safe='')
    version = kwargs.get('secret_version')
    cacert = kwargs.get('cacert', None)

    auth_kwargs = {
        'headers': {'Content-Type': 'text/plain', 'Accept-Encoding': 'base64'},
        'data': api_key,
        'allow_redirects': False,
    }

    with CertFiles(cacert) as cert:
        # https://www.conjur.org/api.html#authentication-authenticate-post
        auth_kwargs['verify'] = cert
        try:
            resp = requests.post(urljoin(url, '/'.join(['authn', account, username, 'authenticate'])), **auth_kwargs)
            resp.raise_for_status()
        except requests.exceptions.HTTPError:
            resp = requests.post(urljoin(url, '/'.join(['api', 'authn', account, username, 'authenticate'])), **auth_kwargs)
        raise_for_status(resp)
    token = resp.content.decode('utf-8')

    lookup_kwargs = {
        'headers': {'Authorization': 'Token token="{}"'.format(token if _is_base64(token) else base64.b64encode(token.encode('utf-8')).decode('utf-8'))},
        'allow_redirects': False,
    }

    # https://www.conjur.org/api.html#secrets-retrieve-a-secret-get
    path = urljoin(url, '/'.join(['secrets', account, 'variable', secret_path]))
    path_conjurcloud = urljoin(url, '/'.join(['api', 'secrets', account, 'variable', secret_path]))
    if version:
        ver = "version={}".format(version)
        path = '?'.join([path, ver])
        path_conjurcloud = '?'.join([path_conjurcloud, ver])

    with CertFiles(cacert) as cert:
        lookup_kwargs['verify'] = cert
        try:
            resp = requests.get(path, timeout=30, **lookup_kwargs)
            resp.raise_for_status()
        except requests.exceptions.HTTPError:
            resp = requests.get(path_conjurcloud, timeout=30, **lookup_kwargs)
        raise_for_status(resp)
    return resp.text


conjur_plugin = CredentialPlugin('CyberArk Conjur Secrets Manager Lookup', inputs=conjur_inputs, backend=conjur_backend)
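The Conjur backend normalizes the authentication token before building the Authorization header: Conjur can hand back either a raw or an already base64-encoded token, and the header always needs the base64 form. A tiny sketch of that normalization on its own (ensure_base64 is an illustrative name; the round-trip test mirrors _is_base64 above):

import base64
import binascii

def ensure_base64(token: str) -> str:
    # If the token already round-trips through base64, keep it as-is...
    try:
        if base64.b64encode(base64.b64decode(token.encode())) == token.encode():
            return token
    except binascii.Error:
        pass
    # ...otherwise encode it so the Authorization header is well-formed.
    return base64.b64encode(token.encode()).decode()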
@@ -1,94 +0,0 @@
from .plugin import CredentialPlugin

from django.conf import settings
from django.utils.translation import gettext_lazy as _
from delinea.secrets.vault import PasswordGrantAuthorizer, SecretsVault
from base64 import b64decode

dsv_inputs = {
    'fields': [
        {
            'id': 'tenant',
            'label': _('Tenant'),
            'help_text': _('The tenant e.g. "ex" when the URL is https://ex.secretsvaultcloud.com'),
            'type': 'string',
        },
        {
            'id': 'tld',
            'label': _('Top-level Domain (TLD)'),
            'help_text': _('The TLD of the tenant e.g. "com" when the URL is https://ex.secretsvaultcloud.com'),
            'choices': ['ca', 'com', 'com.au', 'eu'],
            'default': 'com',
        },
        {
            'id': 'client_id',
            'label': _('Client ID'),
            'type': 'string',
        },
        {
            'id': 'client_secret',
            'label': _('Client Secret'),
            'type': 'string',
            'secret': True,
        },
    ],
    'metadata': [
        {
            'id': 'path',
            'label': _('Secret Path'),
            'type': 'string',
            'help_text': _('The secret path e.g. /test/secret1'),
        },
        {
            'id': 'secret_field',
            'label': _('Secret Field'),
            'help_text': _('The field to extract from the secret'),
            'type': 'string',
        },
        {
            'id': 'secret_decoding',
            'label': _('Should the secret be base64 decoded?'),
            'help_text': _('Specify whether the secret should be base64 decoded, typically used for storing files, such as SSH keys'),
            'choices': ['No Decoding', 'Decode Base64'],
            'type': 'string',
            'default': 'No Decoding',
        },
    ],
    'required': ['tenant', 'client_id', 'client_secret', 'path', 'secret_field', 'secret_decoding'],
}

if settings.DEBUG:
    dsv_inputs['fields'].append(
        {
            'id': 'url_template',
            'label': _('URL template'),
            'type': 'string',
            'default': 'https://{}.secretsvaultcloud.{}',
        }
    )


def dsv_backend(**kwargs):
    tenant_name = kwargs['tenant']
    tenant_tld = kwargs.get('tld', 'com')
    tenant_url_template = kwargs.get('url_template', 'https://{}.secretsvaultcloud.{}')
    client_id = kwargs['client_id']
    client_secret = kwargs['client_secret']
    secret_path = kwargs['path']
    secret_field = kwargs['secret_field']
    # providing a default value to remain backward compatible for secrets that have not specified this option
    secret_decoding = kwargs.get('secret_decoding', 'No Decoding')

    tenant_url = tenant_url_template.format(tenant_name, tenant_tld.strip("."))

    authorizer = PasswordGrantAuthorizer(tenant_url, client_id, client_secret)
    dsv_secret = SecretsVault(tenant_url, authorizer).get_secret(secret_path)

    # files can be uploaded base64 encoded to DSV, so we decode only when asked to
    if secret_decoding == 'Decode Base64':
        return b64decode(dsv_secret['data'][secret_field]).decode()

    return dsv_secret['data'][secret_field]


dsv_plugin = CredentialPlugin(name='Thycotic DevOps Secrets Vault', inputs=dsv_inputs, backend=dsv_backend)
@@ -1,384 +0,0 @@
|
||||
import copy
|
||||
import os
|
||||
import pathlib
|
||||
import time
|
||||
from urllib.parse import urljoin
|
||||
|
||||
from .plugin import CredentialPlugin, CertFiles, raise_for_status
|
||||
|
||||
import requests
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
base_inputs = {
|
||||
'fields': [
|
||||
{
|
||||
'id': 'url',
|
||||
'label': _('Server URL'),
|
||||
'type': 'string',
|
||||
'format': 'url',
|
||||
'help_text': _('The URL to the HashiCorp Vault'),
|
||||
},
|
||||
{
|
||||
'id': 'token',
|
||||
'label': _('Token'),
|
||||
'type': 'string',
|
||||
'secret': True,
|
||||
'help_text': _('The access token used to authenticate to the Vault server'),
|
||||
},
|
||||
{
|
||||
'id': 'cacert',
|
||||
'label': _('CA Certificate'),
|
||||
'type': 'string',
|
||||
'multiline': True,
|
||||
'help_text': _('The CA certificate used to verify the SSL certificate of the Vault server'),
|
||||
},
|
||||
{'id': 'role_id', 'label': _('AppRole role_id'), 'type': 'string', 'multiline': False, 'help_text': _('The Role ID for AppRole Authentication')},
|
||||
{
|
||||
'id': 'secret_id',
|
||||
'label': _('AppRole secret_id'),
|
||||
'type': 'string',
|
||||
'multiline': False,
|
||||
'secret': True,
|
||||
'help_text': _('The Secret ID for AppRole Authentication'),
|
||||
},
|
||||
{
|
||||
'id': 'client_cert_public',
|
||||
'label': _('Client Certificate'),
|
||||
'type': 'string',
|
||||
'multiline': True,
|
||||
'help_text': _(
|
||||
'The PEM-encoded client certificate used for TLS client authentication.'
|
||||
' This should include the certificate and any intermediate certififcates.'
|
||||
),
|
||||
},
|
||||
{
|
||||
'id': 'client_cert_private',
|
||||
'label': _('Client Certificate Key'),
|
||||
'type': 'string',
|
||||
'multiline': True,
|
||||
'secret': True,
|
||||
'help_text': _('The certificate private key used for TLS client authentication.'),
|
||||
},
|
||||
{
|
||||
'id': 'client_cert_role',
|
||||
'label': _('TLS Authentication Role'),
|
||||
'type': 'string',
|
||||
'multiline': False,
|
||||
'help_text': _(
|
||||
'The role configured in Hashicorp Vault for TLS client authentication.'
|
||||
' If not provided, Hashicorp Vault may assign roles based on the certificate used.'
|
||||
),
|
||||
},
|
||||
{
|
||||
'id': 'namespace',
|
||||
'label': _('Namespace name (Vault Enterprise only)'),
|
||||
'type': 'string',
|
||||
'multiline': False,
|
||||
'help_text': _('Name of the namespace to use when authenticate and retrieve secrets'),
|
||||
},
|
||||
{
|
||||
'id': 'kubernetes_role',
|
||||
'label': _('Kubernetes role'),
|
||||
'type': 'string',
|
||||
'multiline': False,
|
||||
'help_text': _(
|
||||
'The Role for Kubernetes Authentication.'
|
||||
' This is the named role, configured in Vault server, for AWX pod auth policies.'
|
||||
' see https://www.vaultproject.io/docs/auth/kubernetes#configuration'
|
||||
),
|
||||
},
|
||||
{
|
||||
'id': 'username',
|
||||
'label': _('Username'),
|
||||
'type': 'string',
|
||||
'secret': False,
|
||||
'help_text': _('Username for user authentication.'),
|
||||
},
|
||||
{
|
||||
'id': 'password',
|
||||
'label': _('Password'),
|
||||
'type': 'string',
|
||||
'secret': True,
|
||||
'help_text': _('Password for user authentication.'),
|
||||
},
|
||||
{
|
||||
'id': 'default_auth_path',
|
||||
'label': _('Path to Auth'),
|
||||
'type': 'string',
|
||||
'multiline': False,
|
||||
'default': 'approle',
|
||||
'help_text': _('The Authentication path to use if one isn\'t provided in the metadata when linking to an input field. Defaults to \'approle\''),
|
||||
},
|
||||
],
|
||||
'metadata': [
|
||||
{
|
||||
'id': 'secret_path',
|
||||
'label': _('Path to Secret'),
|
||||
'type': 'string',
|
||||
'help_text': _(
|
||||
(
|
||||
'The path to the secret stored in the secret backend e.g, /some/secret/. It is recommended'
|
||||
' that you use the secret backend field to identify the storage backend and to use this field'
|
||||
' for locating a specific secret within that store. However, if you prefer to fully identify'
|
||||
' both the secret backend and one of its secrets using only this field, join their locations'
|
||||
' into a single path without any additional separators, e.g, /location/of/backend/some/secret.'
|
||||
)
|
||||
),
|
||||
},
|
||||
{
|
||||
'id': 'auth_path',
|
||||
'label': _('Path to Auth'),
|
||||
'type': 'string',
|
||||
'multiline': False,
|
||||
'help_text': _('The path where the Authentication method is mounted e.g, approle'),
|
||||
},
|
||||
],
|
||||
'required': ['url', 'secret_path'],
|
||||
}
|
||||

hashi_kv_inputs = copy.deepcopy(base_inputs)
hashi_kv_inputs['fields'].append(
    {
        'id': 'api_version',
        'label': _('API Version'),
        'choices': ['v1', 'v2'],
        'help_text': _('API v1 is for static key/value lookups. API v2 is for versioned key/value lookups.'),
        'default': 'v1',
    }
)
hashi_kv_inputs['metadata'] = (
    [
        {
            'id': 'secret_backend',
            'label': _('Name of Secret Backend'),
            'type': 'string',
            'help_text': _('The name of the kv secret backend (if left empty, the first segment of the secret path will be used).'),
        }
    ]
    + hashi_kv_inputs['metadata']
    + [
        {
            'id': 'secret_key',
            'label': _('Key Name'),
            'type': 'string',
            'help_text': _('The name of the key to look up in the secret.'),
        },
        {
            'id': 'secret_version',
            'label': _('Secret Version (v2 only)'),
            'type': 'string',
            'help_text': _('Used to specify a specific secret version (if left empty, the latest version will be used).'),
        },
    ]
)
hashi_kv_inputs['required'].extend(['api_version', 'secret_key'])

hashi_ssh_inputs = copy.deepcopy(base_inputs)
hashi_ssh_inputs['metadata'] = (
    [
        {
            'id': 'public_key',
            'label': _('Unsigned Public Key'),
            'type': 'string',
            'multiline': True,
        }
    ]
    + hashi_ssh_inputs['metadata']
    + [
        {'id': 'role', 'label': _('Role Name'), 'type': 'string', 'help_text': _('The name of the role used to sign.')},
        {
            'id': 'valid_principals',
            'label': _('Valid Principals'),
            'type': 'string',
            'help_text': _('Valid principals (either usernames or hostnames) that the certificate should be signed for.'),
        },
    ]
)
hashi_ssh_inputs['required'].extend(['public_key', 'role'])

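# Illustrative sketch (editor's addition): the list concatenation above produces
# this metadata ordering for the kv plugin -- backend name first, key/version last:
#
#   [f['id'] for f in hashi_kv_inputs['metadata']]
#   # -> ['secret_backend', 'secret_path', 'auth_path', 'secret_key', 'secret_version']
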
def handle_auth(**kwargs):
    token = None
    if kwargs.get('token'):
        token = kwargs['token']
    elif kwargs.get('username') and kwargs.get('password'):
        token = method_auth(**kwargs, auth_param=userpass_auth(**kwargs))
    elif kwargs.get('role_id') and kwargs.get('secret_id'):
        token = method_auth(**kwargs, auth_param=approle_auth(**kwargs))
    elif kwargs.get('kubernetes_role'):
        token = method_auth(**kwargs, auth_param=kubernetes_auth(**kwargs))
    elif kwargs.get('client_cert_public') and kwargs.get('client_cert_private'):
        token = method_auth(**kwargs, auth_param=client_cert_auth(**kwargs))
    else:
        raise Exception('Token, Username/Password, AppRole, Kubernetes, or TLS authentication parameters must be set')
    return token

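# Illustrative sketch (editor's addition): authentication precedence is ordered,
# so an explicit token short-circuits every other method:
#
#   handle_auth(token='s.abc123')                   # -> 's.abc123', no HTTP call
#   handle_auth(role_id='r1', secret_id='s1', ...)  # -> AppRole login via method_auth()
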
def userpass_auth(**kwargs):
    return {'username': kwargs['username'], 'password': kwargs['password']}


def approle_auth(**kwargs):
    return {'role_id': kwargs['role_id'], 'secret_id': kwargs['secret_id']}


def kubernetes_auth(**kwargs):
    jwt_file = pathlib.Path('/var/run/secrets/kubernetes.io/serviceaccount/token')
    with jwt_file.open('r') as jwt_fo:
        jwt = jwt_fo.read().rstrip()
    return {'role': kwargs['kubernetes_role'], 'jwt': jwt}


def client_cert_auth(**kwargs):
    return {'name': kwargs.get('client_cert_role')}

def method_auth(**kwargs):
    # get auth method specific params
    request_kwargs = {'json': kwargs['auth_param'], 'timeout': 30}

    # we first try to use the 'auth_path' from the metadata
    # if not found we try to fetch the 'default_auth_path' from inputs
    auth_path = kwargs.get('auth_path') or kwargs['default_auth_path']

    url = urljoin(kwargs['url'], 'v1')
    cacert = kwargs.get('cacert', None)

    sess = requests.Session()
    sess.mount(url, requests.adapters.HTTPAdapter(max_retries=5))

    # Namespace support
    if kwargs.get('namespace'):
        sess.headers['X-Vault-Namespace'] = kwargs['namespace']
    request_url = '/'.join([url, 'auth', auth_path, 'login']).rstrip('/')
    if kwargs['auth_param'].get('username'):
        request_url = request_url + '/' + kwargs['username']
    with CertFiles(cacert) as cert:
        request_kwargs['verify'] = cert
        # TLS client certificate support
        if kwargs.get('client_cert_public') and kwargs.get('client_cert_private'):
            # Add client cert to requests Session before making call
            with CertFiles(kwargs['client_cert_public'], key=kwargs['client_cert_private']) as client_cert:
                sess.cert = client_cert
                resp = sess.post(request_url, **request_kwargs)
        else:
            # Make call without client certificate
            resp = sess.post(request_url, **request_kwargs)
    resp.raise_for_status()
    token = resp.json()['auth']['client_token']
    return token

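# Illustrative sketch (editor's addition, hypothetical host): how the login URL is
# composed above for a userpass mount:
#
#   urljoin('https://vault.example.com', 'v1')    # -> 'https://vault.example.com/v1'
#   '/'.join([url, 'auth', 'userpass', 'login'])  # -> '.../v1/auth/userpass/login'
#   # username-style auth params then append '/<username>' to the login URL
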
def kv_backend(**kwargs):
    token = handle_auth(**kwargs)
    url = kwargs['url']
    secret_path = kwargs['secret_path']
    secret_backend = kwargs.get('secret_backend', None)
    secret_key = kwargs.get('secret_key', None)
    cacert = kwargs.get('cacert', None)
    api_version = kwargs['api_version']

    request_kwargs = {
        'timeout': 30,
        'allow_redirects': False,
    }

    sess = requests.Session()
    sess.mount(url, requests.adapters.HTTPAdapter(max_retries=5))
    sess.headers['Authorization'] = 'Bearer {}'.format(token)
    # Compatibility header for older installs of HashiCorp Vault
    sess.headers['X-Vault-Token'] = token
    if kwargs.get('namespace'):
        sess.headers['X-Vault-Namespace'] = kwargs['namespace']

    if api_version == 'v2':
        if kwargs.get('secret_version'):
            request_kwargs['params'] = {'version': kwargs['secret_version']}
        if secret_backend:
            path_segments = [secret_backend, 'data', secret_path]
        else:
            try:
                mount_point, *path = pathlib.Path(secret_path.lstrip(os.sep)).parts
                '/'.join(path)
            except Exception:
                mount_point, path = secret_path, []
            # https://www.vaultproject.io/api/secret/kv/kv-v2.html#read-secret-version
            path_segments = [mount_point, 'data'] + path
    else:
        if secret_backend:
            path_segments = [secret_backend, secret_path]
        else:
            path_segments = [secret_path]

    request_url = urljoin(url, '/'.join(['v1'] + path_segments)).rstrip('/')
    with CertFiles(cacert) as cert:
        request_kwargs['verify'] = cert
        request_retries = 0
        while request_retries < 5:
            response = sess.get(request_url, **request_kwargs)
            # https://developer.hashicorp.com/vault/docs/enterprise/consistency
            if response.status_code == 412:
                request_retries += 1
                time.sleep(1)
            else:
                break
        raise_for_status(response)

    json = response.json()
    if api_version == 'v2':
        json = json['data']

    if secret_key:
        try:
            if (secret_key != 'data') and (secret_key not in json['data']) and ('data' in json['data']):
                return json['data']['data'][secret_key]
            return json['data'][secret_key]
        except KeyError:
            raise RuntimeError('{} is not present at {}'.format(secret_key, secret_path))
    return json['data']

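# Illustrative sketch (editor's addition): v2 path resolution when no explicit
# secret_backend is given. For secret_path='/engine/some/secret':
#
#   pathlib.Path('engine/some/secret').parts  # -> ('engine', 'some', 'secret')
#   mount_point, path = 'engine', ['some', 'secret']
#   path_segments                             # -> ['engine', 'data', 'some', 'secret']
#   request_url                               # -> '<url>/v1/engine/data/some/secret'
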
def ssh_backend(**kwargs):
    token = handle_auth(**kwargs)
    url = urljoin(kwargs['url'], 'v1')
    secret_path = kwargs['secret_path']
    role = kwargs['role']
    cacert = kwargs.get('cacert', None)

    request_kwargs = {
        'timeout': 30,
        'allow_redirects': False,
    }

    request_kwargs['json'] = {'public_key': kwargs['public_key']}
    if kwargs.get('valid_principals'):
        request_kwargs['json']['valid_principals'] = kwargs['valid_principals']

    sess = requests.Session()
    sess.mount(url, requests.adapters.HTTPAdapter(max_retries=5))
    sess.headers['Authorization'] = 'Bearer {}'.format(token)
    if kwargs.get('namespace'):
        sess.headers['X-Vault-Namespace'] = kwargs['namespace']
    # Compatibility header for older installs of HashiCorp Vault
    sess.headers['X-Vault-Token'] = token
    # https://www.vaultproject.io/api/secret/ssh/index.html#sign-ssh-key
    request_url = '/'.join([url, secret_path, 'sign', role]).rstrip('/')

    with CertFiles(cacert) as cert:
        request_kwargs['verify'] = cert
        request_retries = 0
        while request_retries < 5:
            resp = sess.post(request_url, **request_kwargs)
            # https://developer.hashicorp.com/vault/docs/enterprise/consistency
            if resp.status_code == 412:
                request_retries += 1
                time.sleep(1)
            else:
                break
        raise_for_status(resp)
    return resp.json()['data']['signed_key']


hashivault_kv_plugin = CredentialPlugin('HashiCorp Vault Secret Lookup', inputs=hashi_kv_inputs, backend=kv_backend)

hashivault_ssh_plugin = CredentialPlugin('HashiCorp Vault Signed SSH', inputs=hashi_ssh_inputs, backend=ssh_backend)
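
# Illustrative sketch (editor's addition, hypothetical values): a CredentialPlugin
# is just a named tuple of (name, inputs, backend); AWX calls the backend with the
# merged input/metadata values:
#
#   signed = hashivault_ssh_plugin.backend(
#       url='https://vault.example.com', token='s.abc123',
#       secret_path='ssh', role='web-hosts', public_key=public_key_text,
#   )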
@@ -1,55 +0,0 @@
import os
import tempfile

from collections import namedtuple

from requests.exceptions import HTTPError

CredentialPlugin = namedtuple('CredentialPlugin', ['name', 'inputs', 'backend'])


def raise_for_status(resp):
    resp.raise_for_status()
    if resp.status_code >= 300:
        exc = HTTPError()
        setattr(exc, 'response', resp)
        raise exc


class CertFiles:
    """
    A context manager used for writing a certificate and (optional) key
    to $TMPDIR, and cleaning up afterwards.

    This is particularly useful as a shared resource for credential plugins
    that want to pull cert/key data out of the database and persist it
    temporarily to the file system so that it can be loaded into the openssl
    certificate chain (generally, for HTTPS requests plugins make via the
    Python requests library)

    with CertFiles(cert_data, key_data) as cert:
        # cert is a string representing a path to the cert or pemfile
        # temporarily written to disk
        requests.post(..., cert=cert)
    """

    certfile = None

    def __init__(self, cert, key=None):
        self.cert = cert
        self.key = key

    def __enter__(self):
        if not self.cert:
            return None
        self.certfile = tempfile.NamedTemporaryFile('wb', delete=False)
        self.certfile.write(self.cert.encode())
        if self.key:
            self.certfile.write(b'\n')
            self.certfile.write(self.key.encode())
        self.certfile.flush()
        return str(self.certfile.name)

    def __exit__(self, *args):
        if self.certfile and os.path.exists(self.certfile.name):
            os.remove(self.certfile.name)
@@ -1,76 +0,0 @@
from .plugin import CredentialPlugin

from django.utils.translation import gettext_lazy as _

try:
    from delinea.secrets.server import DomainPasswordGrantAuthorizer, PasswordGrantAuthorizer, SecretServer, ServerSecret
except ImportError:
    from thycotic.secrets.server import DomainPasswordGrantAuthorizer, PasswordGrantAuthorizer, SecretServer, ServerSecret

tss_inputs = {
    'fields': [
        {
            'id': 'server_url',
            'label': _('Secret Server URL'),
            'help_text': _('The Base URL of Secret Server e.g. https://myserver/SecretServer or https://mytenant.secretservercloud.com'),
            'type': 'string',
        },
        {
            'id': 'username',
            'label': _('Username'),
            'help_text': _('The (Application) user username'),
            'type': 'string',
        },
        {
            'id': 'domain',
            'label': _('Domain'),
            'help_text': _('The (Application) user domain'),
            'type': 'string',
        },
        {
            'id': 'password',
            'label': _('Password'),
            'help_text': _('The corresponding password'),
            'type': 'string',
            'secret': True,
        },
    ],
    'metadata': [
        {
            'id': 'secret_id',
            'label': _('Secret ID'),
            'help_text': _('The integer ID of the secret'),
            'type': 'string',
        },
        {
            'id': 'secret_field',
            'label': _('Secret Field'),
            'help_text': _('The field to extract from the secret'),
            'type': 'string',
        },
    ],
    'required': ['server_url', 'username', 'password', 'secret_id', 'secret_field'],
}


def tss_backend(**kwargs):
    if kwargs.get("domain"):
        authorizer = DomainPasswordGrantAuthorizer(
            base_url=kwargs['server_url'], username=kwargs['username'], domain=kwargs['domain'], password=kwargs['password']
        )
    else:
        authorizer = PasswordGrantAuthorizer(kwargs['server_url'], kwargs['username'], kwargs['password'])
    secret_server = SecretServer(kwargs['server_url'], authorizer)
    secret_dict = secret_server.get_secret(kwargs['secret_id'])
    secret = ServerSecret(**secret_dict)

    if not isinstance(secret.fields[kwargs['secret_field']].value, str):
        return secret.fields[kwargs['secret_field']].value.text
    else:
        return secret.fields[kwargs['secret_field']].value


tss_plugin = CredentialPlugin(
    'Thycotic Secret Server',
    tss_inputs,
    tss_backend,
)
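
# Illustrative sketch (editor's addition, hypothetical values): the Delinea/Thycotic
# backend resolves one field of one secret per lookup:
#
#   tss_backend(server_url='https://myserver/SecretServer',  # from inputs
#               username='svc-awx', password='...',
#               secret_id='1234', secret_field='password')   # from metadata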
@@ -1,9 +1,9 @@
 import os
-import pkg_resources
 import sqlite3
 import sys
 import traceback
 import uuid
+from importlib.metadata import version as _get_version

 from django.core.cache import cache
 from django.core.cache.backends.locmem import LocMemCache
@@ -70,7 +70,7 @@ class RecordedQueryLog(object):
         else:
             progname = os.path.basename(sys.argv[0])
         filepath = os.path.join(self.dest, '{}.sqlite'.format(progname))
-        version = pkg_resources.get_distribution('awx').version
+        version = _get_version('awx')
         log = sqlite3.connect(filepath, timeout=3)
         log.execute(
             'CREATE TABLE IF NOT EXISTS queries ('
@@ -72,8 +72,8 @@ class PubSub(object):
             ns = conn.wait(psycopg.generators.notifies(conn.pgconn))
         except psycopg.errors._NO_TRACEBACK as ex:
             raise ex.with_traceback(None)
+        enc = psycopg._encodings.pgconn_encoding(conn.pgconn)
         for pgn in ns:
-            enc = conn.pgconn._encoding
             n = psycopg.connection.Notify(pgn.relname.decode(enc), pgn.extra.decode(enc), pgn.be_pid)
             yield n
awx/main/dispatch/config.py (new file, 53 lines)
@@ -0,0 +1,53 @@
from django.conf import settings

from ansible_base.lib.utils.db import get_pg_notify_params
from awx.main.dispatch import get_task_queuename
from awx.main.dispatch.pool import get_auto_max_workers


def get_dispatcherd_config(for_service: bool = False, mock_publish: bool = False) -> dict:
    """Return a dictionary config for dispatcherd

    Parameters:
        for_service: if True, include dynamic options needed for running the dispatcher service;
            this will require database access, so you should delay evaluation until after app setup
        mock_publish: if True, route publishes to a no-op broker
    """
    config = {
        "version": 2,
        "service": {
            "pool_kwargs": {
                "min_workers": settings.JOB_EVENT_WORKERS,
                "max_workers": get_auto_max_workers(),
            },
            "main_kwargs": {"node_id": settings.CLUSTER_HOST_ID},
            "process_manager_cls": "ForkServerManager",
            "process_manager_kwargs": {"preload_modules": ['awx.main.dispatch.hazmat']},
        },
        "brokers": {
            "socket": {"socket_path": settings.DISPATCHERD_DEBUGGING_SOCKFILE},
        },
        "publish": {"default_control_broker": "socket"},
        "worker": {"worker_cls": "awx.main.dispatch.worker.dispatcherd.AWXTaskWorker"},
    }

    if mock_publish:
        config["brokers"]["noop"] = {}
        config["publish"]["default_broker"] = "noop"
    else:
        config["brokers"]["pg_notify"] = {
            "config": get_pg_notify_params(),
            "sync_connection_factory": "ansible_base.lib.utils.db.psycopg_connection_from_django",
            "default_publish_channel": settings.CLUSTER_HOST_ID,  # used for debugging commands
        }
        config["publish"]["default_broker"] = "pg_notify"

    if for_service:
        config["producers"] = {
            "ScheduledProducer": {"task_schedule": settings.DISPATCHER_SCHEDULE},
            "OnStartProducer": {"task_list": {"awx.main.tasks.system.dispatch_startup": {}}},
            "ControlProducer": {},
        }

        config["brokers"]["pg_notify"]["channels"] = ['tower_broadcast_all', 'tower_settings_change', get_task_queuename()]

    return config
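
# Illustrative sketch (editor's addition): consuming the config. The dispatcherd
# setup entry point is not shown in this diff, so only the dict shape is asserted:
#
#   config = get_dispatcherd_config(for_service=True)
#   config['service']['pool_kwargs']     # -> {'min_workers': ..., 'max_workers': ...}
#   config['publish']['default_broker']  # -> 'pg_notify' (or 'noop' when mocked)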
awx/main/dispatch/hazmat.py (new file, 36 lines)
@@ -0,0 +1,36 @@
import django

# dispatcherd publisher logic is likely to be used, but needs manual preload
from dispatcherd.brokers import pg_notify  # noqa

# Cache may not be initialized until we are in the worker, so preload here
from channels_redis import core  # noqa

from awx import prepare_env

from dispatcherd.utils import resolve_callable


prepare_env()

django.setup()  # noqa


from django.conf import settings


# Preload all periodic tasks so their imports will be in shared memory
for name, options in settings.CELERYBEAT_SCHEDULE.items():
    resolve_callable(options['task'])


# Preload in-line import from tasks
from awx.main.scheduler.kubernetes import PodManager  # noqa


from django.core.cache import cache as django_cache
from django.db import connection


connection.close()
django_cache.close()
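
# Illustrative sketch (editor's addition): the same preload idea expressed with the
# stdlib, for orientation only -- dispatcherd's ForkServerManager preloads this
# module so every forked worker inherits the imports (with connections closed):
#
#   import multiprocessing
#   multiprocessing.set_forkserver_preload(['awx.main.dispatch.hazmat'])
#   multiprocessing.set_start_method('forkserver')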
@@ -88,8 +88,10 @@ class Scheduler:
         # internally times are all referenced relative to startup time, add grace period
         self.global_start = time.time() + 2.0

-    def get_and_mark_pending(self):
-        relative_time = time.time() - self.global_start
+    def get_and_mark_pending(self, reftime=None):
+        if reftime is None:
+            reftime = time.time()  # mostly for tests
+        relative_time = reftime - self.global_start
         to_run = []
         for job in self.jobs:
             if job.due_to_run(relative_time):
@@ -98,8 +100,10 @@ class Scheduler:
                 job.mark_run(relative_time)
         return to_run

-    def time_until_next_run(self):
-        relative_time = time.time() - self.global_start
+    def time_until_next_run(self, reftime=None):
+        if reftime is None:
+            reftime = time.time()  # mostly for tests
+        relative_time = reftime - self.global_start
         next_job = min(self.jobs, key=lambda j: j.next_run)
         delta = next_job.next_run - relative_time
         if delta <= 0.1:
@@ -115,10 +119,11 @@ class Scheduler:
     def debug(self, *args, **kwargs):
         data = dict()
         data['title'] = 'Scheduler status'
+        reftime = time.time()

-        now = datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d %H:%M:%S UTC')
+        now = datetime.fromtimestamp(reftime).strftime('%Y-%m-%d %H:%M:%S UTC')
         start_time = datetime.fromtimestamp(self.global_start).strftime('%Y-%m-%d %H:%M:%S UTC')
-        relative_time = time.time() - self.global_start
+        relative_time = reftime - self.global_start
         data['started_time'] = start_time
         data['current_time'] = now
         data['current_time_relative'] = round(relative_time, 3)
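
# Illustrative sketch (editor's addition): passing one reftime to both calls keeps
# "what is due" and "how long to sleep" benchmarked to the same instant; the
# schedule dict shape follows the AWXConsumerPG usage later in this diff:
#
#   import time
#   from datetime import timedelta
#   sched = Scheduler({'gather': {'control': lambda: None, 'schedule': timedelta(seconds=20)}})
#   reftime = time.time()
#   due = sched.get_and_mark_pending(reftime=reftime)
#   sleep_for = sched.time_until_next_run(reftime=reftime)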
@@ -7,6 +7,7 @@ import time
 import traceback
 from datetime import datetime
 from uuid import uuid4
+import json

 import collections
 from multiprocessing import Process
@@ -21,9 +22,14 @@ from django_guid import set_guid
 from jinja2 import Template
 import psutil

+from ansible_base.lib.logging.runtime import log_excess_runtime
+
 from awx.main.models import UnifiedJob
 from awx.main.dispatch import reaper
-from awx.main.utils.common import convert_mem_str_to_bytes, get_mem_effective_capacity, log_excess_runtime
+from awx.main.utils.common import get_mem_effective_capacity, get_corrected_memory, get_corrected_cpu, get_cpu_effective_capacity
+
+# ansible-runner
+from ansible_runner.utils.capacity import get_mem_in_bytes, get_cpu_count

 if 'run_callback_receiver' in sys.argv:
     logger = logging.getLogger('awx.main.commands.run_callback_receiver')
@@ -31,6 +37,9 @@ else:
     logger = logging.getLogger('awx.main.dispatch')


+RETIRED_SENTINEL_TASK = "[retired]"
+
+
 class NoOpResultQueue(object):
     def put(self, item):
         pass
@@ -75,11 +84,17 @@ class PoolWorker(object):
         self.queue = MPQueue(queue_size)
         self.process = Process(target=target, args=(self.queue, self.finished) + args)
         self.process.daemon = True
+        self.creation_time = time.monotonic()
+        self.retiring = False

     def start(self):
         self.process.start()

     def put(self, body):
+        if self.retiring:
+            uuid = body.get('uuid', 'N/A') if isinstance(body, dict) else 'N/A'
+            logger.info(f"Worker pid:{self.pid} is retiring. Refusing new task {uuid}.")
+            raise QueueFull("Worker is retiring and not accepting new tasks")  # AutoscalePool.write handles QueueFull
         uuid = '?'
         if isinstance(body, dict):
             if not body.get('uuid'):
@@ -98,6 +113,11 @@ class PoolWorker(object):
         """
         self.queue.put('QUIT')

+    @property
+    def age(self):
+        """Returns the current age of the worker in seconds."""
+        return time.monotonic() - self.creation_time
+
     @property
     def pid(self):
         return self.process.pid
@@ -144,6 +164,8 @@ class PoolWorker(object):
             # the purpose of self.managed_tasks is to just track internal
             # state of which events are *currently* being processed.
             logger.warning('Event UUID {} appears to have been duplicated.'.format(uuid))
+        if self.retiring:
+            self.managed_tasks[RETIRED_SENTINEL_TASK] = {'task': RETIRED_SENTINEL_TASK}

     @property
     def current_task(self):
@@ -259,6 +281,8 @@ class WorkerPool(object):
             '{% for w in workers %}'
             '. worker[pid:{{ w.pid }}]{% if not w.alive %} GONE exit={{ w.exitcode }}{% endif %}'
             ' sent={{ w.messages_sent }}'
+            ' age={{ "%.0f"|format(w.age) }}s'
+            ' retiring={{ w.retiring }}'
             '{% if w.messages_finished %} finished={{ w.messages_finished }}{% endif %}'
             ' qsize={{ w.managed_tasks|length }}'
             ' rss={{ w.mb }}MB'
@@ -305,6 +329,41 @@ class WorkerPool(object):
                 logger.exception('could not kill {}'.format(worker.pid))


+def get_auto_max_workers():
+    """Method we normally rely on to get max_workers
+
+    Uses almost the same logic as Instance.local_health_check.
+    The important thing is to be MORE than Instance.capacity
+    so that the task-manager does not over-schedule this node.
+
+    Ideally we would just use the capacity from the database plus reserve workers,
+    but this poses some bootstrap problems where OCP task containers
+    register themselves after startup
+    """
+    # Get memory from ansible-runner
+    total_memory_gb = get_mem_in_bytes()
+
+    # This may replace memory calculation with a user override
+    corrected_memory = get_corrected_memory(total_memory_gb)
+
+    # Get same number as max forks based on memory, this function takes memory as bytes
+    mem_capacity = get_mem_effective_capacity(corrected_memory, is_control_node=True)
+
+    # Follow same process for CPU capacity constraint
+    cpu_count = get_cpu_count()
+    corrected_cpu = get_corrected_cpu(cpu_count)
+    cpu_capacity = get_cpu_effective_capacity(corrected_cpu, is_control_node=True)
+
+    # Here is what is different from health checks: take the larger of the two,
+    # so that max_workers stays above the instance capacity
+    auto_max = max(mem_capacity, cpu_capacity)
+
+    # add magic number of extra workers to ensure
+    # we have a few extra workers to run the heartbeat
+    auto_max += 7
+
+    return auto_max
+
+
 class AutoscalePool(WorkerPool):
     """
     An extended pool implementation that automatically scales workers up and
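
# Illustrative sketch (editor's addition, made-up numbers): on a node where the
# memory rule yields 40 forks and the CPU rule yields 16, the pool sizes itself as
#
#   auto_max = max(40, 16) + 7   # -> 47 workers, above Instance.capacity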
@@ -315,22 +374,13 @@ class AutoscalePool(WorkerPool):

     def __init__(self, *args, **kwargs):
         self.max_workers = kwargs.pop('max_workers', None)
+        self.max_worker_lifetime_seconds = kwargs.pop(
+            'max_worker_lifetime_seconds', getattr(settings, 'WORKER_MAX_LIFETIME_SECONDS', 14400)
+        )  # Default to 4 hours
         super(AutoscalePool, self).__init__(*args, **kwargs)

         if self.max_workers is None:
-            settings_absmem = getattr(settings, 'SYSTEM_TASK_ABS_MEM', None)
-            if settings_absmem is not None:
-                # There are 1073741824 bytes in a gigabyte. Convert bytes to gigabytes by dividing by 2**30
-                total_memory_gb = convert_mem_str_to_bytes(settings_absmem) // 2**30
-            else:
-                total_memory_gb = (psutil.virtual_memory().total >> 30) + 1  # noqa: round up
-
-            # Get same number as max forks based on memory, this function takes memory as bytes
-            self.max_workers = get_mem_effective_capacity(total_memory_gb * 2**30)
-
-            # add magic prime number of extra workers to ensure
-            # we have a few extra workers to run the heartbeat
-            self.max_workers += 7
+            self.max_workers = get_auto_max_workers()

         # max workers can't be less than min_workers
         self.max_workers = max(self.min_workers, self.max_workers)
@@ -344,6 +394,9 @@ class AutoscalePool(WorkerPool):
         self.scale_up_ct = 0
         self.worker_count_max = 0

+        # last time we wrote current tasks, to avoid too much log spam
+        self.last_task_list_log = time.monotonic()
+
     def produce_subsystem_metrics(self, metrics_object):
         metrics_object.set('dispatcher_pool_scale_up_events', self.scale_up_ct)
         metrics_object.set('dispatcher_pool_active_task_count', sum(len(w.managed_tasks) for w in self.workers))
@@ -366,7 +419,7 @@ class AutoscalePool(WorkerPool):
     def debug_meta(self):
         return 'min={} max={}'.format(self.min_workers, self.max_workers)

-    @log_excess_runtime(logger)
+    @log_excess_runtime(logger, debug_cutoff=0.05, cutoff=0.2)
     def cleanup(self):
         """
         Perform some internal accounting and cleanup. This is run on
@@ -383,6 +436,7 @@ class AutoscalePool(WorkerPool):
         """
         orphaned = []
         for w in self.workers[::]:
+            is_retirement_age = self.max_worker_lifetime_seconds is not None and w.age > self.max_worker_lifetime_seconds
             if not w.alive:
                 # the worker process has exited
                 # 1. take the task it was running and enqueue the error
@@ -391,6 +445,10 @@ class AutoscalePool(WorkerPool):
                 #    send them to another worker
                 logger.error('worker pid:{} is gone (exit={})'.format(w.pid, w.exitcode))
                 if w.current_task:
+                    if w.current_task == {'task': RETIRED_SENTINEL_TASK}:
+                        logger.debug('scaling down worker pid:{} due to worker age: {}'.format(w.pid, w.age))
+                        self.workers.remove(w)
+                        continue
                     if w.current_task != 'QUIT':
                         try:
                             for j in UnifiedJob.objects.filter(celery_task_id=w.current_task['uuid']):
@@ -401,6 +459,7 @@ class AutoscalePool(WorkerPool):
                             logger.warning(f'Worker was told to quit but has not, pid={w.pid}')
                 orphaned.extend(w.orphaned_tasks)
                 self.workers.remove(w)
+
             elif w.idle and len(self.workers) > self.min_workers:
                 # the process has an empty queue (it's idle) and we have
                 # more processes in the pool than we need (> min)
@@ -409,6 +468,22 @@ class AutoscalePool(WorkerPool):
                 logger.debug('scaling down worker pid:{}'.format(w.pid))
                 w.quit()
                 self.workers.remove(w)
+
+            elif w.idle and is_retirement_age:
+                logger.debug('scaling down worker pid:{} due to worker age: {}'.format(w.pid, w.age))
+                w.quit()
+                self.workers.remove(w)
+
+            elif is_retirement_age and not w.retiring and not w.idle:
+                logger.info(
+                    f"Worker pid:{w.pid} (age: {w.age:.0f}s) exceeded max lifetime ({self.max_worker_lifetime_seconds:.0f}s). "
+                    "Signaling for graceful retirement."
+                )
+                # Send QUIT signal; worker will finish current task then exit.
+                w.quit()
+                # mark as retiring to reject any future tasks that might be assigned in meantime
+                w.retiring = True
+
             if w.alive:
                 # if we discover a task manager invocation that's been running
                 # too long, reap it (because otherwise it'll just hold the postgres
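
# Illustrative sketch (editor's addition): the retirement lifecycle in order.
#
#   w.age > max_worker_lifetime_seconds   # cleanup() notices on a pass
#   w.quit(); w.retiring = True           # QUIT queued; put() now raises QueueFull
#   # the worker drains its queue, records RETIRED_SENTINEL_TASK, and exits;
#   # a later cleanup() pass sees the sentinel and removes the dead worker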
@@ -461,6 +536,14 @@ class AutoscalePool(WorkerPool):
             self.worker_count_max = new_worker_ct
         return ret

+    @staticmethod
+    def fast_task_serialization(current_task):
+        try:
+            return str(current_task.get('task')) + ' - ' + str(sorted(current_task.get('args', []))) + ' - ' + str(sorted(current_task.get('kwargs', {})))
+        except Exception:
+            # just make sure this does not make things worse
+            return str(current_task)
+
     def write(self, preferred_queue, body):
         if 'guid' in body:
             set_guid(body['guid'])
@@ -482,6 +565,15 @@ class AutoscalePool(WorkerPool):
                 if isinstance(body, dict):
                     task_name = body.get('task')
                     logger.warning(f'Workers maxed, queuing {task_name}, load: {sum(len(w.managed_tasks) for w in self.workers)} / {len(self.workers)}')
+                # Once every 10 seconds write out task list for debugging
+                if time.monotonic() - self.last_task_list_log >= 10.0:
+                    task_counts = {}
+                    for worker in self.workers:
+                        task_slug = self.fast_task_serialization(worker.current_task)
+                        task_counts.setdefault(task_slug, 0)
+                        task_counts[task_slug] += 1
+                    logger.info(f'Running tasks by count:\n{json.dumps(task_counts, indent=2)}')
+                    self.last_task_list_log = time.monotonic()
             return super(AutoscalePool, self).write(preferred_queue, body)
         except Exception:
             for conn in connections.all():
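
# Illustrative sketch (editor's addition): what the throttled debug log aggregates.
#
#   AutoscalePool.fast_task_serialization(
#       {'task': 'awx.main.tasks.system.cluster_node_heartbeat', 'args': [], 'kwargs': {}}
#   )
#   # -> 'awx.main.tasks.system.cluster_node_heartbeat - [] - []'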
@@ -4,10 +4,13 @@ import json
 import time
 from uuid import uuid4

+from dispatcherd.publish import submit_task
+from dispatcherd.utils import resolve_callable
+
 from django_guid import get_guid
 from django.conf import settings

 from . import pg_bus_conn
 from awx.main.utils import is_testing

 logger = logging.getLogger('awx.main.dispatch')
@@ -93,6 +96,19 @@ class task:

     @classmethod
     def apply_async(cls, args=None, kwargs=None, queue=None, uuid=None, **kw):
+        try:
+            from flags.state import flag_enabled
+
+            if flag_enabled('FEATURE_DISPATCHERD_ENABLED'):
+                # At this point we have the import string, and submit_task wants the method, so back to that
+                actual_task = resolve_callable(cls.name)
+                return submit_task(actual_task, args=args, kwargs=kwargs, queue=queue, uuid=uuid, **kw)
+        except Exception:
+            logger.exception(f"[DISPATCHER] Failed to check for alternative dispatcherd implementation for {cls.name}")
+            # Continue with original implementation if anything fails
+            pass
+
+        # Original implementation follows
         queue = queue or getattr(cls.queue, 'im_func', cls.queue)
         if not queue:
             msg = f'{cls.name}: Queue value required and may not be None'
@@ -101,7 +117,7 @@ class task:
         obj = cls.get_async_body(args=args, kwargs=kwargs, uuid=uuid, **kw)
         if callable(queue):
             queue = queue()
-        if not is_testing():
+        if not settings.DISPATCHER_MOCK_PUBLISH:
             with pg_bus_conn() as conn:
                 conn.notify(queue, json.dumps(obj))
         return (obj, queue)
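
# Illustrative sketch (editor's addition, hypothetical task and queue names): with
# the feature flag on, a task defined via the @task decorator is routed through
# dispatcherd's submit_task() instead of pg_notify:
#
#   from awx.main.dispatch.publish import task
#
#   @task(queue='tower_broadcast_all')
#   def example_task():
#       pass
#
#   example_task.apply_async(args=[], queue='tower_broadcast_all')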
@@ -15,11 +15,13 @@ from datetime import timedelta

 from django import db
 from django.conf import settings
+import redis.exceptions
+
+from ansible_base.lib.logging.runtime import log_excess_runtime

 from awx.main.dispatch.pool import WorkerPool
 from awx.main.dispatch.periodic import Scheduler
 from awx.main.dispatch import pg_bus_conn
-from awx.main.utils.common import log_excess_runtime
 from awx.main.utils.db import set_connection_name
 import awx.main.analytics.subsystem_metrics as s_metrics

@@ -126,13 +128,16 @@ class AWXConsumerBase(object):
             return
         self.dispatch_task(body)

-    @log_excess_runtime(logger)
+    @log_excess_runtime(logger, debug_cutoff=0.05, cutoff=0.2)
     def record_statistics(self):
         if time.time() - self.last_stats > 1:  # buffer stat recording to once per second
+            save_data = self.pool.debug()
             try:
-                self.redis.set(f'awx_{self.name}_statistics', self.pool.debug())
+                self.redis.set(f'awx_{self.name}_statistics', save_data)
+            except redis.exceptions.ConnectionError as exc:
+                logger.warning(f'Redis connection error saving {self.name} status data:\n{exc}\nmissed data:\n{save_data}')
             except Exception:
-                logger.exception(f"encountered an error communicating with redis to store {self.name} statistics")
+                logger.exception(f"Unknown redis error saving {self.name} status data:\nmissed data:\n{save_data}")
             self.last_stats = time.time()

     def run(self, *args, **kwargs):
@@ -183,11 +188,15 @@ class AWXConsumerPG(AWXConsumerBase):
             schedule['metrics_gather'] = {'control': self.record_metrics, 'schedule': timedelta(seconds=20)}
         self.scheduler = Scheduler(schedule)

+    @log_excess_runtime(logger, debug_cutoff=0.05, cutoff=0.2)
     def record_metrics(self):
         current_time = time.time()
         self.pool.produce_subsystem_metrics(self.subsystem_metrics)
         self.subsystem_metrics.set('dispatcher_availability', self.listen_cumulative_time / (current_time - self.last_metrics_gather))
-        self.subsystem_metrics.pipe_execute()
+        try:
+            self.subsystem_metrics.pipe_execute()
+        except redis.exceptions.ConnectionError as exc:
+            logger.warning(f'Redis connection error saving dispatcher metrics, error:\n{exc}')
         self.listen_cumulative_time = 0.0
         self.last_metrics_gather = current_time

@@ -203,7 +212,11 @@ class AWXConsumerPG(AWXConsumerBase):
         except Exception as exc:
             logger.warning(f'Failed to save dispatcher statistics {exc}')

-        for job in self.scheduler.get_and_mark_pending():
+        # Everything benchmarks to the same original time, so that skew due to the
+        # runtime of the actions themselves does not mess up scheduling expectations
+        reftime = time.time()
+
+        for job in self.scheduler.get_and_mark_pending(reftime=reftime):
             if 'control' in job.data:
                 try:
                     job.data['control']()
@@ -220,12 +233,12 @@ class AWXConsumerPG(AWXConsumerBase):

         self.listen_start = time.time()

-        return self.scheduler.time_until_next_run()
+        return self.scheduler.time_until_next_run(reftime=reftime)

     def run(self, *args, **kwargs):
         super(AWXConsumerPG, self).run(*args, **kwargs)

-        logger.info(f"Running worker {self.name} listening to queues {self.queues}")
+        logger.info(f"Running {self.name}, workers min={self.pool.min_workers} max={self.pool.max_workers}, listening to queues {self.queues}")
         init = False

         while True:
@@ -20,6 +20,7 @@ from awx.main.models import JobEvent, AdHocCommandEvent, ProjectUpdateEvent, Inv
 from awx.main.constants import ACTIVE_STATES
 from awx.main.models.events import emit_event_detail
 from awx.main.utils.profiling import AWXProfiler
+from awx.main.tasks.system import events_processed_hook
 import awx.main.analytics.subsystem_metrics as s_metrics
 from .base import BaseWorker

@@ -46,7 +47,7 @@ def job_stats_wrapup(job_identifier, event=None):
         # If the status was a finished state before this update was made, send notifications
         # If not, we will send notifications when the status changes
         if uj.status not in ACTIVE_STATES:
             uj.send_notification_templates('succeeded' if uj.status == 'successful' else 'failed')
+            events_processed_hook(uj)

     except Exception:
         logger.exception('Worker failed to save stats or emit notifications: Job {}'.format(job_identifier))
@@ -85,6 +86,7 @@ class CallbackBrokerWorker(BaseWorker):
         return os.getpid()

     def read(self, queue):
+        has_redis_error = False
         try:
             res = self.redis.blpop(self.queue_name, timeout=1)
             if res is None:
@@ -94,14 +96,21 @@ class CallbackBrokerWorker(BaseWorker):
             self.subsystem_metrics.inc('callback_receiver_events_popped_redis', 1)
             self.subsystem_metrics.inc('callback_receiver_events_in_memory', 1)
             return json.loads(res[1])
         except redis.exceptions.ConnectionError as exc:
+            # Low noise log, because very common and many workers will write this
             logger.error(f"redis connection error: {exc}")
+            has_redis_error = True
             time.sleep(5)
         except redis.exceptions.RedisError:
             logger.exception("encountered an error communicating with redis")
+            has_redis_error = True
             time.sleep(1)
         except (json.JSONDecodeError, KeyError):
             logger.exception("failed to decode JSON message from redis")
         finally:
-            self.record_statistics()
-            self.record_read_metrics()
+            if not has_redis_error:
+                self.record_statistics()
+                self.record_read_metrics()

         return {'event': 'FLUSH'}
awx/main/dispatch/worker/dispatcherd.py (new file, 14 lines)
@@ -0,0 +1,14 @@
from dispatcherd.worker.task import TaskWorker

from django.db import connection


class AWXTaskWorker(TaskWorker):

    def on_start(self) -> None:
        """Get the worker connected so that the first task it gets will be worked quickly"""
        connection.ensure_connection()

    def pre_task(self, message) -> None:
        """This should remedy bad connections that cannot fix themselves"""
        connection.close_if_unusable_or_obsolete()
@@ -38,5 +38,12 @@ class PostRunError(Exception):
         super(PostRunError, self).__init__(msg)


+class PolicyEvaluationError(Exception):
+    def __init__(self, msg, status='failed', tb=''):
+        self.status = status
+        self.tb = tb
+        super(PolicyEvaluationError, self).__init__(msg)
+
+
 class ReceptorNodeNotFound(RuntimeError):
     pass
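
# Illustrative sketch (editor's addition): how a caller might consume the new
# exception; the raising site is not part of this excerpt, so evaluate_policy()
# is hypothetical:
#
#   try:
#       evaluate_policy(job)
#   except PolicyEvaluationError as e:
#       job.status = e.status           # defaults to 'failed'
#       job.result_traceback = e.tb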
@@ -14,21 +14,14 @@ from jinja2.exceptions import UndefinedError, TemplateSyntaxError, SecurityError
 # Django
 from django.core import exceptions as django_exceptions
 from django.core.serializers.json import DjangoJSONEncoder
-from django.db.models.signals import (
-    post_save,
-    post_delete,
-)
-from django.db.models.signals import m2m_changed
+from django.db.models.signals import m2m_changed, post_save
 from django.db import models
 from django.db.models.fields.related import lazy_related_operation
 from django.db.models.fields.related_descriptors import (
     ReverseOneToOneDescriptor,
     ForwardManyToOneDescriptor,
     ManyToManyDescriptor,
     ReverseManyToOneDescriptor,
     create_forward_many_to_many_manager,
 )
 from django.utils.encoding import smart_str
 from django.db.models import JSONField
 from django.utils.functional import cached_property
 from django.utils.translation import gettext_lazy as _
@@ -54,7 +47,6 @@ __all__ = [
     'ImplicitRoleField',
     'SmartFilterField',
     'OrderedManyToManyField',
-    'update_role_parentage_for_instance',
     'is_implicit_parent',
 ]
@@ -146,34 +138,6 @@ class AutoOneToOneField(models.OneToOneField):
         setattr(cls, related.get_accessor_name(), AutoSingleRelatedObjectDescriptor(related))


-def resolve_role_field(obj, field):
-    ret = []
-
-    field_components = field.split('.', 1)
-    if hasattr(obj, field_components[0]):
-        obj = getattr(obj, field_components[0])
-    else:
-        return []
-
-    if obj is None:
-        return []
-
-    if len(field_components) == 1:
-        # use extremely generous duck typing to accommodate all possible forms
-        # of the model that may be used during various migrations
-        if obj._meta.model_name != 'role' or obj._meta.app_label != 'main':
-            raise Exception(smart_str('{} refers to a {}, not a Role'.format(field, type(obj))))
-        ret.append(obj.id)
-    else:
-        if type(obj) is ManyToManyDescriptor:
-            for o in obj.all():
-                ret += resolve_role_field(o, field_components[1])
-        else:
-            ret += resolve_role_field(obj, field_components[1])
-
-    return ret
-
-
 def is_implicit_parent(parent_role, child_role):
     """
     Determine if the parent_role is an implicit parent as defined by
@@ -210,34 +174,6 @@ def is_implicit_parent(parent_role, child_role):
     return False


-def update_role_parentage_for_instance(instance):
-    """update_role_parentage_for_instance
-    updates the parents listing for all the roles
-    of a given instance if they have changed
-    """
-    parents_removed = set()
-    parents_added = set()
-    for implicit_role_field in getattr(instance.__class__, '__implicit_role_fields'):
-        cur_role = getattr(instance, implicit_role_field.name)
-        original_parents = set(json.loads(cur_role.implicit_parents))
-        new_parents = implicit_role_field._resolve_parent_roles(instance)
-        removals = original_parents - new_parents
-        if removals:
-            cur_role.parents.remove(*list(removals))
-            parents_removed.add(cur_role.pk)
-        additions = new_parents - original_parents
-        if additions:
-            cur_role.parents.add(*list(additions))
-            parents_added.add(cur_role.pk)
-        new_parents_list = list(new_parents)
-        new_parents_list.sort()
-        new_parents_json = json.dumps(new_parents_list)
-        if cur_role.implicit_parents != new_parents_json:
-            cur_role.implicit_parents = new_parents_json
-            cur_role.save(update_fields=['implicit_parents'])
-    return (parents_added, parents_removed)


 class ImplicitRoleDescriptor(ForwardManyToOneDescriptor):
     pass
@@ -269,65 +205,6 @@ class ImplicitRoleField(models.ForeignKey):
         getattr(cls, '__implicit_role_fields').append(self)

         post_save.connect(self._post_save, cls, True, dispatch_uid='implicit-role-post-save')
-        post_delete.connect(self._post_delete, cls, True, dispatch_uid='implicit-role-post-delete')
-
-        function = lambda local, related, field: self.bind_m2m_changed(field, related, local)
-        lazy_related_operation(function, cls, "self", field=self)
-
-    def bind_m2m_changed(self, _self, _role_class, cls):
-        if not self.parent_role:
-            return
-
-        field_names = self.parent_role
-        if type(field_names) is not list:
-            field_names = [field_names]
-
-        for field_name in field_names:
-            if field_name.startswith('singleton:'):
-                continue
-
-            field_name, sep, field_attr = field_name.partition('.')
-            # Non-existent fields will occur if ever a parent model is
-            # moved inside a migration, needed for the job_template_organization_field
-            # migration in particular
-            # consistency is assured by unit test awx.main.tests.functional
-            field = getattr(cls, field_name, None)
-
-            if field and type(field) is ReverseManyToOneDescriptor or type(field) is ManyToManyDescriptor:
-                if '.' in field_attr:
-                    raise Exception('Referencing deep roles through ManyToMany fields is unsupported.')
-
-                if type(field) is ReverseManyToOneDescriptor:
-                    sender = field.through
-                else:
-                    sender = field.related.through
-
-                reverse = type(field) is ManyToManyDescriptor
-                m2m_changed.connect(self.m2m_update(field_attr, reverse), sender, weak=False)
-
-    def m2m_update(self, field_attr, _reverse):
-        def _m2m_update(instance, action, model, pk_set, reverse, **kwargs):
-            if action == 'post_add' or action == 'pre_remove':
-                if _reverse:
-                    reverse = not reverse
-
-                if reverse:
-                    for pk in pk_set:
-                        obj = model.objects.get(pk=pk)
-                        if action == 'post_add':
-                            getattr(instance, field_attr).children.add(getattr(obj, self.name))
-                        if action == 'pre_remove':
-                            getattr(instance, field_attr).children.remove(getattr(obj, self.name))
-
-                else:
-                    for pk in pk_set:
-                        obj = model.objects.get(pk=pk)
-                        if action == 'post_add':
-                            getattr(instance, self.name).parents.add(getattr(obj, field_attr))
-                        if action == 'pre_remove':
-                            getattr(instance, self.name).parents.remove(getattr(obj, field_attr))
-
-        return _m2m_update

     def _post_save(self, instance, created, *args, **kwargs):
         Role_ = utils.get_current_apps().get_model('main', 'Role')
@@ -337,68 +214,24 @@ class ImplicitRoleField(models.ForeignKey):
         Model = utils.get_current_apps().get_model('main', instance.__class__.__name__)
         latest_instance = Model.objects.get(pk=instance.pk)

-        # Avoid circular import
-        from awx.main.models.rbac import batch_role_ancestor_rebuilding, Role
-
-        with batch_role_ancestor_rebuilding():
-            # Create any missing role objects
-            missing_roles = []
-            for implicit_role_field in getattr(latest_instance.__class__, '__implicit_role_fields'):
-                cur_role = getattr(latest_instance, implicit_role_field.name, None)
-                if cur_role is None:
-                    missing_roles.append(Role_(role_field=implicit_role_field.name, content_type_id=ct_id, object_id=latest_instance.id))
-            if len(missing_roles) > 0:
-                Role_.objects.bulk_create(missing_roles)
-                updates = {}
-                role_ids = []
-                for role in Role_.objects.filter(content_type_id=ct_id, object_id=latest_instance.id):
-                    setattr(latest_instance, role.role_field, role)
-                    updates[role.role_field] = role.id
-                    role_ids.append(role.id)
-                type(latest_instance).objects.filter(pk=latest_instance.pk).update(**updates)
-                Role.rebuild_role_ancestor_list(role_ids, [])
-
-            update_role_parentage_for_instance(latest_instance)
+        # Create any missing role objects
+        missing_roles = []
+        for implicit_role_field in getattr(latest_instance.__class__, '__implicit_role_fields'):
+            cur_role = getattr(latest_instance, implicit_role_field.name, None)
+            if cur_role is None:
+                missing_roles.append(Role_(role_field=implicit_role_field.name, content_type_id=ct_id, object_id=latest_instance.id))
+        if len(missing_roles) > 0:
+            Role_.objects.bulk_create(missing_roles)
+            updates = {}
+            role_ids = []
+            for role in Role_.objects.filter(content_type_id=ct_id, object_id=latest_instance.id):
+                setattr(latest_instance, role.role_field, role)
+                updates[role.role_field] = role.id
+                role_ids.append(role.id)
+            type(latest_instance).objects.filter(pk=latest_instance.pk).update(**updates)
         instance.refresh_from_db()

-    def _resolve_parent_roles(self, instance):
-        if not self.parent_role:
-            return set()
-
-        paths = self.parent_role if type(self.parent_role) is list else [self.parent_role]
-        parent_roles = set()
-
-        for path in paths:
-            if path.startswith("singleton:"):
-                singleton_name = path[10:]
-                Role_ = utils.get_current_apps().get_model('main', 'Role')
-                qs = Role_.objects.filter(singleton_name=singleton_name)
-                if qs.count() >= 1:
-                    role = qs[0]
-                else:
-                    role = Role_.objects.create(singleton_name=singleton_name, role_field=singleton_name)
-                parents = [role.id]
-            else:
-                parents = resolve_role_field(instance, path)
-
-            for parent in parents:
-                parent_roles.add(parent)
-        return parent_roles
-
-    def _post_delete(self, instance, *args, **kwargs):
-        role_ids = []
-        for implicit_role_field in getattr(instance.__class__, '__implicit_role_fields'):
-            role_ids.append(getattr(instance, implicit_role_field.name + '_id'))
-
-        Role_ = utils.get_current_apps().get_model('main', 'Role')
-        child_ids = [x for x in Role_.parents.through.objects.filter(to_role_id__in=role_ids).distinct().values_list('from_role_id', flat=True)]
-        Role_.objects.filter(id__in=role_ids).delete()
-
-        # Avoid circular import
-        from awx.main.models.rbac import Role
-
-        Role.rebuild_role_ancestor_list([], child_ids)
-        instance.refresh_from_db()


 class SmartFilterField(models.TextField):
@@ -832,7 +665,7 @@ class CredentialTypeInjectorField(JSONSchemaField):
                     'type': 'string',
                     # The environment variable _value_ can be any ascii,
                     # but pexpect will choke on any unicode
-                    'pattern': '^[\x00-\x7F]*$',
+                    'pattern': '^[\x00-\x7f]*$',
                 },
             },
             'additionalProperties': False,
@@ -1039,7 +872,7 @@ class OrderedManyToManyField(models.ManyToManyField):
         descriptor = getattr(instance, self.name)
         order_with_respect_to = descriptor.source_field_name

-        for i, ig in enumerate(sender.objects.filter(**{order_with_respect_to: instance.pk})):
+        for i, ig in enumerate(sender.objects.filter(**{order_with_respect_to: instance.pk}).order_by('id')):
             if ig.position != i:
                 ig.position = i
                 ig.save()
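
# Illustrative sketch (editor's addition): the lowered '\x7f' bound matches the
# same characters; the JSON-schema pattern simply constrains env values to ASCII:
#
#   import re
#   re.fullmatch('^[\x00-\x7f]*$', 'PLAIN_TEXT=ok')  # -> match
#   re.fullmatch('^[\x00-\x7f]*$', 'naïve')          # -> None (non-ASCII ï)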
@@ -1,26 +0,0 @@
import logging
from django.core import management
from django.core.management.base import BaseCommand

from awx.main.models import OAuth2AccessToken
from oauth2_provider.models import RefreshToken


class Command(BaseCommand):
    def init_logging(self):
        log_levels = dict(enumerate([logging.ERROR, logging.INFO, logging.DEBUG, 0]))
        self.logger = logging.getLogger('awx.main.commands.cleanup_tokens')
        self.logger.setLevel(log_levels.get(self.verbosity, 0))
        handler = logging.StreamHandler()
        handler.setFormatter(logging.Formatter('%(message)s'))
        self.logger.addHandler(handler)
        self.logger.propagate = False

    def execute(self, *args, **options):
        self.verbosity = int(options.get('verbosity', 1))
        self.init_logging()
        total_accesstokens = OAuth2AccessToken.objects.all().count()
        total_refreshtokens = RefreshToken.objects.all().count()
        management.call_command('cleartokens')
        self.logger.info("Expired OAuth 2 Access Tokens deleted: {}".format(total_accesstokens - OAuth2AccessToken.objects.all().count()))
        self.logger.info("Expired OAuth 2 Refresh Tokens deleted: {}".format(total_refreshtokens - RefreshToken.objects.all().count()))
@@ -1,34 +0,0 @@
# Django
from django.core.management.base import BaseCommand, CommandError
from django.contrib.auth.models import User
from django.core.exceptions import ObjectDoesNotExist

# AWX
from awx.api.serializers import OAuth2TokenSerializer


class Command(BaseCommand):
    """Command that creates an OAuth2 token for a certain user. Returns the value of the created token."""

    help = 'Creates an OAuth2 token for a user.'

    def add_arguments(self, parser):
        parser.add_argument('--user', dest='user', type=str)

    def handle(self, *args, **options):
        if not options['user']:
            raise CommandError('Username not supplied. Usage: awx-manage create_oauth2_token --user=username.')
        try:
            user = User.objects.get(username=options['user'])
        except ObjectDoesNotExist:
            raise CommandError('The user does not exist.')
        config = {'user': user, 'scope': 'write'}
        serializer_obj = OAuth2TokenSerializer()

        class FakeRequest(object):
            def __init__(self):
                self.user = user

        serializer_obj.context['request'] = FakeRequest()
        token_record = serializer_obj.create(config)
        self.stdout.write(token_record.token)
@@ -4,6 +4,7 @@
 from django.core.management.base import BaseCommand
 from django.db import transaction
 from crum import impersonate
+from ansible_base.resource_registry.signals.handlers import no_reverse_sync
 from awx.main.models import User, Organization, Project, Inventory, CredentialType, Credential, Host, JobTemplate
 from awx.main.signals import disable_computed_fields

@@ -16,8 +17,9 @@ class Command(BaseCommand):
     def handle(self, *args, **kwargs):
         # Wrap the operation in an atomic block, so we do not accidentally
         # create the organization but not create the project, etc.
-        with transaction.atomic():
-            self._handle()
+        with no_reverse_sync():
+            with transaction.atomic():
+                self._handle()

     def _handle(self):
         changed = False
@@ -4,8 +4,9 @@
 from django.db import transaction
 from django.core.management.base import BaseCommand, CommandError

+from ansible_base.lib.utils.db import advisory_lock
+
 from awx.main.models import Instance
-from awx.main.utils.pglock import advisory_lock


 class Command(BaseCommand):

@@ -63,7 +63,7 @@ class AWXInstance:
     def instance_pretty(self):
         instance = (
             self.instance.hostname,
-            urljoin(settings.TOWER_URL_BASE, f"/#/instances/{self.instance.pk}/details"),
+            urljoin(settings.TOWER_URL_BASE, f"{settings.OPTIONAL_UI_URL_PREFIX}/infrastructure/instances/{self.instance.pk}/details"),
         )
         return f"[\"{instance[0]}\"]({instance[1]})"
@@ -1,195 +0,0 @@
import json
import os
import sys
import re
from typing import Any

from django.core.management.base import BaseCommand
from django.conf import settings

from awx.conf import settings_registry


class Command(BaseCommand):
    help = 'Dump the current auth configuration in django_ansible_base.authenticator format, currently supports LDAP and SAML'

    DAB_SAML_AUTHENTICATOR_KEYS = {
        "SP_ENTITY_ID": True,
        "SP_PUBLIC_CERT": True,
        "SP_PRIVATE_KEY": True,
        "ORG_INFO": True,
        "TECHNICAL_CONTACT": True,
        "SUPPORT_CONTACT": True,
        "SP_EXTRA": False,
        "SECURITY_CONFIG": False,
        "EXTRA_DATA": False,
        "ENABLED_IDPS": True,
        "CALLBACK_URL": False,
    }

    DAB_LDAP_AUTHENTICATOR_KEYS = {
        "SERVER_URI": True,
        "BIND_DN": False,
        "BIND_PASSWORD": False,
        "CONNECTION_OPTIONS": False,
        "GROUP_TYPE": True,
        "GROUP_TYPE_PARAMS": True,
        "GROUP_SEARCH": False,
        "START_TLS": False,
        "USER_DN_TEMPLATE": True,
        "USER_ATTR_MAP": True,
        "USER_SEARCH": False,
    }

    def is_enabled(self, settings, keys):
        missing_fields = []
        for key, required in keys.items():
            if required and not settings.get(key):
                missing_fields.append(key)
        if missing_fields:
            return False, missing_fields
        return True, None

    def get_awx_ldap_settings(self) -> dict[str, dict[str, Any]]:
        awx_ldap_settings = {}

        for awx_ldap_setting in settings_registry.get_registered_settings(category_slug='ldap'):
            key = awx_ldap_setting.removeprefix("AUTH_LDAP_")
            value = getattr(settings, awx_ldap_setting, None)
            awx_ldap_settings[key] = value

        grouped_settings = {}

        for key, value in awx_ldap_settings.items():
            match = re.search(r'(\d+)', key)
            index = int(match.group()) if match else 0
            new_key = re.sub(r'\d+_', '', key)

            if index not in grouped_settings:
                grouped_settings[index] = {}

            grouped_settings[index][new_key] = value
            if new_key == "GROUP_TYPE" and value:
                grouped_settings[index][new_key] = type(value).__name__

            if new_key == "SERVER_URI" and value:
                value = value.split(", ")
                grouped_settings[index][new_key] = value

            if type(value).__name__ == "LDAPSearch":
                data = []
                data.append(value.base_dn)
                data.append("SCOPE_SUBTREE")
                data.append(value.filterstr)
                grouped_settings[index][new_key] = data

        return grouped_settings
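# Illustrative sketch (editor's addition): how the grouping regexes above split a
# numbered LDAP setting name into (index, key):
#
#   import re
#   key = "1_SERVER_URI"                   # from AUTH_LDAP_1_SERVER_URI
#   int(re.search(r'(\d+)', key).group())  # -> 1
#   re.sub(r'\d+_', '', key)               # -> 'SERVER_URI'
#   # unnumbered settings (plain 'SERVER_URI') land in group 0
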
def get_awx_saml_settings(self) -> dict[str, Any]:
|
||||
awx_saml_settings = {}
|
||||
for awx_saml_setting in settings_registry.get_registered_settings(category_slug='saml'):
|
||||
awx_saml_settings[awx_saml_setting.removeprefix("SOCIAL_AUTH_SAML_")] = getattr(settings, awx_saml_setting, None)
|
||||
|
||||
return awx_saml_settings
|
||||
|
||||

    def format_config_data(self, enabled, awx_settings, type, keys, name):
        config = {
            "type": f"ansible_base.authentication.authenticator_plugins.{type}",
            "name": name,
            "enabled": enabled,
            "create_objects": True,
            "users_unique": False,
            "remove_users": True,
            "configuration": {},
        }
        for k in keys:
            v = awx_settings.get(k)
            config["configuration"].update({k: v})

        if type == "saml":
            idp_to_key_mapping = {
                "url": "IDP_URL",
                "x509cert": "IDP_X509_CERT",
                "entity_id": "IDP_ENTITY_ID",
                "attr_email": "IDP_ATTR_EMAIL",
                "attr_groups": "IDP_GROUPS",
                "attr_username": "IDP_ATTR_USERNAME",
                "attr_last_name": "IDP_ATTR_LAST_NAME",
                "attr_first_name": "IDP_ATTR_FIRST_NAME",
                "attr_user_permanent_id": "IDP_ATTR_USER_PERMANENT_ID",
            }
            for idp_name in awx_settings.get("ENABLED_IDPS", {}):
                for key in idp_to_key_mapping:
                    value = awx_settings["ENABLED_IDPS"][idp_name].get(key)
                    if value is not None:
                        config["name"] = idp_name
                        config["configuration"].update({idp_to_key_mapping[key]: value})

        return config
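    # Illustrative result shape (values invented for the example):
    #   {
    #       "type": "ansible_base.authentication.authenticator_plugins.ldap",
    #       "name": "LDAP_0",
    #       "enabled": True,
    #       "create_objects": True,
    #       "users_unique": False,
    #       "remove_users": True,
    #       "configuration": {"SERVER_URI": ["ldaps://ldap.example.org"], ...},
    #   }
    # For SAML, "name" is overwritten with the last matching IdP name from
    # ENABLED_IDPS, and per-IdP fields are flattened into "configuration"
    # via idp_to_key_mapping.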

    def add_arguments(self, parser):
        parser.add_argument(
            "output_file",
            nargs="?",
            type=str,
            default=None,
            help="Output JSON file path",
        )

    def handle(self, *args, **options):
        try:
            data = []

            # dump SAML settings
            awx_saml_settings = self.get_awx_saml_settings()
            awx_saml_enabled, saml_missing_fields = self.is_enabled(awx_saml_settings, self.DAB_SAML_AUTHENTICATOR_KEYS)
            if awx_saml_enabled:
                awx_saml_name = awx_saml_settings["ENABLED_IDPS"]
                data.append(
                    self.format_config_data(
                        awx_saml_enabled,
                        awx_saml_settings,
                        "saml",
                        self.DAB_SAML_AUTHENTICATOR_KEYS,
                        awx_saml_name,
                    )
                )
            else:
                data.append({"SAML_missing_fields": saml_missing_fields})

            # dump LDAP settings
            awx_ldap_group_settings = self.get_awx_ldap_settings()
            for awx_ldap_name, awx_ldap_settings in awx_ldap_group_settings.items():
                awx_ldap_enabled, ldap_missing_fields = self.is_enabled(awx_ldap_settings, self.DAB_LDAP_AUTHENTICATOR_KEYS)
                if awx_ldap_enabled:
                    data.append(
                        self.format_config_data(
                            awx_ldap_enabled,
                            awx_ldap_settings,
                            "ldap",
                            self.DAB_LDAP_AUTHENTICATOR_KEYS,
                            f"LDAP_{awx_ldap_name}",
                        )
                    )
                else:
                    data.append({f"LDAP_{awx_ldap_name}_missing_fields": ldap_missing_fields})

            # write to file if requested
            if options["output_file"]:
                # Define the path for the output JSON file
                output_file = options["output_file"]

                # Ensure the directory exists
                os.makedirs(os.path.dirname(output_file), exist_ok=True)

                # Write data to the JSON file
                with open(output_file, "w") as f:
                    json.dump(data, f, indent=4)

                self.stdout.write(self.style.SUCCESS(f"Auth config data dumped to {output_file}"))
            else:
                self.stdout.write(json.dumps(data, indent=4))

        except Exception as e:
            self.stdout.write(self.style.ERROR(f"An error occurred: {str(e)}"))
            sys.exit(1)
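The command above is invoked like any other Django management command. A minimal sketch, assuming it is registered as `dump_auth_config` (the file path is not shown in this diff) and that Django settings are already configured:

```python
# Hypothetical invocation sketch; the command name and output path are
# assumptions for illustration, not taken from this diff.
from django.core.management import call_command

# Print the converted authenticator config to stdout:
call_command("dump_auth_config")

# Or write it to a file. The command calls
# os.makedirs(os.path.dirname(output_file), exist_ok=True), so the path
# must include a directory component; a bare filename would fail.
call_command("dump_auth_config", "/tmp/dab/auth_config.json")
```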
@@ -21,6 +21,9 @@ from django.utils.encoding import smart_str
 # DRF error class to distinguish license exceptions
 from rest_framework.exceptions import PermissionDenied
 
+# django-ansible-base
+from ansible_base.lib.utils.db import advisory_lock
+
 # AWX inventory imports
 from awx.main.models.inventory import Inventory, InventorySource, InventoryUpdate, Host
 from awx.main.utils.mem_inventory import MemInventory, dict_to_mem_data
@@ -30,9 +33,9 @@ from awx.main.utils.safe_yaml import sanitize_jinja
 from awx.main.models.rbac import batch_role_ancestor_rebuilding
 from awx.main.utils import ignore_inventory_computed_fields, get_licenser
 from awx.main.utils.execution_environments import get_default_execution_environment
+from awx.main.utils.inventory_vars import update_group_variables
 from awx.main.signals import disable_activity_stream
 from awx.main.constants import STANDARD_INVENTORY_UPDATE_ENV
-from awx.main.utils.pglock import advisory_lock
 
 logger = logging.getLogger('awx.main.commands.inventory_import')
 
@@ -455,19 +458,19 @@ class Command(BaseCommand):
         """
         Update inventory variables from the "all" group.
         """
         # TODO: We disable variable overwrite here in case user-defined inventory variables get
         # mangled. But we still need to figure out a better way of processing multiple inventory
         # update variables mixing with each other.
         # issue for this: https://github.com/ansible/awx/issues/11623
 
-        if self.inventory.kind == 'constructed' and self.inventory_source.overwrite_vars:
-            # NOTE: we had to add an exception case to not merge variables
-            # to make constructed inventory coherent
-            db_variables = self.all_group.variables
-        else:
-            db_variables = self.inventory.variables_dict
-            db_variables.update(self.all_group.variables)
-
+        db_variables = update_group_variables(
+            group_id=None,  # `None` denotes the 'all' group (which doesn't have a pk).
+            newvars=self.all_group.variables,
+            dbvars=self.inventory.variables_dict,
+            invsrc_id=self.inventory_source.id,
+            inventory_id=self.inventory.id,
+            overwrite_vars=self.overwrite_vars,
+        )
         if db_variables != self.inventory.variables_dict:
             self.inventory.variables = json.dumps(db_variables)
             self.inventory.save(update_fields=['variables'])
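To see the before/after semantics side by side: the removed branch chose between two plain-dict merge strategies, which the new `update_group_variables` call now encapsulates. A minimal sketch of the old behavior, using ordinary dicts (`update_group_variables` itself is not shown in this diff, so this is not its implementation):

```python
# Illustrative only: the two merge paths the removed if/else chose between.
def old_merge(inventory_vars: dict, imported_vars: dict, constructed_overwrite: bool) -> dict:
    if constructed_overwrite:
        # Constructed inventory with overwrite_vars: take the imported vars as-is.
        return dict(imported_vars)
    # Default path: imported vars win key-by-key over existing inventory vars.
    merged = dict(inventory_vars)
    merged.update(imported_vars)
    return merged

print(old_merge({"a": 1, "b": 2}, {"b": 99}, False))  # {'a': 1, 'b': 99}
print(old_merge({"a": 1, "b": 2}, {"b": 99}, True))   # {'b': 99}
```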
@@ -10,7 +10,7 @@ from django.db.models.signals import post_save
 from awx.conf import settings_registry
 from awx.conf.models import Setting
 from awx.conf.signals import on_post_save_setting
-from awx.main.models import UnifiedJob, Credential, NotificationTemplate, Job, JobTemplate, WorkflowJob, WorkflowJobTemplate, OAuth2Application
+from awx.main.models import UnifiedJob, Credential, NotificationTemplate, Job, JobTemplate, WorkflowJob, WorkflowJobTemplate
 from awx.main.utils.encryption import encrypt_field, decrypt_field, encrypt_value, decrypt_value, get_encryption_key
 
 
@@ -45,7 +45,6 @@ class Command(BaseCommand):
         self._notification_templates()
         self._credentials()
         self._unified_jobs()
-        self._oauth2_app_secrets()
         self._settings()
         self._survey_passwords()
         return self.new_key
@@ -74,13 +73,6 @@ class Command(BaseCommand):
             uj.start_args = encrypt_field(uj, 'start_args', secret_key=self.new_key)
             uj.save()
 
-    def _oauth2_app_secrets(self):
-        for app in OAuth2Application.objects.iterator():
-            raw = app.client_secret
-            app.client_secret = raw
-            encrypted = encrypt_value(raw, secret_key=self.new_key)
-            OAuth2Application.objects.filter(pk=app.pk).update(client_secret=encrypted)
-
     def _settings(self):
         # don't update the cache, the *actual* value isn't changing
         post_save.disconnect(on_post_save_setting, sender=Setting)
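Two details of the removed `_oauth2_app_secrets` helper are easy to miss: the `app.client_secret = raw` assignment has no visible effect as written, and the final queryset `.update(...)` writes the re-encrypted value straight to the database, which (standard Django behavior) bypasses `Model.save()` and any save-time processing of the field.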
Some files were not shown because too many files have changed in this diff.