Mirror of https://github.com/ansible/awx.git, synced 2026-02-10 14:14:43 -03:30.

Compare commits: 493 commits
SHA1 of each commit in the comparison, newest first:

```
c5bea2b557 6d0c47fdd0 54b4acbdfc a41766090e 34fa897dda 32df114e41 018f235a64 7e77235d5e 139d8f0ae2 7691365aea
59f61517d4 fa670e2d7f a87a044d64 381ade1148 864a30e3d4 5f42db67e6 ddf4f288d4 e75bc8bc1e bb533287b8 9979fc659e
9e5babc093 c71e2524ed 48b4c62186 853730acb9 f1448fced1 7697b6a69b 22a491c32c cbd9dce940 a4fdcc1cca df95439008
acd834df8b 587f0ecf98 5a2091f7bf fa7423819a fde8af9f11 209e7e27b1 6c7d29a982 282ba36839 b727d2c3b3 7fc3d5c7c7
4e055f46c4 f595985b7c ea232315bf ee251812b5 00ba1ea569 d91af132c1 94e5795dfc c4688d6298 6763badea3 2c4ad6ef0f
37f44d7214 98bbc836a6 b59aff50dc a70b0c1ddc db72c9d5b8 4e0d19914f 6f2307f50e dbc2215bb6 7c08b29827 407194d320
853af295d9 4738c8333a 13dcea0afd bc2d339981 bef9ef10bb 8645fe5c57 b93aa20362 4bbfc8a946 2c8eef413b d5bad1a533
f6c0effcb2 31a086b11a d94f766fcb a7113549eb bfd811f408 030704a9e1 c312d9bce3 aadcc217eb 345c1c11e9 2c3a7fafc5
dbcd32a1d9 d45e258a78 d16b69a102 8b4efbc973 4cb061e7db 31db6a1447 ad9d5904d8 b837d549ff 9e22865d2e ee3e3e1516
4a8f6e45f8 6a317cca1b d67af79451 fe77fda7b2 f613b76baa 054cbe69d7 87e9dcb6d7 c8829b057e a0b376a6ca d675207f99
20504042c9 0e87e97820 1f154742df 85fc81aab1 5cfeeb3e87 a8c07b06d8 53c5feaf6b 6f57aaa8f5 bea74a401d 54e85813c8
b69ed08fe5 de25408a23 b17f0a188b fb860d76ce 451f20ce0f c1dc0c7b86 d65ea2a3d5 8827ae7554 4915262af1 d43c91e1a5
b470ca32af 793777bec7 6dc4a4508d cf09a4220d 659c3b64de 37ad690d09 7845ec7e01 a15bcf1d55 7b3fb2c2a8 6df47c8449
cae42653bf da46a29f40 0eb465531c d0fe0ed796 ceafa14c9d 08e1454098 776b661fb3 af6ccdbde5 559ab3564b 208ef0ce25
c3d9aa54d8 66efe7198a adf930ee42 892410477a 0d4f653794 8de8f6dce2 fc9064e27f 7de350dc3e d4bdaad4d8 a9b2ffa3e9
1b8d409043 da2bccf5a8 a2f083bd8e 4d641b6cf5 439c3f0c23 946bbe3560 20f054d600 918d5b3565 158314af50 4754819a09
78fc23138a 014534bfa5 2502e7c7d8 fb237e3834 e4646ae611 7dc77546f4 f5f85666c8 47a061eb39 c760577855 814ceb0d06
f178c84728 c0f71801f6 4e8e1398d7 3d6a8fd4ef e873bb1304 672f1eb745 199507c6f1 a176c04c14 e3af658f82 e8a3b96482
c015e8413e 390c2d8907 97605c5f19 818c326160 c98727d83e a138a92e67 7aed19ffda 3bb559dd09 389a729b75 2f3c9122fd
733478ee19 41c6337fc1 7446da1c2f c79fca5ceb dc5f43927a 35a5a81e19 9dcc11d54c 74ce21fa54 eb93660b36 f50e597548
817c3b36b9 1859a6ae69 0645d342dd 61ec03e540 09f0a366bf 778961d31e f962c88df3 8db3ffe719 cc5d4dd119 86204cf23b
468949b899 f1d9966224 b022b50966 e2f4213839 ae1235b223 c061f59f1c 3edaaebba2 7cdf1c7f96 d558204192 d06ce8f911
4b6f7e0ebe 370c567be1 9be64f3de5 30500e5a95 bb323c5710 7571df49d5 1559c21033 d9b81731e9 2034cca3a9 0b5e59d9cb
f48b2d1ae5 b44bb98c7e 8cafdf0400 3f566c8737 c8021a25bf 934646a0f6 9bb97dd658 7150f5edc6 93da15c0ee ab593bda45
065bd3ae2a 8ff7260bc6 a635445082 949e7efab1 615f09226f d903c524f5 393d9c39c6 dfab342bb4 12843eccf7 dd9160135d
ad96a92fa7 ca8085fe7e b076cb00a9 ee9eac15dc 3f2f7b75a6 b71645f3b1 eb300252b8 2e2cd7f2de 727278aaa3 81825ab755
7f2a1b6b03 1b56d94d30 e1e32c971c a4a2fabc01 b7b7bfa520 887604317e d35d8b6ed7 ec28eff7f7 a5d17539c6 a49d894cf1
b3466d4449 237adc6150 09b028ee3c fb83bfbc31 88e406e121 59d0bcc63f 3fb3125bc3 d70c6b9474 5549516a37 14ac91a8a2
d5753818a0 33010a2e02 14454cc670 7ab2bca16e f0f655f2c3 4286d411a7 06ad32ed8e 1ebff23232 700de14c76 8605e339df
e50954ce40 7caca60308 f4e13af056 decdb56288 bcd4c2e8ef d663066ac5 1ceebb275c f78ba282a6 81d88df757 0bdb01a9e9
cd91fbf59f f240e640e5 46f489185e dbb80fb7e3 cb3d357ce1 dfa4db9266 6906a88dc9 1f7be9258c dcce024424 79d7179c72
4d80f886e0 5179333185 362e11aaf2 decff01fa4 a14cc8199d b6436826f6 2109b5039e b6f9b73418 40a8a3cb2f 19f80c0a26
5d1bb2125e 99c512bcef ed0329f5db dd53345397 f66cde51d7 d1c31687fc 38424487f1 b0565e9937 44d85b589c 46f816e7a4
54b32c10f0 20202054cc e84e2962d0 2259047527 f429ef6ca7 4b637c1319 4c41f6b018 3ae72219b4 402c29dc52 8eb4a9a2a0
36f3b46726 55c6a319dc 56b6a07f6e 519fd22bec 2e5306ae8e 068e6acbd5 f9a23a5645 40150a2be8 b79aa5b1ed b3aeb962ce
2300b8fddf 3a3284b5df 2359004cc1 694d7e98e7 8c9c02c975 8a902debd5 6dcaa09dfb 21fd6af0f9 eeae1d59d4 a252d0ae33
48971411cc 083c05f12a b558397b67 904c6001e9 818e11dfdc 7fc13a0569 92c693f14e f2417f0ed2 8f22188116 05502c0af8
957ce59bf7 cc4cc37d46 1e254c804c 1b44bebed3 a4cf55bdba c333d0e82f b093c89a84 f98493aa61 c36d2b0485 8ddb604bf1
cd9dd43be7 82323390a7 4c5ac1d3da 9c06370e33 449b95d1eb 1712540c8e 7cf639d8eb dbfcc40d7c 73d2c92ae3 24a4242147
92ce85b688 9531f8377a 15a16b3dd1 a37e7bf147 a2fcd2f97a c394ffdd19 69102cf265 a188798543 60108ebd10 8c7c00451a
7a1ed406da f916ffe1e9 901dbd697e d8b4a9825e 6db66c5f81 82ad7dcf40 93500f9fea 9ba70c151d 46dc61253f 6cb2cd18b0
5d1dd8ec41 9f69daf787 16ece5de7e ab0e9265c5 04cbbbccfa d1cacf64de 5385eb0fb3 7d7503279d d860d1d91b 3a17c45b64
bca68bcdf1 c32f234ebb 5cb3d3b078 5199cc5246 387e877485 d54c5934ff 2fa5116197 527755d986 f9c0b97c53 65655f84de
9aa3d5584a 266e31d71a a1bbe75aed 695f1cf892 0ab103d8c4 9ac1c0f6c2 2e168d8177 d4f7bfef18 985a8d499d e3b52f0169
f69f600cff 74cd23be5c 209747d88e d91da39f81 5cd029df96 5a93a519f6 5f5cd960d5 42701f32fe 30d4df788f 1bcd71a8ac
43be90f051 bb1922cdbb 403f545071 a06a2a883c 2529fdcfd7 19dff9c2d1 2a6cf032f8 6119b33a50 aacf9653c5 325f5250db
b14518c1e5 6440e3cb55 b5f6aac3aa 6e5e1c8fff bf42c63c12 df24cb692b 0d825a744b 5e48bf091b 1294cec92c dae12ee1b8
b091f6cf79 fe564c5fad eb3bc84461 6aa2997dce dd00bbba42 fe6bac6d9e 87abbd4b10 fb04e5d9f6 478e2cb28d 2ac304d289
3e5851f3af adb1b12074 8fae20c48a ec364cc60e 1cfd51764e 0b8fedfd04 72a8173462 873b1fbe07 1f36e84b45 8c4bff2b86
14f636af84 0057c8daf6 d8a28b3c06
```
`.github/actions/awx_devel_image/action.yml` (vendored): 10 changed lines

```diff
@@ -11,6 +11,12 @@ runs:
       shell: bash
       run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
 
+    - name: Set lower case owner name
+      shell: bash
+      run: echo "OWNER_LC=${OWNER,,}" >> $GITHUB_ENV
+      env:
+        OWNER: '${{ github.repository_owner }}'
+
     - name: Log in to registry
       shell: bash
       run: |
@@ -18,11 +24,11 @@ runs:
 
     - name: Pre-pull latest devel image to warm cache
       shell: bash
-      run: docker pull ghcr.io/${{ github.repository_owner }}/awx_devel:${{ github.base_ref }}
+      run: docker pull -q ghcr.io/${OWNER_LC}/awx_devel:${{ github.base_ref }}
 
     - name: Build image for current source checkout
       shell: bash
       run: |
-        DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }} \
+        DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER_LC} \
         COMPOSE_TAG=${{ github.base_ref }} \
         make docker-compose-build
```
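The new "Set lower case owner name" step exists because GHCR repository paths must be lowercase while `github.repository_owner` can contain capitals; `${OWNER,,}` is Bash 4+ parameter expansion that lowercases the whole value. A minimal stand-alone sketch of the same pattern (the `MyOrg` value is illustrative):

```bash
#!/usr/bin/env bash
# ${VAR,,} lowercases the value; ${VAR^^} would uppercase it (Bash 4+).
OWNER="MyOrg"
OWNER_LC="${OWNER,,}"   # -> "myorg"
echo "pulling ghcr.io/${OWNER_LC}/awx_devel"
```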
`.github/actions/run_awx_devel/action.yml` (vendored): 26 changed lines

```diff
@@ -35,7 +35,7 @@ runs:
     - name: Start AWX
       shell: bash
       run: |
-        DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }} \
+        DEV_DOCKER_OWNER=${{ github.repository_owner }} \
         COMPOSE_TAG=${{ github.base_ref }} \
         COMPOSE_UP_OPTS="-d" \
         make docker-compose
@@ -43,31 +43,25 @@ runs:
     - name: Update default AWX password
       shell: bash
       run: |
-        while [[ "$(curl -s -o /dev/null -w ''%{http_code}'' -k https://localhost:8043/api/v2/ping/)" != "200" ]]
-        do
-          echo "Waiting for AWX..."
-          sleep 5
+        SECONDS=0
+        while [[ "$(curl -s -o /dev/null -w ''%{http_code}'' -k https://localhost:8043/api/v2/ping/)" != "200" ]]; do
+          if [[ $SECONDS -gt 600 ]]; then
+            echo "Timing out, AWX never came up"
+            exit 1
+          fi
+          echo "Waiting for AWX..."
+          sleep 5
         done
         echo "AWX is up, updating the password..."
         docker exec -i tools_awx_1 sh <<-EOSH
           awx-manage update_password --username=admin --password=password
         EOSH
 
-    - name: Build UI
-      # This must be a string comparison in composite actions:
-      # https://github.com/actions/runner/issues/2238
-      if: ${{ inputs.build-ui == 'true' }}
-      shell: bash
-      run: |
-        docker exec -i tools_awx_1 sh <<-EOSH
-          make ui-devel
-        EOSH
-
     - name: Get instance data
       id: data
       shell: bash
       run: |
-        AWX_IP=$(docker inspect -f '{{range.NetworkSettings.Networks}}{{.IPAddress}}{{end}}' tools_awx_1)
+        AWX_IP=$(docker inspect -f '{{.NetworkSettings.Networks.awx.IPAddress}}' tools_awx_1)
         ADMIN_TOKEN=$(docker exec -i tools_awx_1 awx-manage create_oauth2_token --user admin)
         echo "ip=$AWX_IP" >> $GITHUB_OUTPUT
         echo "admin_token=$ADMIN_TOKEN" >> $GITHUB_OUTPUT
```
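The rewritten wait loop relies on Bash's builtin `SECONDS` variable, which counts seconds since it was last assigned; resetting it to 0 turns it into an elapsed-time counter, so the loop can abort after ten minutes instead of hanging until the job-level timeout. The generic pattern, sketched with the same ping URL as the diff:

```bash
#!/usr/bin/env bash
SECONDS=0                                # builtin: counts seconds since assignment
until curl -fsk -o /dev/null https://localhost:8043/api/v2/ping/; do
  if (( SECONDS > 600 )); then           # give up after 10 minutes
    echo "Timed out waiting for service" >&2
    exit 1
  fi
  echo "Waiting..."
  sleep 5
done
echo "Service is up after ${SECONDS}s"
```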
`.github/dependabot.yml` (vendored, new file): 10 lines

```diff
@@ -0,0 +1,10 @@
+version: 2
+updates:
+  - package-ecosystem: "pip"
+    directory: "docs/docsite/"
+    schedule:
+      interval: "weekly"
+    open-pull-requests-limit: 2
+    labels:
+      - "docs"
+      - "dependencies"
```
`.github/pr_labeler.yml` (vendored): 3 changed lines

```diff
@@ -15,5 +15,4 @@
 
 "dependencies":
   - any: ["awx/ui/package.json"]
-  - any: ["requirements/*.txt"]
-  - any: ["requirements/requirements.in"]
+  - any: ["requirements/*"]
```
`.github/triage_replies.md` (vendored): 2 changed lines

```diff
@@ -1,7 +1,7 @@
 ## General
 - For the roundup of all the different mailing lists available from AWX, Ansible, and beyond visit: https://docs.ansible.com/ansible/latest/community/communication.html
 - Hello, we think your question is answered in our FAQ. Does this: https://www.ansible.com/products/awx-project/faq cover your question?
-- You can find the latest documentation here: https://docs.ansible.com/automation-controller/latest/html/userguide/index.html
+- You can find the latest documentation here: https://ansible.readthedocs.io/projects/awx/en/latest/userguide/index.html
 
 
 
```
`.github/workflows/ci.yml` (vendored): 63 changed lines

```diff
@@ -11,6 +11,7 @@ jobs:
   common-tests:
     name: ${{ matrix.tests.name }}
     runs-on: ubuntu-latest
+    timeout-minutes: 60
     permissions:
       packages: write
       contents: read
@@ -20,6 +21,8 @@ jobs:
       tests:
         - name: api-test
           command: /start_tests.sh
+        - name: api-migrations
+          command: /start_tests.sh test_migrations
         - name: api-lint
           command: /var/lib/awx/venv/awx/bin/tox -e linters
         - name: api-swagger
@@ -35,7 +38,9 @@ jobs:
         - name: ui-test-general
           command: make ui-test-general
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
+        with:
+          show-progress: false
 
       - name: Build awx_devel image for running checks
         uses: ./.github/actions/awx_devel_image
@@ -47,8 +52,11 @@ jobs:
 
   dev-env:
     runs-on: ubuntu-latest
+    timeout-minutes: 60
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
+        with:
+          show-progress: false
 
       - uses: ./.github/actions/run_awx_devel
         id: awx
@@ -61,15 +69,20 @@ jobs:
 
   awx-operator:
     runs-on: ubuntu-latest
+    timeout-minutes: 60
+    env:
+      DEBUG_OUTPUT_DIR: /tmp/awx_operator_molecule_test
     steps:
       - name: Checkout awx
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
         with:
+          show-progress: false
           path: awx
 
       - name: Checkout awx-operator
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
         with:
+          show-progress: false
           repository: ansible/awx-operator
           path: awx-operator
 
@@ -89,11 +102,11 @@ jobs:
       - name: Build AWX image
         working-directory: awx
         run: |
-          ansible-playbook -v tools/ansible/build.yml \
-            -e headless=yes \
-            -e awx_image=awx \
-            -e awx_image_tag=ci \
-            -e ansible_python_interpreter=$(which python3)
+          VERSION=`make version-for-buildyml` make awx-kube-build
+        env:
+          COMPOSE_TAG: ci
+          DEV_DOCKER_TAG_BASE: local
+          HEADLESS: yes
 
       - name: Run test deployment with awx-operator
         working-directory: awx-operator
@@ -102,18 +115,30 @@ jobs:
           ansible-galaxy collection install -r molecule/requirements.yml
           sudo rm -f $(which kustomize)
           make kustomize
-          KUSTOMIZE_PATH=$(readlink -f bin/kustomize) molecule -v test -s kind
+          KUSTOMIZE_PATH=$(readlink -f bin/kustomize) molecule -v test -s kind -- --skip-tags=replicas
         env:
-          AWX_TEST_IMAGE: awx
+          AWX_TEST_IMAGE: local/awx
           AWX_TEST_VERSION: ci
+          AWX_EE_TEST_IMAGE: quay.io/ansible/awx-ee:latest
+          STORE_DEBUG_OUTPUT: true
+
+      - name: Upload debug output
+        if: failure()
+        uses: actions/upload-artifact@v3
+        with:
+          name: awx-operator-debug-output
+          path: ${{ env.DEBUG_OUTPUT_DIR }}
 
   collection-sanity:
     name: awx_collection sanity
     runs-on: ubuntu-latest
+    timeout-minutes: 30
     strategy:
       fail-fast: false
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
+        with:
+          show-progress: false
 
       # The containers that GitHub Actions use have Ansible installed, so upgrade to make sure we have the latest version.
       - name: Upgrade ansible-core
@@ -121,14 +146,11 @@ jobs:
 
       - name: Run sanity tests
         run: make test_collection_sanity
-        env:
-          # needed due to cgroupsv2. This is fixed, but a stable release
-          # with the fix has not been made yet.
-          ANSIBLE_TEST_PREFER_PODMAN: 1
 
   collection-integration:
     name: awx_collection integration
     runs-on: ubuntu-latest
+    timeout-minutes: 60
     strategy:
       fail-fast: false
       matrix:
@@ -140,7 +162,9 @@ jobs:
         - name: r-z0-9
           regex: ^[r-z0-9]
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
+        with:
+          show-progress: false
 
       - uses: ./.github/actions/run_awx_devel
         id: awx
@@ -180,12 +204,15 @@ jobs:
   collection-integration-coverage-combine:
     name: combine awx_collection integration coverage
     runs-on: ubuntu-latest
+    timeout-minutes: 10
     needs:
       - collection-integration
     strategy:
       fail-fast: false
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
+        with:
+          show-progress: false
 
       - name: Upgrade ansible-core
         run: python3 -m pip install --upgrade ansible-core
```
`.github/workflows/dab-release.yml` (vendored, new file): 57 lines

```diff
@@ -0,0 +1,57 @@
+---
+name: django-ansible-base requirements update
+on:
+  workflow_dispatch:
+  schedule:
+    - cron: '0 6 * * *' # once an day @ 6 AM
+permissions:
+  pull-requests: write
+  contents: write
+jobs:
+  dab-pin-newest:
+    if: (github.repository_owner == 'ansible' && endsWith(github.repository, 'awx')) || github.event_name != 'schedule'
+    runs-on: ubuntu-latest
+    steps:
+      - id: dab-release
+        name: Get current django-ansible-base release version
+        uses: pozetroninc/github-action-get-latest-release@2a61c339ea7ef0a336d1daa35ef0cb1418e7676c # v0.8.0
+        with:
+          owner: ansible
+          repo: django-ansible-base
+          excludes: prerelease, draft
+
+      - name: Check out respository code
+        uses: actions/checkout@v4
+
+      - id: dab-pinned
+        name: Get current django-ansible-base pinned version
+        run:
+          echo "version=$(requirements/django-ansible-base-pinned-version.sh)" >> "$GITHUB_OUTPUT"
+
+      - name: Update django-ansible-base pinned version to upstream release
+        run:
+          requirements/django-ansible-base-pinned-version.sh -s ${{ steps.dab-release.outputs.release }}
+
+      - name: Create Pull Request
+        uses: peter-evans/create-pull-request@c5a7806660adbe173f04e3e038b0ccdcd758773c # v6
+        with:
+          base: devel
+          branch: bump-django-ansible-base
+          title: Bump django-ansible-base to ${{ steps.dab-release.outputs.release }}
+          body: |
+            ##### SUMMARY
+            Automated .github/workflows/dab-release.yml
+
+            django-ansible-base upstream released version == ${{ steps.dab-release.outputs.release }}
+            requirements_git.txt django-ansible-base pinned version == ${{ steps.dab-pinned.outputs.version }}
+
+            ##### ISSUE TYPE
+            - Bug, Docs Fix or other nominal change
+
+            ##### COMPONENT NAME
+            - API
+
+          commit-message: |
+            Update django-ansible-base version to ${{ steps.dab-pinned.outputs.version }}
+          add-paths:
+            requirements/requirements_git.txt
```
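The workflow above delegates version handling to `requirements/django-ansible-base-pinned-version.sh`: called with no arguments it prints the currently pinned version, and called with `-s <version>` it rewrites the pin. The script itself is not part of this diff; a hypothetical sketch of that contract, assuming the pin lives in `requirements/requirements_git.txt` as a `@<tag>` git ref:

```bash
#!/usr/bin/env bash
# Hypothetical sketch -- not the actual awx helper script.
REQ_FILE="requirements/requirements_git.txt"

if [[ "$1" == "-s" ]]; then
  # Rewrite the pinned tag on the django-ansible-base line.
  sed -i -E "s|(django-ansible-base[^@]*@)[^#[:space:]]+|\1$2|" "$REQ_FILE"
else
  # Print the tag currently pinned after the '@'.
  grep django-ansible-base "$REQ_FILE" | sed -E 's|.*@([^#[:space:]]+).*|\1|'
fi
```

Either way, the `peter-evans/create-pull-request` step then turns whatever changed in the working tree into the bump PR against `devel`.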
`.github/workflows/devel_images.yml` (vendored): 68 changed lines

```diff
@@ -2,28 +2,54 @@
 name: Build/Push Development Images
 env:
   LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
+  DOCKER_CACHE: "--no-cache" # using the cache will not rebuild git requirements and other things
 on:
+  workflow_dispatch:
   push:
     branches:
       - devel
       - release_*
       - feature_*
 jobs:
-  push:
-    if: endsWith(github.repository, '/awx') || startsWith(github.ref, 'refs/heads/release_')
+  push-development-images:
     runs-on: ubuntu-latest
+    timeout-minutes: 120
     permissions:
       packages: write
       contents: read
+    strategy:
+      fail-fast: false
+      matrix:
+        build-targets:
+          - image-name: awx_devel
+            make-target: docker-compose-buildx
+          - image-name: awx_kube_devel
+            make-target: awx-kube-dev-buildx
+          - image-name: awx
+            make-target: awx-kube-buildx
     steps:
-      - uses: actions/checkout@v3
-
-      - name: Get python version from Makefile
-        run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
-
-      - name: Set lower case owner name
+      - name: Skipping build of awx image for non-awx repository
         run: |
-          echo "OWNER_LC=${OWNER,,}" >>${GITHUB_ENV}
+          echo "Skipping build of awx image for non-awx repository"
+          exit 0
+        if: matrix.build-targets.image-name == 'awx' && !endsWith(github.repository, '/awx')
+
+      - uses: actions/checkout@v4
+        with:
+          show-progress: false
+
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v3
+
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3
+
+      - name: Set GITHUB_ENV variables
+        run: |
+          echo "DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER,,}" >> $GITHUB_ENV
+          echo "COMPOSE_TAG=${GITHUB_REF##*/}" >> $GITHUB_ENV
+          echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
         env:
           OWNER: '${{ github.repository_owner }}'
@@ -36,23 +62,17 @@ jobs:
         run: |
           echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
 
-      - name: Pre-pull image to warm build cache
-        run: |
-          docker pull ghcr.io/${OWNER_LC}/awx_devel:${GITHUB_REF##*/} || :
-          docker pull ghcr.io/${OWNER_LC}/awx_kube_devel:${GITHUB_REF##*/} || :
-          docker pull ghcr.io/${OWNER_LC}/awx:${GITHUB_REF##*/} || :
+      - name: Setup node and npm for the new UI build
+        uses: actions/setup-node@v2
+        with:
+          node-version: '18'
+        if: matrix.build-targets.image-name == 'awx'
 
-      - name: Build images
+      - name: Prebuild new UI for awx image (to speed up build process)
         run: |
-          DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER_LC} COMPOSE_TAG=${GITHUB_REF##*/} make docker-compose-build
-          DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER_LC} COMPOSE_TAG=${GITHUB_REF##*/} make awx-kube-dev-build
-          DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER_LC} COMPOSE_TAG=${GITHUB_REF##*/} make awx-kube-build
+          make ui-next
+        if: matrix.build-targets.image-name == 'awx'
 
-      - name: Push development images
+      - name: Build and push AWX devel images
         run: |
-          docker push ghcr.io/${OWNER_LC}/awx_devel:${GITHUB_REF##*/}
-          docker push ghcr.io/${OWNER_LC}/awx_kube_devel:${GITHUB_REF##*/}
-
-      - name: Push AWX k8s image, only for upstream and feature branches
-        run: docker push ghcr.io/${OWNER_LC}/awx:${GITHUB_REF##*/}
-        if: endsWith(github.repository, '/awx')
+          make ${{ matrix.build-targets.make-target }}
```
`.github/workflows/docs.yml` (vendored): 5 changed lines

```diff
@@ -6,8 +6,11 @@ jobs:
   docsite-build:
     name: docsite test build
     runs-on: ubuntu-latest
+    timeout-minutes: 30
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
+        with:
+          show-progress: false
 
       - name: install tox
         run: pip install tox
```
`.github/workflows/e2e_test.yml` (vendored, deleted): 75 lines removed

```diff
@@ -1,75 +0,0 @@
----
-name: E2E Tests
-env:
-  LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
-
-on:
-  pull_request_target:
-    types: [labeled]
-jobs:
-  e2e-test:
-    if: contains(github.event.pull_request.labels.*.name, 'qe:e2e')
-    runs-on: ubuntu-latest
-    timeout-minutes: 40
-    permissions:
-      packages: write
-      contents: read
-    strategy:
-      matrix:
-        job: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24]
-
-    steps:
-      - uses: actions/checkout@v3
-
-      - uses: ./.github/actions/run_awx_devel
-        id: awx
-        with:
-          build-ui: true
-          github-token: ${{ secrets.GITHUB_TOKEN }}
-
-      - name: Pull awx_cypress_base image
-        run: |
-          docker pull quay.io/awx/awx_cypress_base:latest
-
-      - name: Checkout test project
-        uses: actions/checkout@v3
-        with:
-          repository: ${{ github.repository_owner }}/tower-qa
-          ssh-key: ${{ secrets.QA_REPO_KEY }}
-          path: tower-qa
-          ref: devel
-
-      - name: Build cypress
-        run: |
-          cd ${{ secrets.E2E_PROJECT }}/ui-tests/awx-pf-tests
-          docker build -t awx-pf-tests .
-
-      - name: Run E2E tests
-        env:
-          CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }}
-        run: |
-          export COMMIT_INFO_BRANCH=$GITHUB_HEAD_REF
-          export COMMIT_INFO_AUTHOR=$GITHUB_ACTOR
-          export COMMIT_INFO_SHA=$GITHUB_SHA
-          export COMMIT_INFO_REMOTE=$GITHUB_REPOSITORY_OWNER
-          cd ${{ secrets.E2E_PROJECT }}/ui-tests/awx-pf-tests
-          AWX_IP=${{ steps.awx.outputs.ip }}
-          printenv > .env
-          echo "Executing tests:"
-          docker run \
-            --network '_sources_default' \
-            --ipc=host \
-            --env-file=.env \
-            -e CYPRESS_baseUrl="https://$AWX_IP:8043" \
-            -e CYPRESS_AWX_E2E_USERNAME=admin \
-            -e CYPRESS_AWX_E2E_PASSWORD='password' \
-            -e COMMAND="npm run cypress-concurrently-gha" \
-            -v /dev/shm:/dev/shm \
-            -v $PWD:/e2e \
-            -w /e2e \
-            awx-pf-tests run --project .
-
-      - uses: ./.github/actions/upload_awx_devel_logs
-        if: always()
-        with:
-          log-filename: e2e-${{ matrix.job }}.log
```
`.github/workflows/feature_branch_deletion.yml` (vendored): 13 changed lines

```diff
@@ -2,13 +2,12 @@
 name: Feature branch deletion cleanup
 env:
   LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
-on:
-  delete:
-    branches:
-      - feature_**
+on: delete
 jobs:
-  push:
+  branch_delete:
+    if: ${{ github.event.ref_type == 'branch' && startsWith(github.event.ref, 'feature_') }}
     runs-on: ubuntu-latest
+    timeout-minutes: 20
     permissions:
       packages: write
       contents: read
@@ -21,6 +20,4 @@ jobs:
       run: |
         ansible localhost -c local, -m command -a "{{ ansible_python_interpreter + ' -m pip install boto3'}}"
         ansible localhost -c local -m aws_s3 \
-          -a "bucket=awx-public-ci-files object=${GITHUB_REF##*/}/schema.json mode=delete permission=public-read"
-
-
+          -a "bucket=awx-public-ci-files object=${GITHUB_REF##*/}/schema.json mode=delobj permission=public-read"
```
`.github/workflows/label_issue.yml` (vendored): 7 changed lines

```diff
@@ -13,6 +13,7 @@ permissions:
 jobs:
   triage:
     runs-on: ubuntu-latest
+    timeout-minutes: 20
     name: Label Issue
 
     steps:
@@ -26,9 +27,13 @@ jobs:
 
   community:
     runs-on: ubuntu-latest
+    timeout-minutes: 20
     name: Label Issue - Community
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
+        with:
+          show-progress: false
+
       - uses: actions/setup-python@v4
       - name: Install python requests
         run: pip install requests
```
`.github/workflows/label_pr.yml` (vendored): 7 changed lines

```diff
@@ -14,6 +14,7 @@ permissions:
 jobs:
   triage:
     runs-on: ubuntu-latest
+    timeout-minutes: 20
     name: Label PR
 
     steps:
@@ -25,9 +26,13 @@ jobs:
 
   community:
     runs-on: ubuntu-latest
+    timeout-minutes: 20
     name: Label PR - Community
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
+        with:
+          show-progress: false
+
       - uses: actions/setup-python@v4
       - name: Install python requests
         run: pip install requests
```
`.github/workflows/pr_body_check.yml` (vendored): 1 changed line

```diff
@@ -10,6 +10,7 @@ jobs:
     if: github.repository_owner == 'ansible' && endsWith(github.repository, 'awx')
     name: Scan PR description for semantic versioning keywords
     runs-on: ubuntu-latest
+    timeout-minutes: 20
     permissions:
       packages: write
       contents: read
```
`.github/workflows/promote.yml` (vendored): 62 changed lines

```diff
@@ -7,17 +7,34 @@ env:
 on:
   release:
     types: [published]
+  workflow_dispatch:
+    inputs:
+      tag_name:
+        description: 'Name for the tag of the release.'
+        required: true
 permissions:
   contents: read # to fetch code (actions/checkout)
 
 jobs:
   promote:
     if: endsWith(github.repository, '/awx')
     runs-on: ubuntu-latest
+    timeout-minutes: 90
     steps:
+      - name: Set GitHub Env vars for workflow_dispatch event
+        if: ${{ github.event_name == 'workflow_dispatch' }}
+        run: |
+          echo "TAG_NAME=${{ github.event.inputs.tag_name }}" >> $GITHUB_ENV
+
+      - name: Set GitHub Env vars if release event
+        if: ${{ github.event_name == 'release' }}
+        run: |
+          echo "TAG_NAME=${{ github.event.release.tag_name }}" >> $GITHUB_ENV
+
       - name: Checkout awx
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
+        with:
+          show-progress: false
 
       - name: Get python version from Makefile
         run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
@@ -42,16 +59,21 @@ jobs:
       - name: Build collection and publish to galaxy
         env:
           COLLECTION_NAMESPACE: ${{ env.collection_namespace }}
-          COLLECTION_VERSION: ${{ github.event.release.tag_name }}
+          COLLECTION_VERSION: ${{ env.TAG_NAME }}
           COLLECTION_TEMPLATE_VERSION: true
         run: |
+          sudo apt-get install jq
           make build_collection
-          if [ "$(curl --head -sw '%{http_code}' https://galaxy.ansible.com/download/${{ env.collection_namespace }}-awx-${{ github.event.release.tag_name }}.tar.gz | tail -1)" == "302" ] ; then \
-            echo "Galaxy release already done"; \
-          else \
+          count=$(curl -s https://galaxy.ansible.com/api/v3/plugin/ansible/search/collection-versions/\?namespace\=${COLLECTION_NAMESPACE}\&name\=awx\&version\=${COLLECTION_VERSION} | jq .meta.count)
+          if [[ "$count" == "1" ]]; then
+            echo "Galaxy release already done";
+          elif [[ "$count" == "0" ]]; then
             ansible-galaxy collection publish \
               --token=${{ secrets.GALAXY_TOKEN }} \
-              awx_collection_build/${{ env.collection_namespace }}-awx-${{ github.event.release.tag_name }}.tar.gz; \
+              awx_collection_build/${COLLECTION_NAMESPACE}-awx-${COLLECTION_VERSION}.tar.gz;
+          else
+            echo "Unexpected count from galaxy search: $count";
+            exit 1;
           fi
 
       - name: Set official pypi info
@@ -63,9 +85,11 @@ jobs:
         if: ${{ github.repository_owner != 'ansible' }}
 
       - name: Build awxkit and upload to pypi
+        env:
+          SETUPTOOLS_SCM_PRETEND_VERSION: ${{ env.TAG_NAME }}
         run: |
           git reset --hard
-          cd awxkit && python3 setup.py bdist_wheel
+          cd awxkit && python3 setup.py sdist bdist_wheel
           twine upload \
             -r ${{ env.pypi_repo }} \
             -u ${{ secrets.PYPI_USERNAME }} \
@@ -82,11 +106,15 @@ jobs:
 
       - name: Re-tag and promote awx image
         run: |
-          docker pull ghcr.io/${{ github.repository }}:${{ github.event.release.tag_name }}
-          docker tag ghcr.io/${{ github.repository }}:${{ github.event.release.tag_name }} quay.io/${{ github.repository }}:${{ github.event.release.tag_name }}
-          docker tag ghcr.io/${{ github.repository }}:${{ github.event.release.tag_name }} quay.io/${{ github.repository }}:latest
-          docker push quay.io/${{ github.repository }}:${{ github.event.release.tag_name }}
-          docker push quay.io/${{ github.repository }}:latest
-          docker pull ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }}
-          docker tag ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }} quay.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }}
-          docker push quay.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }}
+          docker buildx imagetools create \
+            ghcr.io/${{ github.repository }}:${{ env.TAG_NAME }} \
+            --tag quay.io/${{ github.repository }}:${{ env.TAG_NAME }}
+          docker buildx imagetools create \
+            ghcr.io/${{ github.repository }}:${{ env.TAG_NAME }} \
+            --tag quay.io/${{ github.repository }}:latest
+
+      - name: Re-tag and promote awx-ee image
+        run: |
+          docker buildx imagetools create \
+            ghcr.io/${{ github.repository_owner }}/awx-ee:${{ env.TAG_NAME }} \
+            --tag quay.io/${{ github.repository_owner }}/awx-ee:${{ env.TAG_NAME }}
```
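The old publish guard issued a HEAD request against the Galaxy download URL and keyed off a 302 redirect; the new one asks the Galaxy v3 search API how many matching collection versions exist, which cleanly separates "already published" (count 1) from "not yet published" (count 0) from anything unexpected. Stand-alone, the check looks roughly like this (the namespace, name, and version values are examples, not taken from a real release):

```bash
#!/usr/bin/env bash
# Query Galaxy for an exact collection version; example values only.
NS=awx NAME=awx VER=24.0.0
count=$(curl -s "https://galaxy.ansible.com/api/v3/plugin/ansible/search/collection-versions/?namespace=${NS}&name=${NAME}&version=${VER}" | jq .meta.count)
case "$count" in
  1) echo "already on Galaxy, skipping publish" ;;
  0) echo "not published yet, safe to publish" ;;
  *) echo "unexpected count: $count" >&2; exit 1 ;;
esac
```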
`.github/workflows/stage.yml` (vendored): 109 changed lines

```diff
@@ -23,6 +23,7 @@ jobs:
   stage:
     if: endsWith(github.repository, '/awx')
     runs-on: ubuntu-latest
+    timeout-minutes: 90
     permissions:
       packages: write
       contents: write
@@ -44,11 +45,27 @@ jobs:
           exit 0
 
       - name: Checkout awx
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
         with:
+          show-progress: false
           path: awx
 
+      - name: Checkout awx-operator
+        uses: actions/checkout@v4
+        with:
+          show-progress: false
+          repository: ${{ github.repository_owner }}/awx-operator
+          path: awx-operator
+
+      - name: Checkout awx-logos
+        uses: actions/checkout@v4
+        with:
+          show-progress: false
+          repository: ansible/awx-logos
+          path: awx-logos
+
       - name: Get python version from Makefile
+        working-directory: awx
         run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
 
       - name: Install python ${{ env.py_version }}
@@ -56,56 +73,72 @@ jobs:
         with:
           python-version: ${{ env.py_version }}
 
-      - name: Checkout awx-logos
-        uses: actions/checkout@v3
-        with:
-          repository: ansible/awx-logos
-          path: awx-logos
-
-      - name: Checkout awx-operator
-        uses: actions/checkout@v3
-        with:
-          repository: ${{ github.repository_owner }}/awx-operator
-          path: awx-operator
-
       - name: Install playbook dependencies
         run: |
           python3 -m pip install docker
 
-      - name: Build and stage AWX
+      - name: Log into registry ghcr.io
+        uses: docker/login-action@v3
+        with:
+          registry: ghcr.io
+          username: ${{ github.actor }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+
+      - name: Copy logos for inclusion in sdist for official build
         working-directory: awx
         run: |
-          ansible-playbook -v tools/ansible/build.yml \
-            -e registry=ghcr.io \
-            -e registry_username=${{ github.actor }} \
-            -e registry_password=${{ secrets.GITHUB_TOKEN }} \
-            -e awx_image=${{ github.repository }} \
-            -e awx_version=${{ github.event.inputs.version }} \
-            -e ansible_python_interpreter=$(which python3) \
-            -e push=yes \
-            -e awx_official=yes
+          cp ../awx-logos/awx/ui/client/assets/* awx/ui/public/static/media/
 
-      - name: Log in to GHCR
-        run: |
-          echo ${{ secrets.GITHUB_TOKEN }} | docker login ghcr.io -u ${{ github.actor }} --password-stdin
+      - name: Setup node and npm for new UI build
+        uses: actions/setup-node@v2
+        with:
+          node-version: '18'
 
-      - name: Log in to Quay
-        run: |
-          echo ${{ secrets.QUAY_TOKEN }} | docker login quay.io -u ${{ secrets.QUAY_USER }} --password-stdin
+      - name: Prebuild new UI for awx image (to speed up build process)
+        working-directory: awx
+        run: make ui-next
+
+      - name: Set build env variables
+        run: |
+          echo "DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER,,}" >> $GITHUB_ENV
+          echo "COMPOSE_TAG=${{ github.event.inputs.version }}" >> $GITHUB_ENV
+          echo "VERSION=${{ github.event.inputs.version }}" >> $GITHUB_ENV
+          echo "AWX_TEST_VERSION=${{ github.event.inputs.version }}" >> $GITHUB_ENV
+          echo "AWX_TEST_IMAGE=ghcr.io/${OWNER,,}/awx" >> $GITHUB_ENV
+          echo "AWX_EE_TEST_IMAGE=ghcr.io/${OWNER,,}/awx-ee:${{ github.event.inputs.version }}" >> $GITHUB_ENV
+          echo "AWX_OPERATOR_TEST_IMAGE=ghcr.io/${OWNER,,}/awx-operator:${{ github.event.inputs.operator_version }}" >> $GITHUB_ENV
+        env:
+          OWNER: ${{ github.repository_owner }}
+
+      - name: Build and stage AWX
+        working-directory: awx
+        env:
+          DOCKER_BUILDX_PUSH: true
+          HEADLESS: false
+          PLATFORMS: linux/amd64,linux/arm64
+        run: |
+          make awx-kube-buildx
 
       - name: tag awx-ee:latest with version input
         run: |
-          docker pull quay.io/ansible/awx-ee:latest
-          docker tag quay.io/ansible/awx-ee:latest ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.inputs.version }}
-          docker push ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.inputs.version }}
+          docker buildx imagetools create \
+            quay.io/ansible/awx-ee:latest \
+            --tag ${AWX_EE_TEST_IMAGE}
 
-      - name: Build and stage awx-operator
+      - name: Stage awx-operator image
         working-directory: awx-operator
         run: |
-          BUILD_ARGS="--build-arg DEFAULT_AWX_VERSION=${{ github.event.inputs.version }} \
+          BUILD_ARGS="--build-arg DEFAULT_AWX_VERSION=${{ github.event.inputs.version}} \
             --build-arg OPERATOR_VERSION=${{ github.event.inputs.operator_version }}" \
-          IMAGE_TAG_BASE=ghcr.io/${{ github.repository_owner }}/awx-operator \
-          VERSION=${{ github.event.inputs.operator_version }} make docker-build docker-push
+          IMG=${AWX_OPERATOR_TEST_IMAGE} \
+          make docker-buildx
+
+      - name: Pulling images for test deployment with awx-operator
+        # awx operator molecue test expect to kind load image and buildx exports image to registry and not local
+        run: |
+          docker pull -q ${AWX_OPERATOR_TEST_IMAGE}
+          docker pull -q ${AWX_EE_TEST_IMAGE}
+          docker pull -q ${AWX_TEST_IMAGE}:${AWX_TEST_VERSION}
 
       - name: Run test deployment with awx-operator
         working-directory: awx-operator
@@ -115,10 +148,6 @@ jobs:
           sudo rm -f $(which kustomize)
           make kustomize
           KUSTOMIZE_PATH=$(readlink -f bin/kustomize) molecule test -s kind
-        env:
-          AWX_TEST_IMAGE: ${{ github.repository }}
-          AWX_TEST_VERSION: ${{ github.event.inputs.version }}
-          AWX_EE_TEST_IMAGE: ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.inputs.version }}
 
       - name: Create draft release for AWX
         working-directory: awx
```
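Several steps in stage.yml and promote.yml replace pull/tag/push sequences with `docker buildx imagetools create`, which copies a manifest (including the multi-arch manifest lists these workflows build with `PLATFORMS: linux/amd64,linux/arm64`) from one registry reference to another without ever materializing the image locally. The shape of the command, with illustrative tags rather than the workflow's real references:

```bash
# Copy a (possibly multi-arch) image between registries by manifest only;
# ghcr.io/myorg/awx:1.2.3 is an example source, the quay.io tags are destinations.
docker buildx imagetools create \
  --tag quay.io/myorg/awx:1.2.3 \
  --tag quay.io/myorg/awx:latest \
  ghcr.io/myorg/awx:1.2.3
```

Because only manifests move, this also sidesteps the old failure mode where `docker pull` flattened a multi-arch image to the runner's architecture before re-pushing.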
`.github/workflows/update_dependabot_prs.yml` (vendored): 5 changed lines

```diff
@@ -9,10 +9,13 @@ jobs:
     name: Update Dependabot Prs
     if: contains(github.event.pull_request.labels.*.name, 'dependencies') && contains(github.event.pull_request.labels.*.name, 'component:ui')
     runs-on: ubuntu-latest
+    timeout-minutes: 20
 
     steps:
       - name: Checkout branch
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
+        with:
+          show-progress: false
 
       - name: Update PR Body
         env:
```
`.github/workflows/upload_schema.yml` (vendored): 7 changed lines

```diff
@@ -13,11 +13,14 @@ on:
 jobs:
   push:
     runs-on: ubuntu-latest
+    timeout-minutes: 60
     permissions:
       packages: write
       contents: read
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
+        with:
+          show-progress: false
 
       - name: Get python version from Makefile
         run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
@@ -33,7 +36,7 @@ jobs:
 
       - name: Pre-pull image to warm build cache
         run: |
-          docker pull ghcr.io/${{ github.repository_owner }}/awx_devel:${GITHUB_REF##*/} || :
+          docker pull -q ghcr.io/${{ github.repository_owner }}/awx_devel:${GITHUB_REF##*/} || :
 
       - name: Build image
         run: |
```
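The pre-pull step tolerates a missing upstream tag via `|| :` — in shell, the colon builtin is a no-op that always exits 0, so a failed `docker pull` only forfeits the cache warm-up instead of failing the job, and `-q` keeps layer-progress noise out of the log. A minimal illustration (the owner and tag are placeholders):

```bash
# Best-effort cache warm-up: a missing tag must not fail the job.
docker pull -q ghcr.io/example-owner/awx_devel:devel || :
echo "this still runs even if the pull above failed"
```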
`.gitignore` (vendored): 8 changed lines

```diff
@@ -46,6 +46,11 @@ tools/docker-compose/overrides/
 tools/docker-compose-minikube/_sources
 tools/docker-compose/keycloak.awx.realm.json
 
+!tools/docker-compose/editable_dependencies
+tools/docker-compose/editable_dependencies/*
+!tools/docker-compose/editable_dependencies/README.md
+!tools/docker-compose/editable_dependencies/install.sh
+
 # Tower setup playbook testing
 setup/test/roles/postgresql
 **/provision_docker
@@ -169,3 +174,6 @@ awx/ui_next/build
 # Docs build stuff
 docs/docsite/build/
 _readthedocs/
+
+# Pyenv
+.python-version
```
Read the Docs build configuration: 3 changed lines

```diff
@@ -10,6 +10,7 @@ build:
       3.11
     commands:
       - pip install --user tox
-      - python3 -m tox -e docs
+      - python3 -m tox -e docs --notest -v
+      - python3 -m tox -e docs --skip-pkg-install -q
       - mkdir -p _readthedocs/html/
       - mv docs/docsite/build/html/* _readthedocs/html/
```
113
.vscode/launch.json
vendored
Normal file
113
.vscode/launch.json
vendored
Normal file
@@ -0,0 +1,113 @@
|
|||||||
|
+{
+    "version": "0.2.0",
+    "configurations": [
+        {
+            "name": "run_ws_heartbeat",
+            "type": "debugpy",
+            "request": "launch",
+            "program": "manage.py",
+            "args": ["run_ws_heartbeat"],
+            "django": true,
+            "preLaunchTask": "stop awx-ws-heartbeat",
+            "postDebugTask": "start awx-ws-heartbeat"
+        },
+        {
+            "name": "run_cache_clear",
+            "type": "debugpy",
+            "request": "launch",
+            "program": "manage.py",
+            "args": ["run_cache_clear"],
+            "django": true,
+            "preLaunchTask": "stop awx-cache-clear",
+            "postDebugTask": "start awx-cache-clear"
+        },
+        {
+            "name": "run_callback_receiver",
+            "type": "debugpy",
+            "request": "launch",
+            "program": "manage.py",
+            "args": ["run_callback_receiver"],
+            "django": true,
+            "preLaunchTask": "stop awx-receiver",
+            "postDebugTask": "start awx-receiver"
+        },
+        {
+            "name": "run_dispatcher",
+            "type": "debugpy",
+            "request": "launch",
+            "program": "manage.py",
+            "args": ["run_dispatcher"],
+            "django": true,
+            "preLaunchTask": "stop awx-dispatcher",
+            "postDebugTask": "start awx-dispatcher"
+        },
+        {
+            "name": "run_rsyslog_configurer",
+            "type": "debugpy",
+            "request": "launch",
+            "program": "manage.py",
+            "args": ["run_rsyslog_configurer"],
+            "django": true,
+            "preLaunchTask": "stop awx-rsyslog-configurer",
+            "postDebugTask": "start awx-rsyslog-configurer"
+        },
+        {
+            "name": "run_cache_clear",
+            "type": "debugpy",
+            "request": "launch",
+            "program": "manage.py",
+            "args": ["run_cache_clear"],
+            "django": true,
+            "preLaunchTask": "stop awx-cache-clear",
+            "postDebugTask": "start awx-cache-clear"
+        },
+        {
+            "name": "run_wsrelay",
+            "type": "debugpy",
+            "request": "launch",
+            "program": "manage.py",
+            "args": ["run_wsrelay"],
+            "django": true,
+            "preLaunchTask": "stop awx-wsrelay",
+            "postDebugTask": "start awx-wsrelay"
+        },
+        {
+            "name": "daphne",
+            "type": "debugpy",
+            "request": "launch",
+            "program": "/var/lib/awx/venv/awx/bin/daphne",
+            "args": ["-b", "127.0.0.1", "-p", "8051", "awx.asgi:channel_layer"],
+            "django": true,
+            "preLaunchTask": "stop awx-daphne",
+            "postDebugTask": "start awx-daphne"
+        },
+        {
+            "name": "runserver(uwsgi alternative)",
+            "type": "debugpy",
+            "request": "launch",
+            "program": "manage.py",
+            "args": ["runserver", "127.0.0.1:8052"],
+            "django": true,
+            "preLaunchTask": "stop awx-uwsgi",
+            "postDebugTask": "start awx-uwsgi"
+        },
+        {
+            "name": "runserver_plus(uwsgi alternative)",
+            "type": "debugpy",
+            "request": "launch",
+            "program": "manage.py",
+            "args": ["runserver_plus", "127.0.0.1:8052"],
+            "django": true,
+            "preLaunchTask": "stop awx-uwsgi and install Werkzeug",
+            "postDebugTask": "start awx-uwsgi"
+        },
+        {
+            "name": "shell_plus",
+            "type": "debugpy",
+            "request": "launch",
+            "program": "manage.py",
+            "args": ["shell_plus"],
+            "django": true,
+        },
+    ]
+}
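Each configuration pairs a debugpy launch of a manage.py service command with tasks (defined in .vscode/tasks.json below) that park the supervisord-managed copy of that service while the debugger owns it. Outside VS Code, a run_dispatcher session amounts to roughly the following sketch, run inside the awx_devel container (the debugpy port is arbitrary):

    supervisorctl stop tower-processes:awx-dispatcher    # preLaunchTask
    python -m debugpy --listen 5678 --wait-for-client manage.py run_dispatcher
    supervisorctl start tower-processes:awx-dispatcher   # postDebugTask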
.vscode/tasks.json (vendored, new file, 100 lines added)
@@ -0,0 +1,100 @@
+{
+    "version": "2.0.0",
+    "tasks": [
+        {
+            "label": "start awx-cache-clear",
+            "type": "shell",
+            "command": "supervisorctl start tower-processes:awx-cache-clear"
+        },
+        {
+            "label": "stop awx-cache-clear",
+            "type": "shell",
+            "command": "supervisorctl stop tower-processes:awx-cache-clear"
+        },
+        {
+            "label": "start awx-daphne",
+            "type": "shell",
+            "command": "supervisorctl start tower-processes:awx-daphne"
+        },
+        {
+            "label": "stop awx-daphne",
+            "type": "shell",
+            "command": "supervisorctl stop tower-processes:awx-daphne"
+        },
+        {
+            "label": "start awx-dispatcher",
+            "type": "shell",
+            "command": "supervisorctl start tower-processes:awx-dispatcher"
+        },
+        {
+            "label": "stop awx-dispatcher",
+            "type": "shell",
+            "command": "supervisorctl stop tower-processes:awx-dispatcher"
+        },
+        {
+            "label": "start awx-receiver",
+            "type": "shell",
+            "command": "supervisorctl start tower-processes:awx-receiver"
+        },
+        {
+            "label": "stop awx-receiver",
+            "type": "shell",
+            "command": "supervisorctl stop tower-processes:awx-receiver"
+        },
+        {
+            "label": "start awx-rsyslog-configurer",
+            "type": "shell",
+            "command": "supervisorctl start tower-processes:awx-rsyslog-configurer"
+        },
+        {
+            "label": "stop awx-rsyslog-configurer",
+            "type": "shell",
+            "command": "supervisorctl stop tower-processes:awx-rsyslog-configurer"
+        },
+        {
+            "label": "start awx-rsyslogd",
+            "type": "shell",
+            "command": "supervisorctl start tower-processes:awx-rsyslogd"
+        },
+        {
+            "label": "stop awx-rsyslogd",
+            "type": "shell",
+            "command": "supervisorctl stop tower-processes:awx-rsyslogd"
+        },
+        {
+            "label": "start awx-uwsgi",
+            "type": "shell",
+            "command": "supervisorctl start tower-processes:awx-uwsgi"
+        },
+        {
+            "label": "stop awx-uwsgi",
+            "type": "shell",
+            "command": "supervisorctl stop tower-processes:awx-uwsgi"
+        },
+        {
+            "label": "stop awx-uwsgi and install Werkzeug",
+            "type": "shell",
+            "command": "pip install Werkzeug; supervisorctl stop tower-processes:awx-uwsgi"
+        },
+        {
+            "label": "start awx-ws-heartbeat",
+            "type": "shell",
+            "command": "supervisorctl start tower-processes:awx-ws-heartbeat"
+        },
+        {
+            "label": "stop awx-ws-heartbeat",
+            "type": "shell",
+            "command": "supervisorctl stop tower-processes:awx-ws-heartbeat"
+        },
+        {
+            "label": "start awx-wsrelay",
+            "type": "shell",
+            "command": "supervisorctl start tower-processes:awx-wsrelay"
+        },
+        {
+            "label": "stop awx-wsrelay",
+            "type": "shell",
+            "command": "supervisorctl stop tower-processes:awx-wsrelay"
+        }
+    ]
+}
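The tasks are thin supervisorctl wrappers over the tower-processes group, so the same services can be inspected or bounced by hand inside the dev container:

    supervisorctl status tower-processes:*
    supervisorctl restart tower-processes:awx-uwsgi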
@@ -11,6 +11,8 @@ ignore: |
   # django template files
   awx/api/templates/instance_install_bundle/**
   .readthedocs.yaml
+  tools/loki
+  tools/otel
 
 extends: default
 
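The `ignore: |` block and `extends: default` keys suggest this hunk belongs to the repository's yamllint configuration (the file header was not captured on this page); if so, the new tools/loki and tools/otel trees simply drop out of lint runs:

    # Assuming the file is the repo's yamllint config:
    yamllint .    # tools/loki and tools/otel are now skipped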
@@ -67,7 +67,7 @@ If you're not using Docker for Mac, or Docker for Windows, you may need, or choo
 
 #### Frontend Development
 
-See [the ui development documentation](awx/ui/CONTRIBUTING.md).
+See [the ansible-ui development documentation](https://github.com/ansible/ansible-ui/blob/main/CONTRIBUTING.md).
 
 #### Fork and clone the AWX repo
 
@@ -121,7 +121,7 @@ If it has someone assigned to it then that person is the person responsible for
 
 **NOTES**
 
 > Issue assignment will only be done for maintainers of the project. If you decide to work on an issue, please feel free to add a comment in the issue to let others know that you are working on it; but know that we will accept the first pull request from whomever is able to fix an issue. Once your PR is accepted we can add you as an assignee to an issue upon request.
 
 
 > If you work in a part of the codebase that is going through active development, your changes may be rejected, or you may be asked to `rebase`. A good idea before starting work is to have a discussion with us in the `#ansible-awx` channel on irc.libera.chat, or on the [mailing list](https://groups.google.com/forum/#!forum/awx-project).
@@ -132,7 +132,7 @@ If it has someone assigned to it then that person is the person responsible for
 
 At this time we do not accept PRs for adding additional language translations as we have an automated process for generating our translations. This is because translations require constant care as new strings are added and changed in the code base. Because of this the .po files are overwritten during every translation release cycle. We also can't support a lot of translations on AWX as its an open source project and each language adds time and cost to maintain. If you would like to see AWX translated into a new language please create an issue and ask others you know to upvote the issue. Our translation team will review the needs of the community and see what they can do around supporting additional language.
 
 If you find an issue with an existing translation, please see the [Reporting Issues](#reporting-issues) section to open an issue and our translation team will work with you on a resolution.
 
 
 ## Submitting Pull Requests
@@ -161,7 +161,7 @@ Sometimes it might take us a while to fully review your PR. We try to keep the `
 When your PR is initially submitted the checks will not be run until a maintainer allows them to be. Once a maintainer has done a quick review of your work the PR will have the linter and unit tests run against them via GitHub Actions, and the status reported in the PR.
 
 ## Reporting Issues
 
 We welcome your feedback, and encourage you to file an issue when you run into a problem. But before opening a new issues, we ask that you please view our [Issues guide](./ISSUES.md).
 
 ## Getting Help
@@ -80,7 +80,7 @@ If any of those items are missing your pull request will still get the `needs_tr
 Currently you can expect awxbot to add common labels such as `state:needs_triage`, `type:bug`, `component:docs`, etc...
 These labels are determined by the template data. Please use the template and fill it out as accurately as possible.
 
-The `state:needs_triage` label will will remain on your pull request until a person has looked at it.
+The `state:needs_triage` label will remain on your pull request until a person has looked at it.
 
 You can also expect the bot to CC maintainers of specific areas of the code, this will notify them that there is a pull request by placing a comment on the pull request.
 The comment will look something like `CC @matburt @wwitzel3 ...`.
@@ -22,7 +22,7 @@ recursive-exclude awx/settings local_settings.py*
 include tools/scripts/request_tower_configuration.sh
 include tools/scripts/request_tower_configuration.ps1
 include tools/scripts/automation-controller-service
-include tools/scripts/failure-event-handler
+include tools/scripts/rsyslog-4xx-recovery
 include tools/scripts/awx-python
 include awx/playbooks/library/mkfifo.py
 include tools/sosreport/*
Makefile (145 lines changed)
@@ -1,8 +1,8 @@
 -include awx/ui_next/Makefile
 
-PYTHON := $(notdir $(shell for i in python3.9 python3; do command -v $$i; done|sed 1q))
+PYTHON := $(notdir $(shell for i in python3.11 python3; do command -v $$i; done|sed 1q))
 SHELL := bash
-DOCKER_COMPOSE ?= docker-compose
+DOCKER_COMPOSE ?= docker compose
 OFFICIAL ?= no
 NODE ?= node
 NPM_BIN ?= npm
@@ -10,7 +10,7 @@ KIND_BIN ?= $(shell which kind)
 CHROMIUM_BIN=/tmp/chrome-linux/chrome
 GIT_BRANCH ?= $(shell git rev-parse --abbrev-ref HEAD)
 MANAGEMENT_COMMAND ?= awx-manage
-VERSION ?= $(shell $(PYTHON) tools/scripts/scm_version.py)
+VERSION ?= $(shell $(PYTHON) tools/scripts/scm_version.py 2> /dev/null)
 
 # ansible-test requires semver compatable version, so we allow overrides to hack it
 COLLECTION_VERSION ?= $(shell $(PYTHON) tools/scripts/scm_version.py | cut -d . -f 1-3)
@@ -43,8 +43,18 @@ PROMETHEUS ?= false
 GRAFANA ?= false
 # If set to true docker-compose will also start a hashicorp vault instance
 VAULT ?= false
+# If set to true docker-compose will also start a hashicorp vault instance with TLS enabled
+VAULT_TLS ?= false
 # If set to true docker-compose will also start a tacacs+ instance
 TACACS ?= false
+# If set to true docker-compose will also start an OpenTelemetry Collector instance
+OTEL ?= false
+# If set to true docker-compose will also start a Loki instance
+LOKI ?= false
+# If set to true docker-compose will install editable dependencies
+EDITABLE_DEPENDENCIES ?= false
+# If set to true, use tls for postgres connection
+PG_TLS ?= false
 
 VENV_BASE ?= /var/lib/awx/venv
 
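All of the new toggles flow into the docker-compose sources playbook shown further down; an illustrative invocation (not a default configuration):

    make docker-compose VAULT=true VAULT_TLS=true OTEL=true LOKI=true PG_TLS=true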
@@ -53,6 +63,11 @@ DEV_DOCKER_OWNER ?= ansible
 DEV_DOCKER_OWNER_LOWER = $(shell echo $(DEV_DOCKER_OWNER) | tr A-Z a-z)
 DEV_DOCKER_TAG_BASE ?= ghcr.io/$(DEV_DOCKER_OWNER_LOWER)
 DEVEL_IMAGE_NAME ?= $(DEV_DOCKER_TAG_BASE)/awx_devel:$(COMPOSE_TAG)
+IMAGE_KUBE_DEV=$(DEV_DOCKER_TAG_BASE)/awx_kube_devel:$(COMPOSE_TAG)
+IMAGE_KUBE=$(DEV_DOCKER_TAG_BASE)/awx:$(COMPOSE_TAG)
+
+# Common command to use for running ansible-playbook
+ANSIBLE_PLAYBOOK ?= ansible-playbook -e ansible_python_interpreter=$(PYTHON)
 
 RECEPTOR_IMAGE ?= quay.io/ansible/receptor:devel
 
@@ -61,7 +76,7 @@ RECEPTOR_IMAGE ?= quay.io/ansible/receptor:devel
 SRC_ONLY_PKGS ?= cffi,pycparser,psycopg,twilio
 # These should be upgraded in the AWX and Ansible venv before attempting
 # to install the actual requirements
-VENV_BOOTSTRAP ?= pip==21.2.4 setuptools==65.6.3 setuptools_scm[toml]==7.0.5 wheel==0.38.4
+VENV_BOOTSTRAP ?= pip==21.2.4 setuptools==69.0.2 setuptools_scm[toml]==8.0.4 wheel==0.42.0 cython==0.29.37
 
 NAME ?= awx
 
@@ -73,6 +88,21 @@ SDIST_TAR_FILE ?= $(SDIST_TAR_NAME).tar.gz
 
 I18N_FLAG_FILE = .i18n_built
 
+## PLATFORMS defines the target platforms for the manager image be build to provide support to multiple
+PLATFORMS ?= linux/amd64,linux/arm64 # linux/ppc64le,linux/s390x
+
+# Set up cache variables for image builds, allowing to control whether cache is used or not, ex:
+# DOCKER_CACHE=--no-cache make docker-compose-build
+ifeq ($(DOCKER_CACHE),)
+DOCKER_DEVEL_CACHE_FLAG=--cache-from=$(DEVEL_IMAGE_NAME)
+DOCKER_KUBE_DEV_CACHE_FLAG=--cache-from=$(IMAGE_KUBE_DEV)
+DOCKER_KUBE_CACHE_FLAG=--cache-from=$(IMAGE_KUBE)
+else
+DOCKER_DEVEL_CACHE_FLAG=$(DOCKER_CACHE)
+DOCKER_KUBE_DEV_CACHE_FLAG=$(DOCKER_CACHE)
+DOCKER_KUBE_CACHE_FLAG=$(DOCKER_CACHE)
+endif
+
 .PHONY: awx-link clean clean-tmp clean-venv requirements requirements_dev \
     develop refresh adduser migrate dbchange \
     receiver test test_unit test_coverage coverage_html \
@@ -211,8 +241,6 @@ collectstatic:
     fi; \
     $(PYTHON) manage.py collectstatic --clear --noinput > /dev/null 2>&1
 
-DEV_RELOAD_COMMAND ?= supervisorctl restart tower-processes:*
-
 uwsgi: collectstatic
     @if [ "$(VENV_BASE)" ]; then \
         . $(VENV_BASE)/awx/bin/activate; \
@@ -220,7 +248,7 @@ uwsgi: collectstatic
     uwsgi /etc/tower/uwsgi.ini
 
 awx-autoreload:
-    @/awx_devel/tools/docker-compose/awx-autoreload /awx_devel/awx "$(DEV_RELOAD_COMMAND)"
+    @/awx_devel/tools/docker-compose/awx-autoreload /awx_devel/awx
 
 daphne:
     @if [ "$(VENV_BASE)" ]; then \
@@ -300,7 +328,7 @@ swagger: reports
     @if [ "$(VENV_BASE)" ]; then \
         . $(VENV_BASE)/awx/bin/activate; \
     fi; \
-    (set -o pipefail && py.test $(PYTEST_ARGS) awx/conf/tests/functional awx/main/tests/functional/api awx/main/tests/docs --release=$(VERSION_TARGET) | tee reports/$@.report)
+    (set -o pipefail && py.test $(PYTEST_ARGS) awx/conf/tests/functional awx/main/tests/functional/api awx/main/tests/docs | tee reports/$@.report)
 
 check: black
 
@@ -324,6 +352,12 @@ test:
     cd awxkit && $(VENV_BASE)/awx/bin/tox -re py3
     awx-manage check_migrations --dry-run --check -n 'missing_migration_file'
 
+test_migrations:
+    if [ "$(VENV_BASE)" ]; then \
+        . $(VENV_BASE)/awx/bin/activate; \
+    fi; \
+    PYTHONDONTWRITEBYTECODE=1 py.test -p no:cacheprovider --migrations -m migration_test $(PYTEST_ARGS) $(TEST_DIRS)
+
 ## Runs AWX_DOCKER_CMD inside a new docker container.
 docker-runner:
     docker run -u $(shell id -u) --rm -v $(shell pwd):/awx_devel/:Z --workdir=/awx_devel $(DEVEL_IMAGE_NAME) $(AWX_DOCKER_CMD)
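A sketch of driving the new target — PYTEST_ARGS and TEST_DIRS are pass-throughs, and the values below are illustrative rather than defaults:

    make test_migrations TEST_DIRS=awx/main/tests/functional PYTEST_ARGS='-k migration'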
@@ -351,7 +385,7 @@ symlink_collection:
     ln -s $(shell pwd)/awx_collection $(COLLECTION_INSTALL)
 
 awx_collection_build: $(shell find awx_collection -type f)
-    ansible-playbook -i localhost, awx_collection/tools/template_galaxy.yml \
+    $(ANSIBLE_PLAYBOOK) -i localhost, awx_collection/tools/template_galaxy.yml \
       -e collection_package=$(COLLECTION_PACKAGE) \
       -e collection_namespace=$(COLLECTION_NAMESPACE) \
       -e collection_version=$(COLLECTION_VERSION) \
@@ -468,13 +502,7 @@ ui-test-general:
     $(NPM_BIN) run --prefix awx/ui pretest
     $(NPM_BIN) run --prefix awx/ui/ test-general --runInBand
 
-# NOTE: The make target ui-next is imported from awx/ui_next/Makefile
-
-HEADLESS ?= no
-ifeq ($(HEADLESS), yes)
 dist/$(SDIST_TAR_FILE):
-else
-dist/$(SDIST_TAR_FILE): $(UI_BUILD_FLAG_FILE) ui-next
-endif
     $(PYTHON) -m build -s
     ln -sf $(SDIST_TAR_FILE) dist/awx.tar.gz
 
@@ -505,10 +533,10 @@ endif
 
 docker-compose-sources: .git/hooks/pre-commit
     @if [ $(MINIKUBE_CONTAINER_GROUP) = true ]; then\
-        ansible-playbook -i tools/docker-compose/inventory -e minikube_setup=$(MINIKUBE_SETUP) tools/docker-compose-minikube/deploy.yml; \
+        $(ANSIBLE_PLAYBOOK) -i tools/docker-compose/inventory -e minikube_setup=$(MINIKUBE_SETUP) tools/docker-compose-minikube/deploy.yml; \
     fi;
 
-    ansible-playbook -i tools/docker-compose/inventory tools/docker-compose/ansible/sources.yml \
+    $(ANSIBLE_PLAYBOOK) -i tools/docker-compose/inventory tools/docker-compose/ansible/sources.yml \
        -e awx_image=$(DEV_DOCKER_TAG_BASE)/awx_devel \
        -e awx_image_tag=$(COMPOSE_TAG) \
        -e receptor_image=$(RECEPTOR_IMAGE) \
@@ -522,15 +550,28 @@ docker-compose-sources: .git/hooks/pre-commit
        -e enable_prometheus=$(PROMETHEUS) \
        -e enable_grafana=$(GRAFANA) \
        -e enable_vault=$(VAULT) \
+       -e vault_tls=$(VAULT_TLS) \
        -e enable_tacacs=$(TACACS) \
-       $(EXTRA_SOURCES_ANSIBLE_OPTS)
+       -e enable_otel=$(OTEL) \
+       -e enable_loki=$(LOKI) \
+       -e install_editable_dependencies=$(EDITABLE_DEPENDENCIES) \
+       -e pg_tls=$(PG_TLS) \
+       $(EXTRA_SOURCES_ANSIBLE_OPTS)
 
 docker-compose: awx/projects docker-compose-sources
     ansible-galaxy install --ignore-certs -r tools/docker-compose/ansible/requirements.yml;
-    ansible-playbook -i tools/docker-compose/inventory tools/docker-compose/ansible/initialize_containers.yml \
-       -e enable_vault=$(VAULT);
+    $(ANSIBLE_PLAYBOOK) -i tools/docker-compose/inventory tools/docker-compose/ansible/initialize_containers.yml \
+       -e enable_vault=$(VAULT) \
+       -e vault_tls=$(VAULT_TLS) \
+       -e enable_ldap=$(LDAP); \
+    $(MAKE) docker-compose-up
+
+docker-compose-up:
     $(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml $(COMPOSE_OPTS) up $(COMPOSE_UP_OPTS) --remove-orphans
+
+docker-compose-down:
+    $(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml $(COMPOSE_OPTS) down --remove-orphans
 
 docker-compose-credential-plugins: awx/projects docker-compose-sources
     echo -e "\033[0;31mTo generate a CyberArk Conjur API key: docker exec -it tools_conjur_1 conjurctl account create quick-start\033[0m"
     $(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml -f tools/docker-credential-plugins-override.yml up --no-recreate awx_1 --remove-orphans
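With up and down split into their own targets, the day-to-day loop no longer regenerates sources each time; for example:

    make docker-compose        # sources, container init, then up
    make docker-compose-down   # stop the stack, keep generated sources
    make docker-compose-up     # start again without re-running the playbooks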
@@ -562,7 +603,7 @@ docker-compose-container-group-clean:
 .PHONY: Dockerfile.dev
 ## Generate Dockerfile.dev for awx_devel image
 Dockerfile.dev: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
-    ansible-playbook tools/ansible/dockerfile.yml \
+    $(ANSIBLE_PLAYBOOK) tools/ansible/dockerfile.yml \
        -e dockerfile_name=Dockerfile.dev \
        -e build_dev=True \
        -e receptor_image=$(RECEPTOR_IMAGE)
@@ -573,14 +614,28 @@ docker-compose-build: Dockerfile.dev
        -f Dockerfile.dev \
        -t $(DEVEL_IMAGE_NAME) \
        --build-arg BUILDKIT_INLINE_CACHE=1 \
-       --cache-from=$(DEV_DOCKER_TAG_BASE)/awx_devel:$(COMPOSE_TAG) .
+       $(DOCKER_DEVEL_CACHE_FLAG) .
+
+.PHONY: docker-compose-buildx
+## Build awx_devel image for docker compose development environment for multiple architectures
+docker-compose-buildx: Dockerfile.dev
+    - docker buildx create --name docker-compose-buildx
+    docker buildx use docker-compose-buildx
+    - docker buildx build \
+       --push \
+       --build-arg BUILDKIT_INLINE_CACHE=1 \
+       $(DOCKER_DEVEL_CACHE_FLAG) \
+       --platform=$(PLATFORMS) \
+       --tag $(DEVEL_IMAGE_NAME) \
+       -f Dockerfile.dev .
+    - docker buildx rm docker-compose-buildx
 
 docker-clean:
     -$(foreach container_id,$(shell docker ps -f name=tools_awx -aq && docker ps -f name=tools_receptor -aq),docker stop $(container_id); docker rm -f $(container_id);)
     -$(foreach image_id,$(shell docker images --filter=reference='*/*/*awx_devel*' --filter=reference='*/*awx_devel*' --filter=reference='*awx_devel*' -aq),docker rmi --force $(image_id);)
 
 docker-clean-volumes: docker-compose-clean docker-compose-container-group-clean
-    docker volume rm -f tools_awx_db tools_vault_1 tools_grafana_storage tools_prometheus_storage $(docker volume ls --filter name=tools_redis_socket_ -q)
+    docker volume rm -f tools_var_lib_awx tools_awx_db tools_awx_db_15 tools_vault_1 tools_ldap_1 tools_grafana_storage tools_prometheus_storage $(shell docker volume ls --filter name=tools_redis_socket_ -q)
 
 docker-refresh: docker-clean docker-compose
 
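The buildx target pushes as it builds, so it presumes push access to the registry behind DEV_DOCKER_TAG_BASE; an illustrative invocation (the owner name is hypothetical):

    docker login ghcr.io
    make docker-compose-buildx DEV_DOCKER_OWNER=myorg PLATFORMS=linux/amd64,linux/arm64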
@@ -602,9 +657,6 @@ clean-elk:
     docker rm tools_elasticsearch_1
     docker rm tools_kibana_1
 
-psql-container:
-    docker run -it --net tools_default --rm postgres:12 sh -c 'exec psql -h "postgres" -p "5432" -U postgres'
-
 VERSION:
     @echo "awx: $(VERSION)"
 
@@ -625,7 +677,7 @@ version-for-buildyml:
 .PHONY: Dockerfile
 ## Generate Dockerfile for awx image
 Dockerfile: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
-    ansible-playbook tools/ansible/dockerfile.yml \
+    $(ANSIBLE_PLAYBOOK) tools/ansible/dockerfile.yml \
        -e receptor_image=$(RECEPTOR_IMAGE) \
        -e headless=$(HEADLESS)
 
@@ -635,12 +687,29 @@ awx-kube-build: Dockerfile
        --build-arg VERSION=$(VERSION) \
        --build-arg SETUPTOOLS_SCM_PRETEND_VERSION=$(VERSION) \
        --build-arg HEADLESS=$(HEADLESS) \
-       -t $(DEV_DOCKER_TAG_BASE)/awx:$(COMPOSE_TAG) .
+       $(DOCKER_KUBE_CACHE_FLAG) \
+       -t $(IMAGE_KUBE) .
+
+## Build multi-arch awx image for deployment on Kubernetes environment.
+awx-kube-buildx: Dockerfile
+    - docker buildx create --name awx-kube-buildx
+    docker buildx use awx-kube-buildx
+    - docker buildx build \
+       --push \
+       --build-arg VERSION=$(VERSION) \
+       --build-arg SETUPTOOLS_SCM_PRETEND_VERSION=$(VERSION) \
+       --build-arg HEADLESS=$(HEADLESS) \
+       --platform=$(PLATFORMS) \
+       $(DOCKER_KUBE_CACHE_FLAG) \
+       --tag $(IMAGE_KUBE) \
+       -f Dockerfile .
+    - docker buildx rm awx-kube-buildx
+
 
 .PHONY: Dockerfile.kube-dev
 ## Generate Docker.kube-dev for awx_kube_devel image
 Dockerfile.kube-dev: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
-    ansible-playbook tools/ansible/dockerfile.yml \
+    $(ANSIBLE_PLAYBOOK) tools/ansible/dockerfile.yml \
        -e dockerfile_name=Dockerfile.kube-dev \
        -e kube_dev=True \
        -e template_dest=_build_kube_dev \
@@ -650,12 +719,24 @@ Dockerfile.kube-dev: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
 awx-kube-dev-build: Dockerfile.kube-dev
     DOCKER_BUILDKIT=1 docker build -f Dockerfile.kube-dev \
        --build-arg BUILDKIT_INLINE_CACHE=1 \
-       --cache-from=$(DEV_DOCKER_TAG_BASE)/awx_kube_devel:$(COMPOSE_TAG) \
-       -t $(DEV_DOCKER_TAG_BASE)/awx_kube_devel:$(COMPOSE_TAG) .
+       $(DOCKER_KUBE_DEV_CACHE_FLAG) \
+       -t $(IMAGE_KUBE_DEV) .
+
+## Build and push multi-arch awx_kube_devel image for development on local Kubernetes environment.
+awx-kube-dev-buildx: Dockerfile.kube-dev
+    - docker buildx create --name awx-kube-dev-buildx
+    docker buildx use awx-kube-dev-buildx
+    - docker buildx build \
+       --push \
+       --build-arg BUILDKIT_INLINE_CACHE=1 \
+       $(DOCKER_KUBE_DEV_CACHE_FLAG) \
+       --platform=$(PLATFORMS) \
+       --tag $(IMAGE_KUBE_DEV) \
+       -f Dockerfile.kube-dev .
+    - docker buildx rm awx-kube-dev-buildx
 
 kind-dev-load: awx-kube-dev-build
-    $(KIND_BIN) load docker-image $(DEV_DOCKER_TAG_BASE)/awx_kube_devel:$(COMPOSE_TAG)
+    $(KIND_BIN) load docker-image $(IMAGE_KUBE_DEV)
 
 # Translation TASKS
 # --------------------------------------
@@ -7,7 +7,7 @@ AWX provides a web-based user interface, REST API, and task engine built on top
 
 To install AWX, please view the [Install guide](./INSTALL.md).
 
-To learn more about using AWX, and Tower, view the [Tower docs site](http://docs.ansible.com/ansible-tower/index.html).
+To learn more about using AWX, view the [AWX docs site](https://ansible.readthedocs.io/projects/awx/en/latest/).
 
 The AWX Project Frequently Asked Questions can be found [here](https://www.ansible.com/awx-project-faq).
 
@@ -154,10 +154,12 @@ def manage():
     from django.conf import settings
     from django.core.management import execute_from_command_line
 
-    # enforce the postgres version is equal to 12. if not, then terminate program with exit code of 1
+    # enforce the postgres version is a minimum of 12 (we need this for partitioning); if not, then terminate program with exit code of 1
+    # In the future if we require a feature of a version of postgres > 12 this should be updated to reflect that.
+    # The return of connection.pg_version is something like 12013
     if not os.getenv('SKIP_PG_VERSION_CHECK', False) and not MODE == 'development':
         if (connection.pg_version // 10000) < 12:
-            sys.stderr.write("Postgres version 12 is required\n")
+            sys.stderr.write("At a minimum, postgres version 12 is required\n")
             sys.exit(1)
 
     if len(sys.argv) >= 2 and sys.argv[1] in ('version', '--version'):  # pragma: no cover
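Django's connection.pg_version follows PostgreSQL's server_version_num encoding (major * 10000 + minor; the in-code comment's "12013" appears to drop a digit), so the floor division above keeps only the major version:

    echo $((120013 / 10000))   # PostgreSQL 12.13 -> 12, passes the >= 12 check
    echo $((110005 / 10000))   # PostgreSQL 11.5  -> 11, terminates with exit code 1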
@@ -93,6 +93,7 @@ register(
     default='',
     label=_('Login redirect override URL'),
     help_text=_('URL to which unauthorized users will be redirected to log in. If blank, users will be sent to the login page.'),
+    warning_text=_('Changing the redirect URL could impact the ability to login if local authentication is also disabled.'),
     category=_('Authentication'),
     category_slug='authentication',
 )
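This register() block describes the authentication setting behind the login redirect override; the setting's name is not visible in the hunk, so the name below is inferred (LOGIN_REDIRECT_OVERRIDE) and the call is illustrative only:

    # Hypothetical: adjust the redirect via the settings API
    curl -u admin:password -X PATCH https://awx.example.com/api/v2/settings/authentication/ \
         -H 'Content-Type: application/json' \
         -d '{"LOGIN_REDIRECT_OVERRIDE": "https://sso.example.com/login"}'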
@@ -1,450 +0,0 @@
-# Copyright (c) 2015 Ansible, Inc.
-# All Rights Reserved.
-
-# Python
-import re
-import json
-from functools import reduce
-
-# Django
-from django.core.exceptions import FieldError, ValidationError, FieldDoesNotExist
-from django.db import models
-from django.db.models import Q, CharField, IntegerField, BooleanField, TextField, JSONField
-from django.db.models.fields.related import ForeignObjectRel, ManyToManyField, ForeignKey
-from django.db.models.functions import Cast
-from django.contrib.contenttypes.models import ContentType
-from django.contrib.contenttypes.fields import GenericForeignKey
-from django.utils.encoding import force_str
-from django.utils.translation import gettext_lazy as _
-
-# Django REST Framework
-from rest_framework.exceptions import ParseError, PermissionDenied
-from rest_framework.filters import BaseFilterBackend
-
-# AWX
-from awx.main.utils import get_type_for_model, to_python_boolean
-from awx.main.utils.db import get_all_field_names
-
-
-class TypeFilterBackend(BaseFilterBackend):
-    """
-    Filter on type field now returned with all objects.
-    """
-
-    def filter_queryset(self, request, queryset, view):
-        try:
-            types = None
-            for key, value in request.query_params.items():
-                if key == 'type':
-                    if ',' in value:
-                        types = value.split(',')
-                    else:
-                        types = (value,)
-            if types:
-                types_map = {}
-                for ct in ContentType.objects.filter(Q(app_label='main') | Q(app_label='auth', model='user')):
-                    ct_model = ct.model_class()
-                    if not ct_model:
-                        continue
-                    ct_type = get_type_for_model(ct_model)
-                    types_map[ct_type] = ct.pk
-                model = queryset.model
-                model_type = get_type_for_model(model)
-                if 'polymorphic_ctype' in get_all_field_names(model):
-                    types_pks = set([v for k, v in types_map.items() if k in types])
-                    queryset = queryset.filter(polymorphic_ctype_id__in=types_pks)
-                elif model_type in types:
-                    queryset = queryset
-                else:
-                    queryset = queryset.none()
-            return queryset
-        except FieldError as e:
-            # Return a 400 for invalid field names.
-            raise ParseError(*e.args)
-
-
-def get_fields_from_path(model, path):
-    """
-    Given a Django ORM lookup path (possibly over multiple models)
-    Returns the fields in the line, and also the revised lookup path
-    ex., given
-        model=Organization
-        path='project__timeout'
-    returns tuple of fields traversed as well and a corrected path,
-    for special cases we do substitutions
-        ([<IntegerField for timeout>], 'project__timeout')
-    """
-    # Store of all the fields used to detect repeats
-    field_list = []
-    new_parts = []
-    for name in path.split('__'):
-        if model is None:
-            raise ParseError(_('No related model for field {}.').format(name))
-        # HACK: Make project and inventory source filtering by old field names work for backwards compatibility.
-        if model._meta.object_name in ('Project', 'InventorySource'):
-            name = {'current_update': 'current_job', 'last_update': 'last_job', 'last_update_failed': 'last_job_failed', 'last_updated': 'last_job_run'}.get(
-                name, name
-            )
-
-        if name == 'type' and 'polymorphic_ctype' in get_all_field_names(model):
-            name = 'polymorphic_ctype'
-            new_parts.append('polymorphic_ctype__model')
-        else:
-            new_parts.append(name)
-
-        if name in getattr(model, 'PASSWORD_FIELDS', ()):
-            raise PermissionDenied(_('Filtering on password fields is not allowed.'))
-        elif name == 'pk':
-            field = model._meta.pk
-        else:
-            name_alt = name.replace("_", "")
-            if name_alt in model._meta.fields_map.keys():
-                field = model._meta.fields_map[name_alt]
-                new_parts.pop()
-                new_parts.append(name_alt)
-            else:
-                field = model._meta.get_field(name)
-            if isinstance(field, ForeignObjectRel) and getattr(field.field, '__prevent_search__', False):
-                raise PermissionDenied(_('Filtering on %s is not allowed.' % name))
-            elif getattr(field, '__prevent_search__', False):
-                raise PermissionDenied(_('Filtering on %s is not allowed.' % name))
-        if field in field_list:
-            # Field traversed twice, could create infinite JOINs, DoSing Tower
-            raise ParseError(_('Loops not allowed in filters, detected on field {}.').format(field.name))
-        field_list.append(field)
-        model = getattr(field, 'related_model', None)
-
-    return field_list, '__'.join(new_parts)
-
-
-def get_field_from_path(model, path):
-    """
-    Given a Django ORM lookup path (possibly over multiple models)
-    Returns the last field in the line, and the revised lookup path
-    ex.
-        (<IntegerField for timeout>, 'project__timeout')
-    """
-    field_list, new_path = get_fields_from_path(model, path)
-    return (field_list[-1], new_path)
-
-
-class FieldLookupBackend(BaseFilterBackend):
-    """
-    Filter using field lookups provided via query string parameters.
-    """
-
-    RESERVED_NAMES = ('page', 'page_size', 'format', 'order', 'order_by', 'search', 'type', 'host_filter', 'count_disabled', 'no_truncate', 'limit')
-
-    SUPPORTED_LOOKUPS = (
-        'exact',
-        'iexact',
-        'contains',
-        'icontains',
-        'startswith',
-        'istartswith',
-        'endswith',
-        'iendswith',
-        'regex',
-        'iregex',
-        'gt',
-        'gte',
-        'lt',
-        'lte',
-        'in',
-        'isnull',
-        'search',
-    )
-
-    # A list of fields that we know can be filtered on without the possibility
-    # of introducing duplicates
-    NO_DUPLICATES_ALLOW_LIST = (CharField, IntegerField, BooleanField, TextField)
-
-    def get_fields_from_lookup(self, model, lookup):
-        if '__' in lookup and lookup.rsplit('__', 1)[-1] in self.SUPPORTED_LOOKUPS:
-            path, suffix = lookup.rsplit('__', 1)
-        else:
-            path = lookup
-            suffix = 'exact'
-
-        if not path:
-            raise ParseError(_('Query string field name not provided.'))
-
-        # FIXME: Could build up a list of models used across relationships, use
-        # those lookups combined with request.user.get_queryset(Model) to make
-        # sure user cannot query using objects he could not view.
-        field_list, new_path = get_fields_from_path(model, path)
-
-        new_lookup = new_path
-        new_lookup = '__'.join([new_path, suffix])
-        return field_list, new_lookup
-
-    def get_field_from_lookup(self, model, lookup):
-        '''Method to match return type of single field, if needed.'''
-        field_list, new_lookup = self.get_fields_from_lookup(model, lookup)
-        return (field_list[-1], new_lookup)
-
-    def to_python_related(self, value):
-        value = force_str(value)
-        if value.lower() in ('none', 'null'):
-            return None
-        else:
-            return int(value)
-
-    def value_to_python_for_field(self, field, value):
-        if isinstance(field, models.BooleanField):
-            return to_python_boolean(value)
-        elif isinstance(field, (ForeignObjectRel, ManyToManyField, GenericForeignKey, ForeignKey)):
-            try:
-                return self.to_python_related(value)
-            except ValueError:
-                raise ParseError(_('Invalid {field_name} id: {field_id}').format(field_name=getattr(field, 'name', 'related field'), field_id=value))
-        else:
-            return field.to_python(value)
-
-    def value_to_python(self, model, lookup, value):
-        try:
-            lookup.encode("ascii")
-        except UnicodeEncodeError:
-            raise ValueError("%r is not an allowed field name. Must be ascii encodable." % lookup)
-
-        field_list, new_lookup = self.get_fields_from_lookup(model, lookup)
-        field = field_list[-1]
-
-        needs_distinct = not all(isinstance(f, self.NO_DUPLICATES_ALLOW_LIST) for f in field_list)
-
-        # Type names are stored without underscores internally, but are presented and
-        # and serialized over the API containing underscores so we remove `_`
-        # for polymorphic_ctype__model lookups.
-        if new_lookup.startswith('polymorphic_ctype__model'):
-            value = value.replace('_', '')
-        elif new_lookup.endswith('__isnull'):
-            value = to_python_boolean(value)
-        elif new_lookup.endswith('__in'):
-            items = []
-            if not value:
-                raise ValueError('cannot provide empty value for __in')
-            for item in value.split(','):
-                items.append(self.value_to_python_for_field(field, item))
-            value = items
-        elif new_lookup.endswith('__regex') or new_lookup.endswith('__iregex'):
-            try:
-                re.compile(value)
-            except re.error as e:
-                raise ValueError(e.args[0])
-        elif new_lookup.endswith('__iexact'):
-            if not isinstance(field, (CharField, TextField)):
-                raise ValueError(f'{field.name} is not a text field and cannot be filtered by case-insensitive search')
-        elif new_lookup.endswith('__search'):
-            related_model = getattr(field, 'related_model', None)
-            if not related_model:
-                raise ValueError('%s is not searchable' % new_lookup[:-8])
-            new_lookups = []
-            for rm_field in related_model._meta.fields:
-                if rm_field.name in ('username', 'first_name', 'last_name', 'email', 'name', 'description', 'playbook'):
-                    new_lookups.append('{}__{}__icontains'.format(new_lookup[:-8], rm_field.name))
-            return value, new_lookups, needs_distinct
-        else:
-            if isinstance(field, JSONField):
-                new_lookup = new_lookup.replace(field.name, f'{field.name}_as_txt')
-            value = self.value_to_python_for_field(field, value)
-        return value, new_lookup, needs_distinct
-
-    def filter_queryset(self, request, queryset, view):
-        try:
-            # Apply filters specified via query_params. Each entry in the lists
-            # below is (negate, field, value).
-            and_filters = []
-            or_filters = []
-            chain_filters = []
-            role_filters = []
-            search_filters = {}
-            needs_distinct = False
-            # Can only have two values: 'AND', 'OR'
-            # If 'AND' is used, an item must satisfy all conditions to show up in the results.
-            # If 'OR' is used, an item just needs to satisfy one condition to appear in results.
-            search_filter_relation = 'OR'
-            for key, values in request.query_params.lists():
-                if key in self.RESERVED_NAMES:
-                    continue
-
-                # HACK: make `created` available via API for the Django User ORM model
-                # so it keep compatibility with other objects which exposes the `created` attr.
-                if queryset.model._meta.object_name == 'User' and key.startswith('created'):
-                    key = key.replace('created', 'date_joined')
-
-                # HACK: Make job event filtering by host name mostly work even
-                # when not capturing job event hosts M2M.
-                if queryset.model._meta.object_name == 'JobEvent' and key.startswith('hosts__name'):
-                    key = key.replace('hosts__name', 'or__host__name')
-                    or_filters.append((False, 'host__name__isnull', True))
-
-                # Custom __int filter suffix (internal use only).
-                q_int = False
-                if key.endswith('__int'):
-                    key = key[:-5]
-                    q_int = True
-
-                # RBAC filtering
-                if key == 'role_level':
-                    role_filters.append(values[0])
-                    continue
-
-                # Search across related objects.
-                if key.endswith('__search'):
-                    if values and ',' in values[0]:
-                        search_filter_relation = 'AND'
-                        values = reduce(lambda list1, list2: list1 + list2, [i.split(',') for i in values])
-                    for value in values:
-                        search_value, new_keys, _ = self.value_to_python(queryset.model, key, force_str(value))
-                        assert isinstance(new_keys, list)
-                        search_filters[search_value] = new_keys
-                    # by definition, search *only* joins across relations,
-                    # so it _always_ needs a .distinct()
-                    needs_distinct = True
-                    continue
-
-                # Custom chain__ and or__ filters, mutually exclusive (both can
-                # precede not__).
-                q_chain = False
-                q_or = False
-                if key.startswith('chain__'):
-                    key = key[7:]
-                    q_chain = True
-                elif key.startswith('or__'):
-                    key = key[4:]
-                    q_or = True
-
-                # Custom not__ filter prefix.
-                q_not = False
-                if key.startswith('not__'):
-                    key = key[5:]
-                    q_not = True
-
-                # Convert value(s) to python and add to the appropriate list.
-                for value in values:
-                    if q_int:
-                        value = int(value)
-                    value, new_key, distinct = self.value_to_python(queryset.model, key, value)
-                    if distinct:
-                        needs_distinct = True
-                    if '_as_txt' in new_key:
-                        fname = next(item for item in new_key.split('__') if item.endswith('_as_txt'))
-                        queryset = queryset.annotate(**{fname: Cast(fname[:-7], output_field=TextField())})
-                    if q_chain:
-                        chain_filters.append((q_not, new_key, value))
-                    elif q_or:
-                        or_filters.append((q_not, new_key, value))
-                    else:
-                        and_filters.append((q_not, new_key, value))
-
-            # Now build Q objects for database query filter.
-            if and_filters or or_filters or chain_filters or role_filters or search_filters:
-                args = []
-                for n, k, v in and_filters:
-                    if n:
-                        args.append(~Q(**{k: v}))
-                    else:
-                        args.append(Q(**{k: v}))
-                for role_name in role_filters:
-                    if not hasattr(queryset.model, 'accessible_pk_qs'):
-                        raise ParseError(_('Cannot apply role_level filter to this list because its model does not use roles for access control.'))
-                    args.append(Q(pk__in=queryset.model.accessible_pk_qs(request.user, role_name)))
-                if or_filters:
-                    q = Q()
-                    for n, k, v in or_filters:
-                        if n:
-                            q |= ~Q(**{k: v})
-                        else:
-                            q |= Q(**{k: v})
-                    args.append(q)
-                if search_filters and search_filter_relation == 'OR':
-                    q = Q()
-                    for term, constrains in search_filters.items():
-                        for constrain in constrains:
-                            q |= Q(**{constrain: term})
-                    args.append(q)
-                elif search_filters and search_filter_relation == 'AND':
-                    for term, constrains in search_filters.items():
-                        q_chain = Q()
-                        for constrain in constrains:
-                            q_chain |= Q(**{constrain: term})
-                        queryset = queryset.filter(q_chain)
-                for n, k, v in chain_filters:
-                    if n:
-                        q = ~Q(**{k: v})
-                    else:
-                        q = Q(**{k: v})
-                    queryset = queryset.filter(q)
-                queryset = queryset.filter(*args)
-                if needs_distinct:
-                    queryset = queryset.distinct()
-            return queryset
-        except (FieldError, FieldDoesNotExist, ValueError, TypeError) as e:
-            raise ParseError(e.args[0])
-        except ValidationError as e:
-            raise ParseError(json.dumps(e.messages, ensure_ascii=False))
-
-
-class OrderByBackend(BaseFilterBackend):
-    """
-    Filter to apply ordering based on query string parameters.
-    """
-
-    def filter_queryset(self, request, queryset, view):
-        try:
-            order_by = None
-            for key, value in request.query_params.items():
-                if key in ('order', 'order_by'):
-                    order_by = value
-                    if ',' in value:
-                        order_by = value.split(',')
-                    else:
-                        order_by = (value,)
-            default_order_by = self.get_default_ordering(view)
-            # glue the order by and default order by together so that the default is the backup option
-            order_by = list(order_by or []) + list(default_order_by or [])
-            if order_by:
-                order_by = self._validate_ordering_fields(queryset.model, order_by)
-                # Special handling of the type field for ordering. In this
-                # case, we're not sorting exactly on the type field, but
-                # given the limited number of views with multiple types,
-                # sorting on polymorphic_ctype.model is effectively the same.
-                new_order_by = []
-                if 'polymorphic_ctype' in get_all_field_names(queryset.model):
-                    for field in order_by:
-                        if field == 'type':
-                            new_order_by.append('polymorphic_ctype__model')
-                        elif field == '-type':
-                            new_order_by.append('-polymorphic_ctype__model')
-                        else:
-                            new_order_by.append(field)
-                else:
-                    for field in order_by:
-                        if field not in ('type', '-type'):
-                            new_order_by.append(field)
-                queryset = queryset.order_by(*new_order_by)
-            return queryset
-        except FieldError as e:
-            # Return a 400 for invalid field names.
-            raise ParseError(*e.args)
-
-    def get_default_ordering(self, view):
-        ordering = getattr(view, 'ordering', None)
-        if isinstance(ordering, str):
-            return (ordering,)
-        return ordering
-
-    def _validate_ordering_fields(self, model, order_by):
-        for field_name in order_by:
-            # strip off the negation prefix `-` if it exists
-            prefix = ''
-            path = field_name
-            if field_name[0] == '-':
-                prefix = field_name[0]
-                path = field_name[1:]
-            try:
-                field, new_path = get_field_from_path(model, path)
-                new_path = '{}{}'.format(prefix, new_path)
-            except (FieldError, FieldDoesNotExist) as e:
-                raise ParseError(e.args[0])
-            yield new_path
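The deleted module implemented AWX's query-string filter grammar — type filters, field lookups with not__/or__/chain__ prefixes, __search across related objects, and order_by handling — and the import changes below swap in the django-ansible-base equivalent. For reference, the kind of request it parsed (host and credentials illustrative):

    curl -s -u admin:password \
      'https://awx.example.com/api/v2/jobs/?or__status=failed&or__status=error&not__name__icontains=demo&created__gt=2024-01-01&order_by=-created'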
@@ -30,13 +30,21 @@ from rest_framework.permissions import IsAuthenticated
 from rest_framework.renderers import StaticHTMLRenderer
 from rest_framework.negotiation import DefaultContentNegotiation
 
+# django-ansible-base
+from ansible_base.rest_filters.rest_framework.field_lookup_backend import FieldLookupBackend
+from ansible_base.lib.utils.models import get_all_field_names
+from ansible_base.lib.utils.requests import get_remote_host
+from ansible_base.rbac.models import RoleEvaluation, RoleDefinition
+from ansible_base.rbac.permission_registry import permission_registry
+from ansible_base.jwt_consumer.common.util import validate_x_trusted_proxy_header
+
 # AWX
-from awx.api.filters import FieldLookupBackend
 from awx.main.models import UnifiedJob, UnifiedJobTemplate, User, Role, Credential, WorkflowJobTemplateNode, WorkflowApprovalTemplate
+from awx.main.models.rbac import give_creator_permissions
 from awx.main.access import optimize_queryset
 from awx.main.utils import camelcase_to_underscore, get_search_fields, getattrd, get_object_or_400, decrypt_field, get_awx_version
-from awx.main.utils.db import get_all_field_names
 from awx.main.utils.licensing import server_product_name
+from awx.main.utils.proxy import is_proxy_in_headers, delete_headers_starting_with_http
 from awx.main.views import ApiErrorView
 from awx.api.serializers import ResourceAccessListElementSerializer, CopySerializer
 from awx.api.versioning import URLPathVersioning
@@ -88,25 +96,31 @@ class LoggedLoginView(auth_views.LoginView):

     def post(self, request, *args, **kwargs):
         ret = super(LoggedLoginView, self).post(request, *args, **kwargs)
+        ip = get_remote_host(request)  # request.META.get('REMOTE_ADDR', None)
         if request.user.is_authenticated:
-            logger.info(smart_str(u"User {} logged in from {}".format(self.request.user.username, request.META.get('REMOTE_ADDR', None))))
-            ret.set_cookie('userLoggedIn', 'true')
+            logger.info(smart_str(u"User {} logged in from {}".format(self.request.user.username, ip)))
+            ret.set_cookie(
+                'userLoggedIn', 'true', secure=getattr(settings, 'SESSION_COOKIE_SECURE', False), samesite=getattr(settings, 'USER_COOKIE_SAMESITE', 'Lax')
+            )
             ret.setdefault('X-API-Session-Cookie-Name', getattr(settings, 'SESSION_COOKIE_NAME', 'awx_sessionid'))

             return ret
         else:
             if 'username' in self.request.POST:
-                logger.warning(smart_str(u"Login failed for user {} from {}".format(self.request.POST.get('username'), request.META.get('REMOTE_ADDR', None))))
+                logger.warning(smart_str(u"Login failed for user {} from {}".format(self.request.POST.get('username'), ip)))
             ret.status_code = 401
             return ret


 class LoggedLogoutView(auth_views.LogoutView):

+    success_url_allowed_hosts = set(settings.LOGOUT_ALLOWED_HOSTS.split(",")) if settings.LOGOUT_ALLOWED_HOSTS else set()
+
     def dispatch(self, request, *args, **kwargs):
         original_user = getattr(request, 'user', None)
         ret = super(LoggedLogoutView, self).dispatch(request, *args, **kwargs)
         current_user = getattr(request, 'user', None)
-        ret.set_cookie('userLoggedIn', 'false')
+        ret.set_cookie('userLoggedIn', 'false', secure=getattr(settings, 'SESSION_COOKIE_SECURE', False))
         if (not current_user or not getattr(current_user, 'pk', True)) and current_user != original_user:
             logger.info("User {} logged out.".format(original_user.username))
         return ret
@@ -141,22 +155,23 @@ class APIView(views.APIView):
         Store the Django REST Framework Request object as an attribute on the
         normal Django request, store time the request started.
         """
+        remote_headers = ['REMOTE_ADDR', 'REMOTE_HOST']
+
         self.time_started = time.time()
         if getattr(settings, 'SQL_DEBUG', False):
             self.queries_before = len(connection.queries)

+        if 'HTTP_X_TRUSTED_PROXY' in request.environ:
+            if validate_x_trusted_proxy_header(request.environ['HTTP_X_TRUSTED_PROXY']):
+                remote_headers = settings.REMOTE_HOST_HEADERS
+            else:
+                logger.warning("Request appeared to be a trusted upstream proxy but failed to provide a matching shared secret.")
+
         # If there are any custom headers in REMOTE_HOST_HEADERS, make sure
         # they respect the allowed proxy list
-        if all(
-            [
-                settings.PROXY_IP_ALLOWED_LIST,
-                request.environ.get('REMOTE_ADDR') not in settings.PROXY_IP_ALLOWED_LIST,
-                request.environ.get('REMOTE_HOST') not in settings.PROXY_IP_ALLOWED_LIST,
-            ]
-        ):
-            for custom_header in settings.REMOTE_HOST_HEADERS:
-                if custom_header.startswith('HTTP_'):
-                    request.environ.pop(custom_header, None)
+        if settings.PROXY_IP_ALLOWED_LIST:
+            if not is_proxy_in_headers(self.request, settings.PROXY_IP_ALLOWED_LIST, remote_headers):
+                delete_headers_starting_with_http(request, settings.REMOTE_HOST_HEADERS)

         drf_request = super(APIView, self).initialize_request(request, *args, **kwargs)
         request.drf_request = drf_request
@@ -201,17 +216,21 @@ class APIView(views.APIView):
             return response

         if response.status_code >= 400:
+            ip = get_remote_host(request)  # request.META.get('REMOTE_ADDR', None)
             msg_data = {
                 'status_code': response.status_code,
                 'user_name': request.user,
                 'url_path': request.path,
-                'remote_addr': request.META.get('REMOTE_ADDR', None),
+                'remote_addr': ip,
             }

             if type(response.data) is dict:
                 msg_data['error'] = response.data.get('error', response.status_text)
             elif type(response.data) is list:
-                msg_data['error'] = ", ".join(list(map(lambda x: x.get('error', response.status_text), response.data)))
+                if len(response.data) > 0 and isinstance(response.data[0], str):
+                    msg_data['error'] = str(response.data[0])
+                else:
+                    msg_data['error'] = ", ".join(list(map(lambda x: x.get('error', response.status_text), response.data)))
             else:
                 msg_data['error'] = response.status_text
@@ -471,7 +490,11 @@ class ListAPIView(generics.ListAPIView, GenericAPIView):

 class ListCreateAPIView(ListAPIView, generics.ListCreateAPIView):
     # Base class for a list view that allows creating new objects.
-    pass
+    def perform_create(self, serializer):
+        super().perform_create(serializer)
+        if serializer.Meta.model in permission_registry.all_registered_models:
+            if self.request and self.request.user:
+                give_creator_permissions(self.request.user, serializer.instance)


 class ParentMixin(object):
@@ -791,6 +814,7 @@ class RetrieveUpdateDestroyAPIView(RetrieveUpdateAPIView, DestroyAPIView):


 class ResourceAccessList(ParentMixin, ListAPIView):
+    deprecated = True
     serializer_class = ResourceAccessListElementSerializer
     ordering = ('username',)
@@ -798,6 +822,15 @@ class ResourceAccessList(ParentMixin, ListAPIView):
         obj = self.get_parent_object()

         content_type = ContentType.objects.get_for_model(obj)
+
+        if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
+            ancestors = set(RoleEvaluation.objects.filter(content_type_id=content_type.id, object_id=obj.id).values_list('role_id', flat=True))
+            qs = User.objects.filter(has_roles__in=ancestors) | User.objects.filter(is_superuser=True)
+            auditor_role = RoleDefinition.objects.filter(name="System Auditor").first()
+            if auditor_role:
+                qs |= User.objects.filter(role_assignments__role_definition=auditor_role)
+            return qs.distinct()
+
         roles = set(Role.objects.filter(content_type=content_type, object_id=obj.id))

         ancestors = set()
@@ -957,7 +990,7 @@ class CopyAPIView(GenericAPIView):
             None, None, self.model, obj, request.user, create_kwargs=create_kwargs, copy_name=serializer.validated_data.get('name', '')
         )
         if hasattr(new_obj, 'admin_role') and request.user not in new_obj.admin_role.members.all():
-            new_obj.admin_role.members.add(request.user)
+            give_creator_permissions(request.user, new_obj)
         if sub_objs:
             permission_check_func = None
             if hasattr(type(self), 'deep_copy_permission_check_func'):
@@ -36,11 +36,13 @@ class Metadata(metadata.SimpleMetadata):
         field_info = OrderedDict()
         field_info['type'] = self.label_lookup[field]
         field_info['required'] = getattr(field, 'required', False)
+        field_info['hidden'] = getattr(field, 'hidden', False)

         text_attrs = [
             'read_only',
             'label',
             'help_text',
+            'warning_text',
             'min_length',
             'max_length',
             'min_value',
@@ -101,7 +103,7 @@ class Metadata(metadata.SimpleMetadata):
                 default = field.get_default()
                 if type(default) is UUID:
                     default = 'xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx'
-                if field.field_name == 'TOWER_URL_BASE' and default == 'https://towerhost':
+                if field.field_name == 'TOWER_URL_BASE' and default == 'https://platformhost':
                     default = '{}://{}'.format(self.request.scheme, self.request.get_host())
                 field_info['default'] = default
             except serializers.SkipField:
@@ -6,7 +6,7 @@ import copy
 import json
 import logging
 import re
-from collections import OrderedDict
+from collections import Counter, OrderedDict
 from datetime import timedelta
 from uuid import uuid4
@@ -43,9 +43,14 @@ from rest_framework.utils.serializer_helpers import ReturnList
 # Django-Polymorphic
 from polymorphic.models import PolymorphicModel

+# django-ansible-base
+from ansible_base.lib.utils.models import get_type_for_model
+from ansible_base.rbac.models import RoleEvaluation, ObjectRole
+from ansible_base.rbac import permission_registry
+
 # AWX
 from awx.main.access import get_user_capabilities
-from awx.main.constants import ACTIVE_STATES, CENSOR_VALUE
+from awx.main.constants import ACTIVE_STATES, CENSOR_VALUE, org_role_to_permission
 from awx.main.models import (
     ActivityStream,
     AdHocCommand,
@@ -80,6 +85,7 @@ from awx.main.models import (
     Project,
     ProjectUpdate,
     ProjectUpdateEvent,
+    ReceptorAddress,
     RefreshToken,
     Role,
     Schedule,
@@ -99,10 +105,9 @@ from awx.main.models import (
     CLOUD_INVENTORY_SOURCES,
 )
 from awx.main.models.base import VERBOSITY_CHOICES, NEW_JOB_TYPE_CHOICES
-from awx.main.models.rbac import get_roles_on_resource, role_summary_fields_generator
+from awx.main.models.rbac import role_summary_fields_generator, give_creator_permissions, get_role_codenames, to_permissions, get_role_from_object_role
 from awx.main.fields import ImplicitRoleField
 from awx.main.utils import (
-    get_type_for_model,
     get_model_for_type,
     camelcase_to_underscore,
     getattrd,
@@ -189,6 +194,7 @@ SUMMARIZABLE_FK_FIELDS = {
     'webhook_credential': DEFAULT_SUMMARY_FIELDS + ('kind', 'cloud', 'credential_type_id'),
     'approved_or_denied_by': ('id', 'username', 'first_name', 'last_name'),
     'credential_type': DEFAULT_SUMMARY_FIELDS,
+    'resource': ('ansible_id', 'resource_type'),
 }
@@ -635,7 +641,7 @@ class BaseSerializer(serializers.ModelSerializer, metaclass=BaseSerializerMetaclass):
         exclusions = self.get_validation_exclusions(self.instance)
         obj = self.instance or self.Meta.model()
         for k, v in attrs.items():
-            if k not in exclusions:
+            if k not in exclusions and k != 'canonical_address_port':
                 setattr(obj, k, v)
         obj.full_clean(exclude=exclusions)
         # full_clean may modify values on the instance; copy those changes
@@ -2201,6 +2207,99 @@ class BulkHostCreateSerializer(serializers.Serializer):
         return return_data


+class BulkHostDeleteSerializer(serializers.Serializer):
+    hosts = serializers.ListField(
+        allow_empty=False,
+        max_length=100000,
+        write_only=True,
+        help_text=_('List of hosts ids to be deleted, e.g. [105, 130, 131, 200]'),
+    )
+
+    class Meta:
+        model = Host
+        fields = ('hosts',)
+
+    def validate(self, attrs):
+        request = self.context.get('request', None)
+        max_hosts = settings.BULK_HOST_MAX_DELETE
+        # Validating the number of hosts to be deleted
+        if len(attrs['hosts']) > max_hosts:
+            raise serializers.ValidationError(
+                {
+                    "ERROR": 'Number of hosts exceeds system setting BULK_HOST_MAX_DELETE',
+                    "BULK_HOST_MAX_DELETE": max_hosts,
+                    "Hosts_count": len(attrs['hosts']),
+                }
+            )
+
+        # Getting list of all host objects, filtered by the list of the hosts to delete
+        attrs['host_qs'] = Host.objects.get_queryset().filter(pk__in=attrs['hosts']).only('id', 'inventory_id', 'name')
+
+        # Converting the queryset data in a dict. to reduce the number of queries when
+        # manipulating the data
+        attrs['hosts_data'] = attrs['host_qs'].values()
+
+        if len(attrs['host_qs']) == 0:
+            error_hosts = {host: "Hosts do not exist or you lack permission to delete it" for host in attrs['hosts']}
+            raise serializers.ValidationError({'hosts': error_hosts})
+
+        if len(attrs['host_qs']) < len(attrs['hosts']):
+            hosts_exists = [host['id'] for host in attrs['hosts_data']]
+            failed_hosts = list(set(attrs['hosts']).difference(hosts_exists))
+            error_hosts = {host: "Hosts do not exist or you lack permission to delete it" for host in failed_hosts}
+            raise serializers.ValidationError({'hosts': error_hosts})
+
+        # Getting all inventories that the hosts can be in
+        inv_list = list(set([host['inventory_id'] for host in attrs['hosts_data']]))
+
+        # Checking that the user have permission to all inventories
+        errors = dict()
+        for inv in Inventory.objects.get_queryset().filter(pk__in=inv_list):
+            if request and not request.user.is_superuser:
+                if request.user not in inv.admin_role:
+                    errors[inv.name] = "Lack permissions to delete hosts from this inventory."
+        if errors != {}:
+            raise PermissionDenied({"inventories": errors})
+
+        # check the inventory type only if the user have permission to it.
+        errors = dict()
+        for inv in Inventory.objects.get_queryset().filter(pk__in=inv_list):
+            if inv.kind != '':
+                errors[inv.name] = "Hosts can only be deleted from manual inventories."
+        if errors != {}:
+            raise serializers.ValidationError({"inventories": errors})
+        attrs['inventories'] = inv_list
+        return attrs
+
+    def delete(self, validated_data):
+        result = {"hosts": dict()}
+        changes = {'deleted_hosts': dict()}
+        for inventory in validated_data['inventories']:
+            changes['deleted_hosts'][inventory] = list()
+
+        for host in validated_data['hosts_data']:
+            result["hosts"][host["id"]] = f"The host {host['name']} was deleted"
+            changes['deleted_hosts'][host["inventory_id"]].append({"host_id": host["id"], "host_name": host["name"]})
+
+        try:
+            validated_data['host_qs'].delete()
+        except Exception as e:
+            raise serializers.ValidationError({"detail": _(f"cannot delete hosts, host deletion error {e}")})
+
+        request = self.context.get('request', None)
+
+        for inventory in validated_data['inventories']:
+            activity_entry = ActivityStream.objects.create(
+                operation='update',
+                object1='inventory',
+                changes=json.dumps(changes['deleted_hosts'][inventory]),
+                actor=request.user,
+            )
+            activity_entry.inventory.add(inventory)
+
+        return result
+
+
 class GroupTreeSerializer(GroupSerializer):
     children = serializers.SerializerMethodField()
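For reference, this serializer backs the bulk delete endpoint registered later in this changeset (`/api/v2/bulk/host_delete/`). A hypothetical client call, sketched with the requests library (the host IDs, hostname, and token are made up; `BULK_HOST_MAX_DELETE` caps the list size server-side):

    import requests

    resp = requests.post(
        "https://awx.example.com/api/v2/bulk/host_delete/",
        json={"hosts": [105, 130, 131]},
        headers={"Authorization": "Bearer <token>"},
    )
    print(resp.status_code)  # 400 if any ID is missing or forbidden, per validate() above
    print(resp.json())       # {"hosts": {"105": "The host web1 was deleted", ...}} on success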
@@ -2664,6 +2763,30 @@ class ResourceAccessListElementSerializer(UserSerializer):
         if 'summary_fields' not in ret:
             ret['summary_fields'] = {}

+        team_content_type = ContentType.objects.get_for_model(Team)
+        content_type = ContentType.objects.get_for_model(obj)
+
+        reversed_org_map = {}
+        for k, v in org_role_to_permission.items():
+            reversed_org_map[v] = k
+        reversed_role_map = {}
+        for k, v in to_permissions.items():
+            reversed_role_map[v] = k
+
+        def get_roles_from_perms(perm_list):
+            """given a list of permission codenames return a list of role names"""
+            role_names = set()
+            for codename in perm_list:
+                action = codename.split('_', 1)[0]
+                if action in reversed_role_map:
+                    role_names.add(reversed_role_map[action])
+                elif codename in reversed_org_map:
+                    if isinstance(obj, Organization):
+                        role_names.add(reversed_org_map[codename])
+            if 'view_organization' not in role_names:
+                role_names.add('read_role')
+            return list(role_names)
+
         def format_role_perm(role):
             role_dict = {'id': role.id, 'name': role.name, 'description': role.description}
             try:
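The helper above works off two reversed codename maps. A toy illustration of the derivation, with a stand-in mapping (the real `to_permissions` lives in awx.main.models.rbac and differs in detail):

    # stand-in for the reversed to_permissions map: action prefix -> old role name
    reversed_role_map = {'change': 'admin_role', 'execute': 'execute_role', 'view': 'read_role'}

    def roles_from_perms(perm_list):
        # mirrors get_roles_from_perms: keep only the action prefix of each codename
        names = set()
        for codename in perm_list:
            action = codename.split('_', 1)[0]
            if action in reversed_role_map:
                names.add(reversed_role_map[action])
        return sorted(names)

    print(roles_from_perms(['change_jobtemplate', 'execute_jobtemplate']))
    # ['admin_role', 'execute_role']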
@@ -2679,13 +2802,21 @@ class ResourceAccessListElementSerializer(UserSerializer):
             else:
                 # Singleton roles should not be managed from this view, as per copy/edit rework spec
                 role_dict['user_capabilities'] = {'unattach': False}
-            return {'role': role_dict, 'descendant_roles': get_roles_on_resource(obj, role)}
+
+            model_name = content_type.model
+            if isinstance(obj, Organization):
+                descendant_perms = [codename for codename in get_role_codenames(role) if codename.endswith(model_name) or codename.startswith('add_')]
+            else:
+                descendant_perms = [codename for codename in get_role_codenames(role) if codename.endswith(model_name)]
+
+            return {'role': role_dict, 'descendant_roles': get_roles_from_perms(descendant_perms)}

         def format_team_role_perm(naive_team_role, permissive_role_ids):
             ret = []
+            team = naive_team_role.content_object
             team_role = naive_team_role
             if naive_team_role.role_field == 'admin_role':
-                team_role = naive_team_role.content_object.member_role
+                team_role = team.member_role
             for role in team_role.children.filter(id__in=permissive_role_ids).all():
                 role_dict = {
                     'id': role.id,
@@ -2705,13 +2836,87 @@ class ResourceAccessListElementSerializer(UserSerializer):
             else:
                 # Singleton roles should not be managed from this view, as per copy/edit rework spec
                 role_dict['user_capabilities'] = {'unattach': False}
-            ret.append({'role': role_dict, 'descendant_roles': get_roles_on_resource(obj, team_role)})
+
+            descendant_perms = list(
+                RoleEvaluation.objects.filter(role__in=team.has_roles.all(), object_id=obj.id, content_type_id=content_type.id)
+                .values_list('codename', flat=True)
+                .distinct()
+            )
+
+            ret.append({'role': role_dict, 'descendant_roles': get_roles_from_perms(descendant_perms)})
             return ret

-        team_content_type = ContentType.objects.get_for_model(Team)
-        content_type = ContentType.objects.get_for_model(obj)
-        direct_permissive_role_ids = Role.objects.filter(content_type=content_type, object_id=obj.id).values_list('id', flat=True)
+        gfk_kwargs = dict(content_type_id=content_type.id, object_id=obj.id)
+        direct_permissive_role_ids = Role.objects.filter(**gfk_kwargs).values_list('id', flat=True)
+
+        if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
+            ret['summary_fields']['direct_access'] = []
+            ret['summary_fields']['indirect_access'] = []
+
+            new_roles_seen = set()
+            all_team_roles = set()
+            all_permissive_role_ids = set()
+            for evaluation in RoleEvaluation.objects.filter(role__in=user.has_roles.all(), **gfk_kwargs).prefetch_related('role'):
+                new_role = evaluation.role
+                if new_role.id in new_roles_seen:
+                    continue
+                new_roles_seen.add(new_role.id)
+                old_role = get_role_from_object_role(new_role)
+                all_permissive_role_ids.add(old_role.id)
+
+                if int(new_role.object_id) == obj.id and new_role.content_type_id == content_type.id:
+                    ret['summary_fields']['direct_access'].append(format_role_perm(old_role))
+                elif new_role.content_type_id == team_content_type.id:
+                    all_team_roles.add(old_role)
+                else:
+                    ret['summary_fields']['indirect_access'].append(format_role_perm(old_role))
+
+            # Lazy role creation gives us a big problem, where some intermediate roles are not easy to find
+            # like when a team has indirect permission, so here we get all roles the users teams have
+            # these contribute to all potential permission-granting roles of the object
+            user_teams_qs = permission_registry.team_model.objects.filter(member_roles__in=ObjectRole.objects.filter(users=user))
+            team_obj_roles = ObjectRole.objects.filter(teams__in=user_teams_qs)
+            for evaluation in RoleEvaluation.objects.filter(role__in=team_obj_roles, **gfk_kwargs).prefetch_related('role'):
+                new_role = evaluation.role
+                if new_role.id in new_roles_seen:
+                    continue
+                new_roles_seen.add(new_role.id)
+                old_role = get_role_from_object_role(new_role)
+                all_permissive_role_ids.add(old_role.id)
+
+            # In DAB RBAC, superuser is strictly a user flag, and global roles are not in the RoleEvaluation table
+            if user.is_superuser:
+                ret['summary_fields'].setdefault('indirect_access', [])
+                all_role_names = [field.name for field in obj._meta.get_fields() if isinstance(field, ImplicitRoleField)]
+                ret['summary_fields']['indirect_access'].append(
+                    {
+                        "role": {
+                            "id": None,
+                            "name": _("System Administrator"),
+                            "description": _("Can manage all aspects of the system"),
+                            "user_capabilities": {"unattach": False},
+                        },
+                        "descendant_roles": all_role_names,
+                    }
+                )
+            elif user.is_system_auditor:
+                ret['summary_fields'].setdefault('indirect_access', [])
+                ret['summary_fields']['indirect_access'].append(
+                    {
+                        "role": {
+                            "id": None,
+                            "name": _("System Auditor"),
+                            "description": _("Can view all aspects of the system"),
+                            "user_capabilities": {"unattach": False},
+                        },
+                        "descendant_roles": ["read_role"],
+                    }
+                )
+
+            ret['summary_fields']['direct_access'].extend([y for x in (format_team_role_perm(r, all_permissive_role_ids) for r in all_team_roles) for y in x])
+
+            return ret
+
         all_permissive_role_ids = Role.objects.filter(content_type=content_type, object_id=obj.id).values_list('ancestors__id', flat=True)

         direct_access_roles = user.roles.filter(id__in=direct_permissive_role_ids).all()
@@ -2980,7 +3185,7 @@ class CredentialSerializerCreate(CredentialSerializer):
         credential = super(CredentialSerializerCreate, self).create(validated_data)

         if user:
-            credential.admin_role.members.add(user)
+            give_creator_permissions(user, credential)
         if team:
             if not credential.organization or team.organization.id != credential.organization.id:
                 raise serializers.ValidationError({"detail": _("Credential organization must be set and match before assigning to a team")})
@@ -5074,16 +5279,21 @@ class NotificationTemplateSerializer(BaseSerializer):
                 body = messages[event].get('body', {})
                 if body:
                     try:
-                        rendered_body = (
-                            sandbox.ImmutableSandboxedEnvironment(undefined=DescriptiveUndefined).from_string(body).render(JobNotificationMixin.context_stub())
-                        )
-                        potential_body = json.loads(rendered_body)
-                        if not isinstance(potential_body, dict):
-                            error_list.append(
-                                _("Webhook body for '{}' should be a json dictionary. Found type '{}'.".format(event, type(potential_body).__name__))
-                            )
-                    except json.JSONDecodeError as exc:
-                        error_list.append(_("Webhook body for '{}' is not a valid json dictionary ({}).".format(event, exc)))
+                        sandbox.ImmutableSandboxedEnvironment(undefined=DescriptiveUndefined).from_string(body).render(JobNotificationMixin.context_stub())
+
+                        # https://github.com/ansible/awx/issues/14410
+                        # When rendering something such as "{{ job.id }}"
+                        # the return type is not a dict, unlike "{{ job_metadata }}" which is a dict
+
+                        # potential_body = json.loads(rendered_body)
+
+                        # if not isinstance(potential_body, dict):
+                        #     error_list.append(
+                        #         _("Webhook body for '{}' should be a json dictionary. Found type '{}'.".format(event, type(potential_body).__name__))
+                        #     )
+                    except Exception as exc:
+                        error_list.append(_("Webhook body for '{}' is not valid. The following gave an error ({}).".format(event, exc)))

                 if error_list:
                     raise serializers.ValidationError(error_list)
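The commented-out check was dropped because templates like "{{ job.id }}" legitimately render to a bare scalar rather than a JSON object. A small reproduction of the failure mode under jinja2 (the context values are made up):

    import json
    from jinja2 import sandbox

    env = sandbox.ImmutableSandboxedEnvironment()
    # "{{ job_metadata }}"-style bodies render to a JSON object, but "{{ job.id }}" does not:
    rendered = env.from_string("{{ job.id }}").render({'job': {'id': 123}})
    print(rendered)              # '123'
    print(json.loads(rendered))  # 123 -- an int, so the old isinstance(..., dict) check rejected it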
@@ -5171,7 +5381,7 @@ class NotificationSerializer(BaseSerializer):
     )

     def get_body(self, obj):
-        if obj.notification_type in ('webhook', 'pagerduty'):
+        if obj.notification_type in ('webhook', 'pagerduty', 'awssns'):
             if isinstance(obj.body, dict):
                 if 'body' in obj.body:
                     return obj.body['body']

@@ -5193,9 +5403,9 @@ class NotificationSerializer(BaseSerializer):
     def to_representation(self, obj):
         ret = super(NotificationSerializer, self).to_representation(obj)

-        if obj.notification_type == 'webhook':
+        if obj.notification_type in ('webhook', 'awssns'):
             ret.pop('subject')
-        if obj.notification_type not in ('email', 'webhook', 'pagerduty'):
+        if obj.notification_type not in ('email', 'webhook', 'pagerduty', 'awssns'):
             ret.pop('body')
         return ret
@@ -5356,17 +5566,25 @@ class ScheduleSerializer(LaunchConfigurationBaseSerializer, SchedulePreviewSerializer):
 class InstanceLinkSerializer(BaseSerializer):
     class Meta:
         model = InstanceLink
-        fields = ('id', 'url', 'related', 'source', 'target', 'link_state')
+        fields = ('id', 'related', 'source', 'target', 'target_full_address', 'link_state')

     source = serializers.SlugRelatedField(slug_field="hostname", queryset=Instance.objects.all())
-    target = serializers.SlugRelatedField(slug_field="hostname", queryset=Instance.objects.all())
+    target = serializers.SerializerMethodField()
+    target_full_address = serializers.SerializerMethodField()

     def get_related(self, obj):
         res = super(InstanceLinkSerializer, self).get_related(obj)
         res['source_instance'] = self.reverse('api:instance_detail', kwargs={'pk': obj.source.id})
-        res['target_instance'] = self.reverse('api:instance_detail', kwargs={'pk': obj.target.id})
+        res['target_address'] = self.reverse('api:receptor_address_detail', kwargs={'pk': obj.target.id})
         return res

+    def get_target(self, obj):
+        return obj.target.instance.hostname
+
+    def get_target_full_address(self, obj):
+        return obj.target.get_full_address()
+

 class InstanceNodeSerializer(BaseSerializer):
     class Meta:
@@ -5374,6 +5592,29 @@ class InstanceNodeSerializer(BaseSerializer):
         fields = ('id', 'hostname', 'node_type', 'node_state', 'enabled')


+class ReceptorAddressSerializer(BaseSerializer):
+    full_address = serializers.SerializerMethodField()
+
+    class Meta:
+        model = ReceptorAddress
+        fields = (
+            'id',
+            'url',
+            'address',
+            'port',
+            'protocol',
+            'websocket_path',
+            'is_internal',
+            'canonical',
+            'instance',
+            'peers_from_control_nodes',
+            'full_address',
+        )
+
+    def get_full_address(self, obj):
+        return obj.get_full_address()
+
+
 class InstanceSerializer(BaseSerializer):
     show_capabilities = ['edit']
@@ -5382,11 +5623,17 @@ class InstanceSerializer(BaseSerializer):
     jobs_running = serializers.IntegerField(help_text=_('Count of jobs in the running or waiting state that are targeted for this instance'), read_only=True)
     jobs_total = serializers.IntegerField(help_text=_('Count of all jobs that target this instance'), read_only=True)
     health_check_pending = serializers.SerializerMethodField()
-    peers = serializers.SlugRelatedField(many=True, required=False, slug_field="hostname", queryset=Instance.objects.all())
+    peers = serializers.PrimaryKeyRelatedField(
+        help_text=_('Primary keys of receptor addresses to peer to.'), many=True, required=False, queryset=ReceptorAddress.objects.all()
+    )
+    reverse_peers = serializers.SerializerMethodField()
+    listener_port = serializers.IntegerField(source='canonical_address_port', required=False, allow_null=True)
+    peers_from_control_nodes = serializers.BooleanField(source='canonical_address_peers_from_control_nodes', required=False)
+    protocol = serializers.SerializerMethodField()

     class Meta:
         model = Instance
-        read_only_fields = ('ip_address', 'uuid', 'version')
+        read_only_fields = ('ip_address', 'uuid', 'version', 'managed', 'reverse_peers')
         fields = (
             'id',
             'hostname',
@@ -5417,10 +5664,13 @@ class InstanceSerializer(BaseSerializer):
             'managed_by_policy',
             'node_type',
             'node_state',
+            'managed',
             'ip_address',
-            'listener_port',
             'peers',
+            'reverse_peers',
+            'listener_port',
             'peers_from_control_nodes',
+            'protocol',
         )
         extra_kwargs = {
             'node_type': {'initial': Instance.Types.EXECUTION, 'default': Instance.Types.EXECUTION},
@@ -5442,16 +5692,54 @@ class InstanceSerializer(BaseSerializer):

     def get_related(self, obj):
         res = super(InstanceSerializer, self).get_related(obj)
+        res['receptor_addresses'] = self.reverse('api:instance_receptor_addresses_list', kwargs={'pk': obj.pk})
         res['jobs'] = self.reverse('api:instance_unified_jobs_list', kwargs={'pk': obj.pk})
-        res['instance_groups'] = self.reverse('api:instance_instance_groups_list', kwargs={'pk': obj.pk})
-        if obj.node_type in [Instance.Types.EXECUTION, Instance.Types.HOP]:
-            res['install_bundle'] = self.reverse('api:instance_install_bundle', kwargs={'pk': obj.pk})
         res['peers'] = self.reverse('api:instance_peers_list', kwargs={"pk": obj.pk})
+        res['instance_groups'] = self.reverse('api:instance_instance_groups_list', kwargs={'pk': obj.pk})
+        if obj.node_type in [Instance.Types.EXECUTION, Instance.Types.HOP] and not obj.managed:
+            res['install_bundle'] = self.reverse('api:instance_install_bundle', kwargs={'pk': obj.pk})
         if self.context['request'].user.is_superuser or self.context['request'].user.is_system_auditor:
             if obj.node_type == 'execution':
                 res['health_check'] = self.reverse('api:instance_health_check', kwargs={'pk': obj.pk})
         return res

+    def create_or_update(self, validated_data, obj=None, create=True):
+        # create a managed receptor address if listener port is defined
+        port = validated_data.pop('listener_port', -1)
+        peers_from_control_nodes = validated_data.pop('peers_from_control_nodes', -1)
+
+        # delete the receptor address if the port is explicitly set to None
+        if obj and port == None:
+            obj.receptor_addresses.filter(address=obj.hostname).delete()
+
+        if create:
+            instance = super(InstanceSerializer, self).create(validated_data)
+        else:
+            instance = super(InstanceSerializer, self).update(obj, validated_data)
+            instance.refresh_from_db()  # instance canonical address lookup is deferred, so needs to be reloaded
+
+        # only create or update if port is defined in validated_data or already exists in the
+        # canonical address
+        # this prevents creating a receptor address if peers_from_control_nodes is in
+        # validated_data but a port is not set
+        if (port != None and port != -1) or instance.canonical_address_port:
+            kwargs = {}
+            if port != -1:
+                kwargs['port'] = port
+            if peers_from_control_nodes != -1:
+                kwargs['peers_from_control_nodes'] = peers_from_control_nodes
+            if kwargs:
+                kwargs['canonical'] = True
+                instance.receptor_addresses.update_or_create(address=instance.hostname, defaults=kwargs)
+
+        return instance
+
+    def create(self, validated_data):
+        return self.create_or_update(validated_data, create=True)
+
+    def update(self, obj, validated_data):
+        return self.create_or_update(validated_data, obj, create=False)
+
     def get_summary_fields(self, obj):
         summary = super().get_summary_fields(obj)
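The -1 sentinel in create_or_update distinguishes "field absent from the payload" from an explicit null. A minimal sketch of that three-state handling outside of DRF (the function and names are local to the sketch):

    _ABSENT = -1  # mirrors the sentinel used by create_or_update above

    def resolve_port(payload, current_port):
        port = payload.get('listener_port', _ABSENT)
        if port is None:
            return None          # explicit null: the canonical address gets deleted
        if port == _ABSENT:
            return current_port  # key absent: keep whatever the instance already has
        return port              # key present: create/update the canonical address

    assert resolve_port({}, 27199) == 27199
    assert resolve_port({'listener_port': None}, 27199) is None
    assert resolve_port({'listener_port': 6789}, None) == 6789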
@@ -5461,6 +5749,16 @@ class InstanceSerializer(BaseSerializer):

         return summary

+    def get_reverse_peers(self, obj):
+        return Instance.objects.prefetch_related('peers').filter(peers__in=obj.receptor_addresses.all()).values_list('id', flat=True)
+
+    def get_protocol(self, obj):
+        # note: don't create a different query for receptor addresses, as this is prefetched on the View for optimization
+        for addr in obj.receptor_addresses.all():
+            if addr.canonical:
+                return addr.protocol
+        return ""
+
     def get_consumed_capacity(self, obj):
         return obj.consumed_capacity
@@ -5474,47 +5772,20 @@ class InstanceSerializer(BaseSerializer):
         return obj.health_check_pending

     def validate(self, attrs):
-        def get_field_from_model_or_attrs(fd):
-            return attrs.get(fd, self.instance and getattr(self.instance, fd) or None)
-
-        def check_peers_changed():
-            '''
-            return True if
-            - 'peers' in attrs
-            - instance peers matches peers in attrs
-            '''
-            return self.instance and 'peers' in attrs and set(self.instance.peers.all()) != set(attrs['peers'])
+        # Oddly, using 'source' on a DRF field populates attrs with the source name, so we should rename it back
+        if 'canonical_address_port' in attrs:
+            attrs['listener_port'] = attrs.pop('canonical_address_port')
+        if 'canonical_address_peers_from_control_nodes' in attrs:
+            attrs['peers_from_control_nodes'] = attrs.pop('canonical_address_peers_from_control_nodes')

         if not self.instance and not settings.IS_K8S:
             raise serializers.ValidationError(_("Can only create instances on Kubernetes or OpenShift."))

-        node_type = get_field_from_model_or_attrs("node_type")
-        peers_from_control_nodes = get_field_from_model_or_attrs("peers_from_control_nodes")
-        listener_port = get_field_from_model_or_attrs("listener_port")
-        peers = attrs.get('peers', [])
-
-        if peers_from_control_nodes and node_type not in (Instance.Types.EXECUTION, Instance.Types.HOP):
-            raise serializers.ValidationError(_("peers_from_control_nodes can only be enabled for execution or hop nodes."))
-
-        if node_type in [Instance.Types.CONTROL, Instance.Types.HYBRID]:
-            if check_peers_changed():
-                raise serializers.ValidationError(
-                    _("Setting peers manually for control nodes is not allowed. Enable peers_from_control_nodes on the hop and execution nodes instead.")
-                )
-
-        if not listener_port and peers_from_control_nodes:
-            raise serializers.ValidationError(_("Field listener_port must be a valid integer when peers_from_control_nodes is enabled."))
-
-        if not listener_port and self.instance and self.instance.peers_from.exists():
-            raise serializers.ValidationError(_("Field listener_port must be a valid integer when other nodes peer to it."))
-
-        for peer in peers:
-            if peer.listener_port is None:
-                raise serializers.ValidationError(_("Field listener_port must be set on peer ") + peer.hostname + ".")
-
-        if not settings.IS_K8S:
-            if check_peers_changed():
-                raise serializers.ValidationError(_("Cannot change peers."))
+        # cannot enable peers_from_control_nodes if listener_port is not set
+        if attrs.get('peers_from_control_nodes'):
+            port = attrs.get('listener_port', -1)  # -1 denotes missing, None denotes explicit null
+            if (port is None) or (port == -1 and self.instance and self.instance.canonical_address is None):
+                raise serializers.ValidationError(_("Cannot enable peers_from_control_nodes if listener_port is not set."))

         return super().validate(attrs)
@@ -5534,8 +5805,8 @@ class InstanceSerializer(BaseSerializer):
                 raise serializers.ValidationError(_("Can only change the state on Kubernetes or OpenShift."))
             if value != Instance.States.DEPROVISIONING:
                 raise serializers.ValidationError(_("Can only change instances to the 'deprovisioning' state."))
-            if self.instance.node_type not in (Instance.Types.EXECUTION, Instance.Types.HOP):
-                raise serializers.ValidationError(_("Can only deprovision execution or hop nodes."))
+            if self.instance.managed:
+                raise serializers.ValidationError(_("Cannot deprovision managed nodes."))
         else:
             if value and value != Instance.States.INSTALLED:
                 raise serializers.ValidationError(_("Can only create instances in the 'installed' state."))
@@ -5554,18 +5825,48 @@ class InstanceSerializer(BaseSerializer):
     def validate_listener_port(self, value):
         """
         Cannot change listener port, unless going from none to integer, and vice versa
+        If instance is managed, cannot change listener port at all
         """
-        if value and self.instance and self.instance.listener_port and self.instance.listener_port != value:
-            raise serializers.ValidationError(_("Cannot change listener port."))
+        if self.instance:
+            canonical_address_port = self.instance.canonical_address_port
+            if value and canonical_address_port and canonical_address_port != value:
+                raise serializers.ValidationError(_("Cannot change listener port."))
+            if self.instance.managed and value != canonical_address_port:
+                raise serializers.ValidationError(_("Cannot change listener port for managed nodes."))
+        return value
+
+    def validate_peers(self, value):
+        # cannot peer to an instance more than once
+        peers_instances = Counter(p.instance_id for p in value)
+        if any(count > 1 for count in peers_instances.values()):
+            raise serializers.ValidationError(_("Cannot peer to the same instance more than once."))
+
+        if self.instance:
+            instance_addresses = set(self.instance.receptor_addresses.all())
+            setting_peers = set(value)
+            peers_changed = set(self.instance.peers.all()) != setting_peers
+
+            if not settings.IS_K8S and peers_changed:
+                raise serializers.ValidationError(_("Cannot change peers."))
+
+            if self.instance.managed and peers_changed:
+                raise serializers.ValidationError(_("Setting peers manually for managed nodes is not allowed."))
+
+            # cannot peer to self
+            if instance_addresses & setting_peers:
+                raise serializers.ValidationError(_("Instance cannot peer to its own address."))
+
+            # cannot peer to an instance that is already peered to this instance
+            if instance_addresses:
+                for p in setting_peers:
+                    if set(p.instance.peers.all()) & instance_addresses:
+                        raise serializers.ValidationError(_(f"Instance {p.instance.hostname} is already peered to this instance."))

         return value

     def validate_peers_from_control_nodes(self, value):
-        """
-        Can only enable for K8S based deployments
-        """
-        if value and not settings.IS_K8S:
-            raise serializers.ValidationError(_("Can only be enabled on Kubernetes or Openshift."))
+        if self.instance and self.instance.managed and self.instance.canonical_address_peers_from_control_nodes != value:
+            raise serializers.ValidationError(_("Cannot change peers_from_control_nodes for managed nodes."))

         return value
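A self-contained sketch of the two cheap peer checks above, duplicate target instances and peering to one of your own addresses; the Peer tuple stands in for ReceptorAddress (all values are illustrative):

    from collections import Counter, namedtuple

    Peer = namedtuple('Peer', ['instance_id', 'address'])  # stand-in for ReceptorAddress

    def check_peers(new_peers, own_addresses):
        # cannot peer to the same instance more than once
        counts = Counter(p.instance_id for p in new_peers)
        if any(c > 1 for c in counts.values()):
            raise ValueError("Cannot peer to the same instance more than once.")
        # cannot peer to one of this instance's own addresses
        if set(new_peers) & set(own_addresses):
            raise ValueError("Instance cannot peer to its own address.")

    check_peers([Peer(2, 'hop1:27199'), Peer(3, 'exec1:27199')], [Peer(1, 'self:27199')])  # ok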
 22  awx/api/templates/api/bulk_host_delete_view.md  Normal file
@@ -0,0 +1,22 @@
+# Bulk Host Delete
+
+This endpoint allows the client to delete multiple hosts from inventories.
+They may do this by providing a list of hosts ID's to be deleted.
+
+Example:
+
+    {
+        "hosts": [1, 2, 3, 4, 5]
+    }
+
+Return data:
+
+    {
+        "hosts": {
+            "1": "The host a1 was deleted",
+            "2": "The host a2 was deleted",
+            "3": "The host a3 was deleted",
+            "4": "The host a4 was deleted",
+            "5": "The host a5 was deleted",
+        }
+    }
@@ -17,19 +17,18 @@ custom_worksign_public_keyfile: receptor/work_public_key.pem
 custom_tls_certfile: receptor/tls/receptor.crt
 custom_tls_keyfile: receptor/tls/receptor.key
 custom_ca_certfile: receptor/tls/ca/mesh-CA.crt
-receptor_protocol: 'tcp'
-{% if instance.listener_port %}
+{% if listener_port %}
+receptor_protocol: {{ listener_protocol }}
 receptor_listener: true
-receptor_port: {{ instance.listener_port }}
+receptor_port: {{ listener_port }}
 {% else %}
 receptor_listener: false
 {% endif %}
 {% if peers %}
 receptor_peers:
 {% for peer in peers %}
-  - host: {{ peer.host }}
-    port: {{ peer.port }}
-    protocol: tcp
+  - address: {{ peer.address }}
+    protocol: {{ peer.protocol }}
 {% endfor %}
 {% endif %}
 {% verbatim %}
@@ -2,6 +2,12 @@
 - hosts: all
   become: yes
   tasks:
+    - name: Create the receptor group
+      group:
+{% verbatim %}
+        name: "{{ receptor_group }}"
+{% endverbatim %}
+        state: present
     - name: Create the receptor user
       user:
 {% verbatim %}
@@ -1,4 +1,4 @@
 ---
 collections:
   - name: ansible.receptor
-    version: 2.0.2
+    version: 2.0.3
@@ -10,6 +10,7 @@ from awx.api.views import (
     InstanceInstanceGroupsList,
     InstanceHealthCheck,
     InstancePeersList,
+    InstanceReceptorAddressesList,
 )
 from awx.api.views.instance_install_bundle import InstanceInstallBundle

@@ -21,6 +22,7 @@ urls = [
     re_path(r'^(?P<pk>[0-9]+)/instance_groups/$', InstanceInstanceGroupsList.as_view(), name='instance_instance_groups_list'),
     re_path(r'^(?P<pk>[0-9]+)/health_check/$', InstanceHealthCheck.as_view(), name='instance_health_check'),
     re_path(r'^(?P<pk>[0-9]+)/peers/$', InstancePeersList.as_view(), name='instance_peers_list'),
+    re_path(r'^(?P<pk>[0-9]+)/receptor_addresses/$', InstanceReceptorAddressesList.as_view(), name='instance_receptor_addresses_list'),
     re_path(r'^(?P<pk>[0-9]+)/install_bundle/$', InstanceInstallBundle.as_view(), name='instance_install_bundle'),
 ]
 17  awx/api/urls/receptor_address.py  Normal file
@@ -0,0 +1,17 @@
+# Copyright (c) 2017 Ansible, Inc.
+# All Rights Reserved.
+
+from django.urls import re_path
+
+from awx.api.views import (
+    ReceptorAddressesList,
+    ReceptorAddressDetail,
+)
+
+
+urls = [
+    re_path(r'^$', ReceptorAddressesList.as_view(), name='receptor_addresses_list'),
+    re_path(r'^(?P<pk>[0-9]+)/$', ReceptorAddressDetail.as_view(), name='receptor_address_detail'),
+]
+
+__all__ = ['urls']
@@ -36,6 +36,7 @@ from awx.api.views import (
 from awx.api.views.bulk import (
     BulkView,
     BulkHostCreateView,
+    BulkHostDeleteView,
     BulkJobLaunchView,
 )

@@ -84,6 +85,7 @@ from .oauth2_root import urls as oauth2_root_urls
 from .workflow_approval_template import urls as workflow_approval_template_urls
 from .workflow_approval import urls as workflow_approval_urls
 from .analytics import urls as analytics_urls
+from .receptor_address import urls as receptor_address_urls

 v2_urls = [
     re_path(r'^$', ApiV2RootView.as_view(), name='api_v2_root_view'),

@@ -152,7 +154,9 @@ v2_urls = [
     re_path(r'^workflow_approvals/', include(workflow_approval_urls)),
     re_path(r'^bulk/$', BulkView.as_view(), name='bulk'),
     re_path(r'^bulk/host_create/$', BulkHostCreateView.as_view(), name='bulk_host_create'),
+    re_path(r'^bulk/host_delete/$', BulkHostDeleteView.as_view(), name='bulk_host_delete'),
     re_path(r'^bulk/job_launch/$', BulkJobLaunchView.as_view(), name='bulk_job_launch'),
+    re_path(r'^receptor_addresses/', include(receptor_address_urls)),
 ]
@@ -1,10 +1,11 @@
 from django.urls import re_path

-from awx.api.views.webhooks import WebhookKeyView, GithubWebhookReceiver, GitlabWebhookReceiver
+from awx.api.views.webhooks import WebhookKeyView, GithubWebhookReceiver, GitlabWebhookReceiver, BitbucketDcWebhookReceiver


 urlpatterns = [
     re_path(r'^webhook_key/$', WebhookKeyView.as_view(), name='webhook_key'),
     re_path(r'^github/$', GithubWebhookReceiver.as_view(), name='webhook_receiver_github'),
     re_path(r'^gitlab/$', GitlabWebhookReceiver.as_view(), name='webhook_receiver_gitlab'),
+    re_path(r'^bitbucket_dc/$', BitbucketDcWebhookReceiver.as_view(), name='webhook_receiver_bitbucket_dc'),
 ]
@@ -2,28 +2,21 @@
 # All Rights Reserved.

 from django.conf import settings
-from django.urls import NoReverseMatch

-from rest_framework.reverse import _reverse
+from rest_framework.reverse import reverse as drf_reverse
 from rest_framework.versioning import URLPathVersioning as BaseVersioning


-def drf_reverse(viewname, args=None, kwargs=None, request=None, format=None, **extra):
-    """
-    Copy and monkey-patch `rest_framework.reverse.reverse` to prevent adding unwarranted
-    query string parameters.
-    """
-    scheme = getattr(request, 'versioning_scheme', None)
-    if scheme is not None:
-        try:
-            url = scheme.reverse(viewname, args, kwargs, request, format, **extra)
-        except NoReverseMatch:
-            # In case the versioning scheme reversal fails, fallback to the
-            # default implementation
-            url = _reverse(viewname, args, kwargs, request, format, **extra)
-    else:
-        url = _reverse(viewname, args, kwargs, request, format, **extra)
-
-    return url
+def is_optional_api_urlpattern_prefix_request(request):
+    if settings.OPTIONAL_API_URLPATTERN_PREFIX and request:
+        if request.path.startswith(f"/api/{settings.OPTIONAL_API_URLPATTERN_PREFIX}"):
+            return True
+    return False
+
+
+def transform_optional_api_urlpattern_prefix_url(request, url):
+    if is_optional_api_urlpattern_prefix_request(request):
+        url = url.replace('/api', f"/api/{settings.OPTIONAL_API_URLPATTERN_PREFIX}")
+    return url
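The rewritten versioning module above drops the `reverse()` monkey-patch in favor of two small helpers. A minimal sketch of how they compose, with the Django settings machinery stubbed out and the prefix value assumed (for illustration) to be `'controller'`:

```python
# Minimal sketch, assuming OPTIONAL_API_URLPATTERN_PREFIX = 'controller';
# django.conf.settings is stubbed out to show the helpers' behavior only.
OPTIONAL_API_URLPATTERN_PREFIX = 'controller'

def is_prefixed(request_path):
    # mirrors is_optional_api_urlpattern_prefix_request
    return bool(OPTIONAL_API_URLPATTERN_PREFIX) and request_path.startswith(f"/api/{OPTIONAL_API_URLPATTERN_PREFIX}")

def transform(request_path, url):
    # mirrors transform_optional_api_urlpattern_prefix_url
    if is_prefixed(request_path):
        url = url.replace('/api', f"/api/{OPTIONAL_API_URLPATTERN_PREFIX}")
    return url

print(transform('/api/controller/v2/ping/', '/api/v2/jobs/'))  # -> /api/controller/v2/jobs/
print(transform('/api/v2/ping/', '/api/v2/jobs/'))             # -> /api/v2/jobs/ (unchanged)
```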
@@ -60,6 +60,11 @@ from oauth2_provider.models import get_access_token_model
 import pytz
 from wsgiref.util import FileWrapper

+# django-ansible-base
+from ansible_base.lib.utils.requests import get_remote_hosts
+from ansible_base.rbac.models import RoleEvaluation, ObjectRole
+from ansible_base.resource_registry.shared_types import OrganizationType, TeamType, UserType
+
 # AWX
 from awx.main.tasks.system import send_notifications, update_inventory_computed_fields
 from awx.main.access import get_user_queryset

@@ -87,6 +92,7 @@ from awx.api.generics import (
 from awx.api.views.labels import LabelSubListCreateAttachDetachView
 from awx.api.versioning import reverse
 from awx.main import models
+from awx.main.models.rbac import get_role_definition
 from awx.main.utils import (
     camelcase_to_underscore,
     extract_ansible_vars,

@@ -124,10 +130,15 @@ from awx.api.views.mixin import (
 from awx.api.pagination import UnifiedJobEventPagination
 from awx.main.utils import set_environ


 logger = logging.getLogger('awx.api.views')


 def unpartitioned_event_horizon(cls):
+    with connection.cursor() as cursor:
+        cursor.execute(f"SELECT 1 FROM INFORMATION_SCHEMA.TABLES WHERE table_name = '_unpartitioned_{cls._meta.db_table}';")
+        if not cursor.fetchone():
+            return 0
     with connection.cursor() as cursor:
         try:
             cursor.execute(f'SELECT MAX(id) FROM _unpartitioned_{cls._meta.db_table}')
@@ -268,16 +279,24 @@ class DashboardJobsGraphView(APIView):

         success_query = user_unified_jobs.filter(status='successful')
         failed_query = user_unified_jobs.filter(status='failed')
+        canceled_query = user_unified_jobs.filter(status='canceled')
+        error_query = user_unified_jobs.filter(status='error')

         if job_type == 'inv_sync':
             success_query = success_query.filter(instance_of=models.InventoryUpdate)
             failed_query = failed_query.filter(instance_of=models.InventoryUpdate)
+            canceled_query = canceled_query.filter(instance_of=models.InventoryUpdate)
+            error_query = error_query.filter(instance_of=models.InventoryUpdate)
         elif job_type == 'playbook_run':
             success_query = success_query.filter(instance_of=models.Job)
             failed_query = failed_query.filter(instance_of=models.Job)
+            canceled_query = canceled_query.filter(instance_of=models.Job)
+            error_query = error_query.filter(instance_of=models.Job)
         elif job_type == 'scm_update':
             success_query = success_query.filter(instance_of=models.ProjectUpdate)
             failed_query = failed_query.filter(instance_of=models.ProjectUpdate)
+            canceled_query = canceled_query.filter(instance_of=models.ProjectUpdate)
+            error_query = error_query.filter(instance_of=models.ProjectUpdate)

         end = now()
         interval = 'day'
@@ -293,10 +312,12 @@ class DashboardJobsGraphView(APIView):
         else:
             return Response({'error': _('Unknown period "%s"') % str(period)}, status=status.HTTP_400_BAD_REQUEST)

-        dashboard_data = {"jobs": {"successful": [], "failed": []}}
+        dashboard_data = {"jobs": {"successful": [], "failed": [], "canceled": [], "error": []}}

         succ_list = dashboard_data['jobs']['successful']
         fail_list = dashboard_data['jobs']['failed']
+        canceled_list = dashboard_data['jobs']['canceled']
+        error_list = dashboard_data['jobs']['error']

         qs_s = (
             success_query.filter(finished__range=(start, end))
@@ -314,6 +335,22 @@ class DashboardJobsGraphView(APIView):
             .annotate(agg=Count('id', distinct=True))
         )
         data_f = {item['d']: item['agg'] for item in qs_f}
+        qs_c = (
+            canceled_query.filter(finished__range=(start, end))
+            .annotate(d=Trunc('finished', interval, tzinfo=end.tzinfo))
+            .order_by()
+            .values('d')
+            .annotate(agg=Count('id', distinct=True))
+        )
+        data_c = {item['d']: item['agg'] for item in qs_c}
+        qs_e = (
+            error_query.filter(finished__range=(start, end))
+            .annotate(d=Trunc('finished', interval, tzinfo=end.tzinfo))
+            .order_by()
+            .values('d')
+            .annotate(agg=Count('id', distinct=True))
+        )
+        data_e = {item['d']: item['agg'] for item in qs_e}

         start_date = start.replace(hour=0, minute=0, second=0, microsecond=0)
         for d in itertools.count():
@@ -322,6 +359,8 @@ class DashboardJobsGraphView(APIView):
                 break
             succ_list.append([time.mktime(date.timetuple()), data_s.get(date, 0)])
             fail_list.append([time.mktime(date.timetuple()), data_f.get(date, 0)])
+            canceled_list.append([time.mktime(date.timetuple()), data_c.get(date, 0)])
+            error_list.append([time.mktime(date.timetuple()), data_e.get(date, 0)])

         return Response(dashboard_data)
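Each of the new status series above repeats one ORM idiom: truncate `finished` to the reporting interval, group on the truncated value, and count distinct ids. Isolated into a helper (the function name is ours, not AWX's), the pattern is:

```python
# Sketch of the grouping pattern used for each status bucket above.
from django.db.models import Count
from django.db.models.functions import Trunc

def count_per_bucket(qs, start, end, interval, tzinfo):
    """Return {bucket_datetime: row_count} for rows finished in [start, end]."""
    rows = (
        qs.filter(finished__range=(start, end))
        .annotate(d=Trunc('finished', interval, tzinfo=tzinfo))
        .order_by()            # clear default ordering so GROUP BY is only on "d"
        .values('d')
        .annotate(agg=Count('id', distinct=True))
    )
    return {row['d']: row['agg'] for row in rows}
```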
@@ -333,12 +372,20 @@ class InstanceList(ListCreateAPIView):
     search_fields = ('hostname',)
     ordering = ('id',)

+    def get_queryset(self):
+        qs = super().get_queryset().prefetch_related('receptor_addresses')
+        return qs
+

 class InstanceDetail(RetrieveUpdateAPIView):
     name = _("Instance Detail")
     model = models.Instance
     serializer_class = serializers.InstanceSerializer

+    def get_queryset(self):
+        qs = super().get_queryset().prefetch_related('receptor_addresses')
+        return qs
+
     def update_raw_data(self, data):
         # these fields are only valid on creation of an instance, so they unwanted on detail view
         data.pop('node_type', None)
@@ -371,13 +418,37 @@ class InstanceUnifiedJobsList(SubListAPIView):


 class InstancePeersList(SubListAPIView):
-    name = _("Instance Peers")
+    name = _("Peers")
+    model = models.ReceptorAddress
+    serializer_class = serializers.ReceptorAddressSerializer
     parent_model = models.Instance
-    model = models.Instance
-    serializer_class = serializers.InstanceSerializer
     parent_access = 'read'
-    search_fields = {'hostname'}
     relationship = 'peers'
+    search_fields = ('address',)
+
+
+class InstanceReceptorAddressesList(SubListAPIView):
+    name = _("Receptor Addresses")
+    model = models.ReceptorAddress
+    parent_key = 'instance'
+    parent_model = models.Instance
+    serializer_class = serializers.ReceptorAddressSerializer
+    search_fields = ('address',)
+
+
+class ReceptorAddressesList(ListAPIView):
+    name = _("Receptor Addresses")
+    model = models.ReceptorAddress
+    serializer_class = serializers.ReceptorAddressSerializer
+    search_fields = ('address',)
+
+
+class ReceptorAddressDetail(RetrieveAPIView):
+    name = _("Receptor Address Detail")
+    model = models.ReceptorAddress
+    serializer_class = serializers.ReceptorAddressSerializer
+    parent_model = models.Instance
+    relationship = 'receptor_addresses'


 class InstanceInstanceGroupsList(InstanceGroupMembershipMixin, SubListCreateAttachDetachAPIView):
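For orientation, the views above surface three kinds of endpoints: a flat receptor-address list, a per-instance sublist, and the rewritten peers sublist, which now returns receptor addresses rather than instances. A hypothetical client session (host and token are placeholders):

```python
# Hypothetical client-side sketch against the routes wired up earlier;
# AWX_HOST and TOKEN are placeholders, not real values.
import requests

AWX_HOST = 'https://awx.example.com'
headers = {'Authorization': 'Bearer REDACTED'}

# all receptor addresses in the mesh
addrs = requests.get(f'{AWX_HOST}/api/v2/receptor_addresses/', headers=headers).json()

# addresses owned by, and peers of, a single instance
owned = requests.get(f'{AWX_HOST}/api/v2/instances/1/receptor_addresses/', headers=headers).json()
peers = requests.get(f'{AWX_HOST}/api/v2/instances/1/peers/', headers=headers).json()
```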
@@ -472,6 +543,7 @@ class InstanceGroupAccessList(ResourceAccessList):


 class InstanceGroupObjectRolesList(SubListAPIView):
+    deprecated = True
     model = models.Role
     serializer_class = serializers.RoleSerializer
     parent_model = models.InstanceGroup
@@ -641,16 +713,81 @@ class AuthView(APIView):
         return Response(data)


+def immutablesharedfields(cls):
+    '''
+    Class decorator to prevent modifying shared resources when ALLOW_LOCAL_RESOURCE_MANAGEMENT setting is set to False.
+
+    Works by overriding these view methods:
+    - create
+    - delete
+    - perform_update
+    create and delete are overridden to raise a PermissionDenied exception.
+    perform_update is overridden to check if any shared fields are being modified,
+    and raise a PermissionDenied exception if so.
+    '''
+    # create instead of perform_create because some of our views
+    # override create instead of perform_create
+    if hasattr(cls, 'create'):
+        cls.original_create = cls.create
+
+        @functools.wraps(cls.create)
+        def create_wrapper(*args, **kwargs):
+            if settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT:
+                return cls.original_create(*args, **kwargs)
+            raise PermissionDenied({'detail': _('Creation of this resource is not allowed. Create this resource via the platform ingress.')})
+
+        cls.create = create_wrapper
+
+    if hasattr(cls, 'delete'):
+        cls.original_delete = cls.delete
+
+        @functools.wraps(cls.delete)
+        def delete_wrapper(*args, **kwargs):
+            if settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT:
+                return cls.original_delete(*args, **kwargs)
+            raise PermissionDenied({'detail': _('Deletion of this resource is not allowed. Delete this resource via the platform ingress.')})
+
+        cls.delete = delete_wrapper
+
+    if hasattr(cls, 'perform_update'):
+        cls.original_perform_update = cls.perform_update
+
+        @functools.wraps(cls.perform_update)
+        def update_wrapper(*args, **kwargs):
+            if not settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT:
+                view, serializer = args
+                instance = view.get_object()
+                if instance:
+                    if isinstance(instance, models.Organization):
+                        shared_fields = OrganizationType._declared_fields.keys()
+                    elif isinstance(instance, models.User):
+                        shared_fields = UserType._declared_fields.keys()
+                    elif isinstance(instance, models.Team):
+                        shared_fields = TeamType._declared_fields.keys()
+                    attrs = serializer.validated_data
+                    for field in shared_fields:
+                        if field in attrs and getattr(instance, field) != attrs[field]:
+                            raise PermissionDenied({field: _(f"Cannot change shared field '{field}'. Alter this field via the platform ingress.")})
+            return cls.original_perform_update(*args, **kwargs)
+
+        cls.perform_update = update_wrapper
+
+    return cls
+
+
+@immutablesharedfields
 class TeamList(ListCreateAPIView):
     model = models.Team
     serializer_class = serializers.TeamSerializer


+@immutablesharedfields
 class TeamDetail(RetrieveUpdateDestroyAPIView):
     model = models.Team
     serializer_class = serializers.TeamSerializer


+@immutablesharedfields
 class TeamUsersList(BaseUsersList):
     model = models.User
     serializer_class = serializers.UserSerializer
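The `immutablesharedfields` decorator added above rewrites methods on the class object itself rather than using a mixin. The wrapping technique, stripped down to a runnable sketch with a module-level flag standing in for the Django setting:

```python
# Standalone sketch of the method-wrapping technique used by
# immutablesharedfields; the flag and class here are illustrative, not AWX code.
import functools

ALLOW_WRITES = False  # stand-in for settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT

def readonly_unless_allowed(cls):
    if hasattr(cls, 'create'):
        original = cls.create

        @functools.wraps(original)
        def create_wrapper(*args, **kwargs):
            if ALLOW_WRITES:
                return original(*args, **kwargs)
            raise PermissionError('creation disabled')

        cls.create = create_wrapper  # replace the method on the class in place
    return cls

@readonly_unless_allowed
class DemoView:
    def create(self, data):
        return {'created': data}

try:
    DemoView().create({'name': 'x'})
except PermissionError as exc:
    print(exc)  # -> creation disabled
```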
@@ -660,6 +797,7 @@ class TeamUsersList(BaseUsersList):


 class TeamRolesList(SubListAttachDetachAPIView):
+    deprecated = True
     model = models.Role
     serializer_class = serializers.RoleSerializerWithParentAccess
     metadata_class = RoleMetadata
@@ -699,10 +837,12 @@ class TeamRolesList(SubListAttachDetachAPIView):


 class TeamObjectRolesList(SubListAPIView):
+    deprecated = True
     model = models.Role
     serializer_class = serializers.RoleSerializer
     parent_model = models.Team
     search_fields = ('role_field', 'content_type__model')
+    deprecated = True

     def get_queryset(self):
         po = self.get_parent_object()
@@ -720,8 +860,15 @@ class TeamProjectsList(SubListAPIView):
         self.check_parent_access(team)
         model_ct = ContentType.objects.get_for_model(self.model)
         parent_ct = ContentType.objects.get_for_model(self.parent_model)
-        proj_roles = models.Role.objects.filter(Q(ancestors__content_type=parent_ct) & Q(ancestors__object_id=team.pk), content_type=model_ct)
-        return self.model.accessible_objects(self.request.user, 'read_role').filter(pk__in=[t.content_object.pk for t in proj_roles])
+
+        rd = get_role_definition(team.member_role)
+        role = ObjectRole.objects.filter(object_id=team.id, content_type=parent_ct, role_definition=rd).first()
+        if role is None:
+            # Team has no permissions, therefore team has no projects
+            return self.model.objects.none()
+        else:
+            project_qs = self.model.accessible_objects(self.request.user, 'read_role')
+            return project_qs.filter(id__in=RoleEvaluation.objects.filter(content_type_id=model_ct.id, role=role).values_list('object_id'))


 class TeamActivityStreamList(SubListAPIView):
@@ -736,10 +883,23 @@ class TeamActivityStreamList(SubListAPIView):
         self.check_parent_access(parent)

         qs = self.request.user.get_queryset(self.model)

         return qs.filter(
             Q(team=parent)
-            | Q(project__in=models.Project.accessible_objects(parent, 'read_role'))
-            | Q(credential__in=models.Credential.accessible_objects(parent, 'read_role'))
+            | Q(
+                project__in=RoleEvaluation.objects.filter(
+                    role__in=parent.has_roles.all(), content_type_id=ContentType.objects.get_for_model(models.Project).id, codename='view_project'
+                )
+                .values_list('object_id')
+                .distinct()
+            )
+            | Q(
+                credential__in=RoleEvaluation.objects.filter(
+                    role__in=parent.has_roles.all(), content_type_id=ContentType.objects.get_for_model(models.Credential).id, codename='view_credential'
+                )
+                .values_list('object_id')
+                .distinct()
+            )
         )
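Both queryset rewrites above replace the old `Role.ancestors` joins with lookups against django-ansible-base's `RoleEvaluation` table, which stores one row per (role, content type, object id, permission codename). Schematically, the reusable shape of that lookup is:

```python
# Schematic form of the DAB RBAC lookup used in the rewrites above;
# the helper name and its call sites are ours, for illustration only.
from django.contrib.contenttypes.models import ContentType
from ansible_base.rbac.models import RoleEvaluation

def visible_object_ids(roles_qs, model, codename):
    """IDs of `model` objects reachable through any role in roles_qs with `codename`."""
    ct = ContentType.objects.get_for_model(model)
    return (
        RoleEvaluation.objects.filter(role__in=roles_qs, content_type_id=ct.id, codename=codename)
        .values_list('object_id')
        .distinct()
    )
```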
@@ -991,10 +1151,12 @@ class ProjectAccessList(ResourceAccessList):


 class ProjectObjectRolesList(SubListAPIView):
+    deprecated = True
     model = models.Role
     serializer_class = serializers.RoleSerializer
     parent_model = models.Project
     search_fields = ('role_field', 'content_type__model')
+    deprecated = True

     def get_queryset(self):
         po = self.get_parent_object()
@@ -1007,6 +1169,7 @@ class ProjectCopy(CopyAPIView):
     copy_return_serializer_class = serializers.ProjectSerializer


+@immutablesharedfields
 class UserList(ListCreateAPIView):
     model = models.User
     serializer_class = serializers.UserSerializer
@@ -1152,6 +1315,7 @@ class UserTeamsList(SubListAPIView):


 class UserRolesList(SubListAttachDetachAPIView):
+    deprecated = True
     model = models.Role
     serializer_class = serializers.RoleSerializerWithParentAccess
     metadata_class = RoleMetadata
@@ -1176,7 +1340,16 @@ class UserRolesList(SubListAttachDetachAPIView):
         user = get_object_or_400(models.User, pk=self.kwargs['pk'])
         role = get_object_or_400(models.Role, pk=sub_id)

-        credential_content_type = ContentType.objects.get_for_model(models.Credential)
+        content_types = ContentType.objects.get_for_models(models.Organization, models.Team, models.Credential)  # dict of {model: content_type}
+        # Prevent user to be associated with team/org when ALLOW_LOCAL_RESOURCE_MANAGEMENT is False
+        if not settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT:
+            for model in [models.Organization, models.Team]:
+                ct = content_types[model]
+                if role.content_type == ct and role.role_field in ['member_role', 'admin_role']:
+                    data = dict(msg=_(f"Cannot directly modify user membership to {ct.model}. Direct shared resource management disabled"))
+                    return Response(data, status=status.HTTP_403_FORBIDDEN)
+
+        credential_content_type = content_types[models.Credential]
         if role.content_type == credential_content_type:
             if 'disassociate' not in request.data and role.content_object.organization and user not in role.content_object.organization.member_role:
                 data = dict(msg=_("You cannot grant credential access to a user not in the credentials' organization"))
@@ -1248,6 +1421,7 @@ class UserActivityStreamList(SubListAPIView):
         return qs.filter(Q(actor=parent) | Q(user__in=[parent]))


+@immutablesharedfields
 class UserDetail(RetrieveUpdateDestroyAPIView):
     model = models.User
     serializer_class = serializers.UserSerializer
@@ -1393,7 +1567,7 @@ class OrganizationCredentialList(SubListCreateAPIView):
         self.check_parent_access(organization)

         user_visible = models.Credential.accessible_objects(self.request.user, 'read_role').all()
-        org_set = models.Credential.accessible_objects(organization.admin_role, 'read_role').all()
+        org_set = models.Credential.objects.filter(organization=organization)

         if self.request.user.is_superuser or self.request.user.is_system_auditor:
             return org_set
@@ -1426,10 +1600,12 @@ class CredentialAccessList(ResourceAccessList):


 class CredentialObjectRolesList(SubListAPIView):
+    deprecated = True
     model = models.Role
     serializer_class = serializers.RoleSerializer
     parent_model = models.Credential
     search_fields = ('role_field', 'content_type__model')
+    deprecated = True

     def get_queryset(self):
         po = self.get_parent_object()
@@ -2216,12 +2392,13 @@ class JobTemplateList(ListCreateAPIView):
     serializer_class = serializers.JobTemplateSerializer
     always_allow_superuser = False

-    def post(self, request, *args, **kwargs):
-        ret = super(JobTemplateList, self).post(request, *args, **kwargs)
-        if ret.status_code == 201:
-            job_template = models.JobTemplate.objects.get(id=ret.data['id'])
-            job_template.admin_role.members.add(request.user)
-        return ret
+    def check_permissions(self, request):
+        if request.method == 'POST':
+            can_access, messages = request.user.can_access_with_errors(self.model, 'add', request.data)
+            if not can_access:
+                self.permission_denied(request, message=messages)
+
+        super(JobTemplateList, self).check_permissions(request)


 class JobTemplateDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView):
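Moving the add-permission check from `post()` into `check_permissions()` means DRF rejects an unauthorized create before the request body is acted on, instead of creating first and adding the requester to `admin_role` afterwards. A generic sketch of that hook (the `can_create` helper is a placeholder, not AWX's access API):

```python
# Generic sketch of the DRF check_permissions hook used above.
from rest_framework import generics

def can_create(user, data):
    # placeholder for real access logic, e.g. user.can_access_with_errors(...)
    return getattr(user, 'is_superuser', False)

class GuardedListCreateView(generics.ListCreateAPIView):
    def check_permissions(self, request):
        # runs before get()/post(); denying here keeps the create path untouched
        if request.method == 'POST' and not can_create(request.user, request.data):
            self.permission_denied(request, message='not allowed')
        super().check_permissions(request)
```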
@@ -2602,12 +2779,7 @@ class JobTemplateCallback(GenericAPIView):
         host for the current request.
         """
         # Find the list of remote host names/IPs to check.
-        remote_hosts = set()
-        for header in settings.REMOTE_HOST_HEADERS:
-            for value in self.request.META.get(header, '').split(','):
-                value = value.strip()
-                if value:
-                    remote_hosts.add(value)
+        remote_hosts = set(get_remote_hosts(self.request))
         # Add the reverse lookup of IP addresses.
         for rh in list(remote_hosts):
             try:
@@ -2768,10 +2940,12 @@ class JobTemplateAccessList(ResourceAccessList):


 class JobTemplateObjectRolesList(SubListAPIView):
+    deprecated = True
     model = models.Role
     serializer_class = serializers.RoleSerializer
     parent_model = models.JobTemplate
     search_fields = ('role_field', 'content_type__model')
+    deprecated = True

     def get_queryset(self):
         po = self.get_parent_object()
@@ -2945,6 +3119,14 @@ class WorkflowJobTemplateList(ListCreateAPIView):
     serializer_class = serializers.WorkflowJobTemplateSerializer
     always_allow_superuser = False

+    def check_permissions(self, request):
+        if request.method == 'POST':
+            can_access, messages = request.user.can_access_with_errors(self.model, 'add', request.data)
+            if not can_access:
+                self.permission_denied(request, message=messages)
+
+        super(WorkflowJobTemplateList, self).check_permissions(request)
+

 class WorkflowJobTemplateDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView):
     model = models.WorkflowJobTemplate
@@ -3154,10 +3336,12 @@ class WorkflowJobTemplateAccessList(ResourceAccessList):


 class WorkflowJobTemplateObjectRolesList(SubListAPIView):
+    deprecated = True
     model = models.Role
     serializer_class = serializers.RoleSerializer
     parent_model = models.WorkflowJobTemplate
     search_fields = ('role_field', 'content_type__model')
+    deprecated = True

     def get_queryset(self):
         po = self.get_parent_object()
@@ -4166,6 +4350,7 @@ class ActivityStreamDetail(RetrieveAPIView):


 class RoleList(ListAPIView):
+    deprecated = True
     model = models.Role
     serializer_class = serializers.RoleSerializer
     permission_classes = (IsAuthenticated,)
@@ -4173,11 +4358,13 @@ class RoleList(ListAPIView):


 class RoleDetail(RetrieveAPIView):
+    deprecated = True
     model = models.Role
     serializer_class = serializers.RoleSerializer


 class RoleUsersList(SubListAttachDetachAPIView):
+    deprecated = True
     model = models.User
     serializer_class = serializers.UserSerializer
     parent_model = models.Role
@@ -4198,7 +4385,15 @@ class RoleUsersList(SubListAttachDetachAPIView):
         user = get_object_or_400(models.User, pk=sub_id)
         role = self.get_parent_object()

-        credential_content_type = ContentType.objects.get_for_model(models.Credential)
+        content_types = ContentType.objects.get_for_models(models.Organization, models.Team, models.Credential)  # dict of {model: content_type}
+        if not settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT:
+            for model in [models.Organization, models.Team]:
+                ct = content_types[model]
+                if role.content_type == ct and role.role_field in ['member_role', 'admin_role']:
+                    data = dict(msg=_(f"Cannot directly modify user membership to {ct.model}. Direct shared resource management disabled"))
+                    return Response(data, status=status.HTTP_403_FORBIDDEN)
+
+        credential_content_type = content_types[models.Credential]
         if role.content_type == credential_content_type:
             if 'disassociate' not in request.data and role.content_object.organization and user not in role.content_object.organization.member_role:
                 data = dict(msg=_("You cannot grant credential access to a user not in the credentials' organization"))
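`ContentType.objects.get_for_models(...)`, used in both attach hooks above, fetches several content types at once and returns a dict keyed by model class, so a single call serves both the membership guard and the credential check. For example, with stock Django models:

```python
# get_for_models returns {model_class: ContentType}; the auth models here
# are just neutral examples to show the shape of the result.
from django.contrib.auth.models import Group, User
from django.contrib.contenttypes.models import ContentType

cts = ContentType.objects.get_for_models(User, Group)
print(cts[User].model)   # 'user'
print(cts[Group].model)  # 'group'
```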
@@ -4212,6 +4407,7 @@ class RoleUsersList(SubListAttachDetachAPIView):


 class RoleTeamsList(SubListAttachDetachAPIView):
+    deprecated = True
     model = models.Team
     serializer_class = serializers.TeamSerializer
     parent_model = models.Role
@@ -4256,10 +4452,12 @@ class RoleTeamsList(SubListAttachDetachAPIView):
             team.member_role.children.remove(role)
         else:
             team.member_role.children.add(role)

         return Response(status=status.HTTP_204_NO_CONTENT)


 class RoleParentsList(SubListAPIView):
+    deprecated = True
     model = models.Role
     serializer_class = serializers.RoleSerializer
     parent_model = models.Role
@@ -4273,6 +4471,7 @@ class RoleParentsList(SubListAPIView):


 class RoleChildrenList(SubListAPIView):
+    deprecated = True
     model = models.Role
     serializer_class = serializers.RoleSerializer
     parent_model = models.Role
@@ -48,23 +48,23 @@ class AnalyticsRootView(APIView):

     def get(self, request, format=None):
         data = OrderedDict()
-        data['authorized'] = reverse('api:analytics_authorized')
-        data['reports'] = reverse('api:analytics_reports_list')
-        data['report_options'] = reverse('api:analytics_report_options_list')
-        data['adoption_rate'] = reverse('api:analytics_adoption_rate')
-        data['adoption_rate_options'] = reverse('api:analytics_adoption_rate_options')
-        data['event_explorer'] = reverse('api:analytics_event_explorer')
-        data['event_explorer_options'] = reverse('api:analytics_event_explorer_options')
-        data['host_explorer'] = reverse('api:analytics_host_explorer')
-        data['host_explorer_options'] = reverse('api:analytics_host_explorer_options')
-        data['job_explorer'] = reverse('api:analytics_job_explorer')
-        data['job_explorer_options'] = reverse('api:analytics_job_explorer_options')
-        data['probe_templates'] = reverse('api:analytics_probe_templates_explorer')
-        data['probe_templates_options'] = reverse('api:analytics_probe_templates_options')
-        data['probe_template_for_hosts'] = reverse('api:analytics_probe_template_for_hosts_explorer')
-        data['probe_template_for_hosts_options'] = reverse('api:analytics_probe_template_for_hosts_options')
-        data['roi_templates'] = reverse('api:analytics_roi_templates_explorer')
-        data['roi_templates_options'] = reverse('api:analytics_roi_templates_options')
+        data['authorized'] = reverse('api:analytics_authorized', request=request)
+        data['reports'] = reverse('api:analytics_reports_list', request=request)
+        data['report_options'] = reverse('api:analytics_report_options_list', request=request)
+        data['adoption_rate'] = reverse('api:analytics_adoption_rate', request=request)
+        data['adoption_rate_options'] = reverse('api:analytics_adoption_rate_options', request=request)
+        data['event_explorer'] = reverse('api:analytics_event_explorer', request=request)
+        data['event_explorer_options'] = reverse('api:analytics_event_explorer_options', request=request)
+        data['host_explorer'] = reverse('api:analytics_host_explorer', request=request)
+        data['host_explorer_options'] = reverse('api:analytics_host_explorer_options', request=request)
+        data['job_explorer'] = reverse('api:analytics_job_explorer', request=request)
+        data['job_explorer_options'] = reverse('api:analytics_job_explorer_options', request=request)
+        data['probe_templates'] = reverse('api:analytics_probe_templates_explorer', request=request)
+        data['probe_templates_options'] = reverse('api:analytics_probe_templates_options', request=request)
+        data['probe_template_for_hosts'] = reverse('api:analytics_probe_template_for_hosts_explorer', request=request)
+        data['probe_template_for_hosts_options'] = reverse('api:analytics_probe_template_for_hosts_options', request=request)
+        data['roi_templates'] = reverse('api:analytics_roi_templates_explorer', request=request)
+        data['roi_templates_options'] = reverse('api:analytics_roi_templates_options', request=request)
         return Response(data)
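The only change in this hunk is threading `request=` through every `reverse()` call. With DRF-style reverse, supplying the request makes the result an absolute URL built from the request's scheme and host; without it you get a bare path. Roughly (the hostname is an example, not a real deployment):

```python
# Illustrative only; 'awx.example.com' stands in for whatever host served the request.
from rest_framework.reverse import reverse

def analytics_links(request):
    relative = reverse('api:analytics_reports_list')                   # e.g. '/api/v2/analytics/reports/'
    absolute = reverse('api:analytics_reports_list', request=request)  # e.g. 'https://awx.example.com/api/v2/analytics/reports/'
    return relative, absolute
```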
@@ -34,6 +34,7 @@ class BulkView(APIView):
         '''List top level resources'''
         data = OrderedDict()
         data['host_create'] = reverse('api:bulk_host_create', request=request)
+        data['host_delete'] = reverse('api:bulk_host_delete', request=request)
         data['job_launch'] = reverse('api:bulk_job_launch', request=request)
         return Response(data)
@@ -72,3 +73,20 @@ class BulkHostCreateView(GenericAPIView):
             result = serializer.create(serializer.validated_data)
             return Response(result, status=status.HTTP_201_CREATED)
         return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+
+
+class BulkHostDeleteView(GenericAPIView):
+    permission_classes = [IsAuthenticated]
+    model = Host
+    serializer_class = serializers.BulkHostDeleteSerializer
+    allowed_methods = ['GET', 'POST', 'OPTIONS']
+
+    def get(self, request):
+        return Response({"detail": "Bulk delete hosts with this endpoint"}, status=status.HTTP_200_OK)
+
+    def post(self, request):
+        serializer = serializers.BulkHostDeleteSerializer(data=request.data, context={'request': request})
+        if serializer.is_valid():
+            result = serializer.delete(serializer.validated_data)
+            return Response(result, status=status.HTTP_201_CREATED)
+        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
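A hypothetical client call against the new endpoint; the payload shape `{"hosts": [<ids>]}` is an assumption inferred from the serializer name and is not confirmed by this hunk:

```python
# Hypothetical usage sketch; host, token, and payload shape are assumptions.
import requests

resp = requests.post(
    'https://awx.example.com/api/v2/bulk/host_delete/',
    headers={'Authorization': 'Bearer REDACTED'},
    json={'hosts': [1, 2, 3]},  # assumed: list of host ids to delete
)
print(resp.status_code, resp.json())
```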
@@ -124,10 +124,19 @@ def generate_inventory_yml(instance_obj):


 def generate_group_vars_all_yml(instance_obj):
+    # get peers
     peers = []
-    for instance in instance_obj.peers.all():
-        peers.append(dict(host=instance.hostname, port=instance.listener_port))
-    all_yaml = render_to_string("instance_install_bundle/group_vars/all.yml", context=dict(instance=instance_obj, peers=peers))
+    for addr in instance_obj.peers.select_related('instance'):
+        peers.append(dict(address=addr.get_full_address(), protocol=addr.protocol))
+    context = dict(instance=instance_obj, peers=peers)
+
+    canonical_addr = instance_obj.canonical_address
+    if canonical_addr:
+        context['listener_port'] = canonical_addr.port
+        protocol = canonical_addr.protocol if canonical_addr.protocol != 'wss' else 'ws'
+        context['listener_protocol'] = protocol
+
+    all_yaml = render_to_string("instance_install_bundle/group_vars/all.yml", context=context)
     # convert consecutive newlines with a single newline
     return re.sub(r'\n+', '\n', all_yaml)
@@ -152,6 +152,7 @@ class InventoryObjectRolesList(SubListAPIView):
     serializer_class = RoleSerializer
     parent_model = Inventory
     search_fields = ('role_field', 'content_type__model')
+    deprecated = True

     def get_queryset(self):
         po = self.get_parent_object()
@@ -17,7 +17,7 @@ class MeshVisualizer(APIView):
     def get(self, request, format=None):
         data = {
             'nodes': InstanceNodeSerializer(Instance.objects.all(), many=True).data,
-            'links': InstanceLinkSerializer(InstanceLink.objects.select_related('target', 'source'), many=True).data,
+            'links': InstanceLinkSerializer(InstanceLink.objects.select_related('target__instance', 'source'), many=True).data,
         }

         return Response(data)
@@ -53,15 +53,18 @@ from awx.api.serializers import (
     CredentialSerializer,
 )
 from awx.api.views.mixin import RelatedJobsPreventDeleteMixin, OrganizationCountsMixin
+from awx.api.views import immutablesharedfields

 logger = logging.getLogger('awx.api.views.organization')


+@immutablesharedfields
 class OrganizationList(OrganizationCountsMixin, ListCreateAPIView):
     model = Organization
     serializer_class = OrganizationSerializer


+@immutablesharedfields
 class OrganizationDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView):
     model = Organization
     serializer_class = OrganizationSerializer
@@ -104,6 +107,7 @@ class OrganizationInventoriesList(SubListAPIView):
     relationship = 'inventories'


+@immutablesharedfields
 class OrganizationUsersList(BaseUsersList):
     model = User
     serializer_class = UserSerializer

@@ -112,6 +116,7 @@ class OrganizationUsersList(BaseUsersList):
     ordering = ('username',)


+@immutablesharedfields
 class OrganizationAdminsList(BaseUsersList):
     model = User
     serializer_class = UserSerializer

@@ -150,6 +155,7 @@ class OrganizationWorkflowJobTemplatesList(SubListCreateAPIView):
     parent_key = 'organization'


+@immutablesharedfields
 class OrganizationTeamsList(SubListCreateAttachDetachAPIView):
     model = Team
     serializer_class = TeamSerializer

@@ -226,6 +232,7 @@ class OrganizationObjectRolesList(SubListAPIView):
     serializer_class = RoleSerializer
     parent_model = Organization
     search_fields = ('role_field', 'content_type__model')
+    deprecated = True

     def get_queryset(self):
         po = self.get_parent_object()
@@ -13,6 +13,7 @@ from django.utils.decorators import method_decorator
 from django.views.decorators.csrf import ensure_csrf_cookie
 from django.template.loader import render_to_string
 from django.utils.translation import gettext_lazy as _
+from django.urls import reverse as django_reverse

 from rest_framework.permissions import AllowAny, IsAuthenticated
 from rest_framework.response import Response

@@ -27,7 +28,7 @@ from awx.main.analytics import all_collectors
 from awx.main.ha import is_ha_environment
 from awx.main.utils import get_awx_version, get_custom_venv_choices
 from awx.main.utils.licensing import validate_entitlement_manifest
-from awx.api.versioning import reverse, drf_reverse
+from awx.api.versioning import URLPathVersioning, is_optional_api_urlpattern_prefix_request, reverse, drf_reverse
 from awx.main.constants import PRIVILEGE_ESCALATION_METHODS
 from awx.main.models import Project, Organization, Instance, InstanceGroup, JobTemplate
 from awx.main.utils import set_environ
@@ -39,19 +40,19 @@ logger = logging.getLogger('awx.api.views.root')
 class ApiRootView(APIView):
     permission_classes = (AllowAny,)
     name = _('REST API')
-    versioning_class = None
+    versioning_class = URLPathVersioning
     swagger_topic = 'Versioning'

     @method_decorator(ensure_csrf_cookie)
     def get(self, request, format=None):
         '''List supported API versions'''
-        v2 = reverse('api:api_v2_root_view', kwargs={'version': 'v2'})
+        v2 = reverse('api:api_v2_root_view', request=request, kwargs={'version': 'v2'})
         data = OrderedDict()
         data['description'] = _('AWX REST API')
         data['current_version'] = v2
         data['available_versions'] = dict(v2=v2)
-        data['oauth2'] = drf_reverse('api:oauth_authorization_root_view')
+        if not is_optional_api_urlpattern_prefix_request(request):
+            data['oauth2'] = drf_reverse('api:oauth_authorization_root_view')
         data['custom_logo'] = settings.CUSTOM_LOGO
         data['custom_login_info'] = settings.CUSTOM_LOGIN_INFO
         data['login_redirect_override'] = settings.LOGIN_REDIRECT_OVERRIDE
@@ -84,6 +85,7 @@ class ApiVersionRootView(APIView):
         data['ping'] = reverse('api:api_v2_ping_view', request=request)
         data['instances'] = reverse('api:instance_list', request=request)
         data['instance_groups'] = reverse('api:instance_group_list', request=request)
+        data['receptor_addresses'] = reverse('api:receptor_addresses_list', request=request)
         data['config'] = reverse('api:api_v2_config_view', request=request)
         data['settings'] = reverse('api:setting_category_list', request=request)
         data['me'] = reverse('api:user_me_list', request=request)
@@ -129,6 +131,10 @@ class ApiVersionRootView(APIView):
         data['mesh_visualizer'] = reverse('api:mesh_visualizer_view', request=request)
         data['bulk'] = reverse('api:bulk', request=request)
         data['analytics'] = reverse('api:analytics_root_view', request=request)
+        data['service_index'] = django_reverse('service-index-root')
+        data['role_definitions'] = django_reverse('roledefinition-list')
+        data['role_user_assignments'] = django_reverse('roleuserassignment-list')
+        data['role_team_assignments'] = django_reverse('roleteamassignment-list')
         return Response(data)
@@ -1,4 +1,4 @@
-from hashlib import sha1
+from hashlib import sha1, sha256
 import hmac
 import logging
 import urllib.parse
@@ -99,14 +99,31 @@ class WebhookReceiverBase(APIView):
     def get_signature(self):
         raise NotImplementedError

+    def must_check_signature(self):
+        return True
+
+    def is_ignored_request(self):
+        return False
+
     def check_signature(self, obj):
         if not obj.webhook_key:
             raise PermissionDenied
+        if not self.must_check_signature():
+            logger.debug("skipping signature validation")
+            return

-        mac = hmac.new(force_bytes(obj.webhook_key), msg=force_bytes(self.request.body), digestmod=sha1)
-        logger.debug("header signature: %s", self.get_signature())
+        hash_alg, expected_digest = self.get_signature()
+        if hash_alg == 'sha1':
+            mac = hmac.new(force_bytes(obj.webhook_key), msg=force_bytes(self.request.body), digestmod=sha1)
+        elif hash_alg == 'sha256':
+            mac = hmac.new(force_bytes(obj.webhook_key), msg=force_bytes(self.request.body), digestmod=sha256)
+        else:
+            logger.debug("Unsupported signature type, supported: sha1, sha256, received: {}".format(hash_alg))
+            raise PermissionDenied
+
+        logger.debug("header signature: %s", expected_digest)
         logger.debug("calculated signature: %s", force_bytes(mac.hexdigest()))
-        if not hmac.compare_digest(force_bytes(mac.hexdigest()), self.get_signature()):
+        if not hmac.compare_digest(force_bytes(mac.hexdigest()), expected_digest):
             raise PermissionDenied

     @csrf_exempt
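`check_signature` now dispatches on the algorithm name reported by `get_signature()`. The core HMAC verification, isolated into a runnable sketch (the key and body are made-up values):

```python
# Standalone sketch of the signature check above; key/body are examples.
import hmac
from hashlib import sha1, sha256

DIGESTS = {'sha1': sha1, 'sha256': sha256}

def verify(webhook_key: bytes, body: bytes, hash_alg: str, expected_hex: bytes) -> bool:
    digestmod = DIGESTS.get(hash_alg)
    if digestmod is None:
        return False  # unsupported algorithm
    mac = hmac.new(webhook_key, msg=body, digestmod=digestmod)
    # compare_digest avoids leaking where the digests diverge (timing attacks)
    return hmac.compare_digest(mac.hexdigest().encode(), expected_hex)

key, body = b'secret', b'{"event": "push"}'
good = hmac.new(key, body, sha256).hexdigest().encode()
print(verify(key, body, 'sha256', good))  # True
print(verify(key, body, 'md5', good))     # False (unsupported)
```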
@@ -118,6 +135,10 @@ class WebhookReceiverBase(APIView):
         obj = self.get_object()
         self.check_signature(obj)

+        if self.is_ignored_request():
+            # This was an ignored request type (e.g. ping), don't act on it
+            return Response({'message': _("Webhook ignored")}, status=status.HTTP_200_OK)
+
         event_type = self.get_event_type()
         event_guid = self.get_event_guid()
         event_ref = self.get_event_ref()
@@ -186,7 +207,7 @@ class GithubWebhookReceiver(WebhookReceiverBase):
         if hash_alg != 'sha1':
             logger.debug("Unsupported signature type, expected: sha1, received: {}".format(hash_alg))
             raise PermissionDenied
-        return force_bytes(signature)
+        return hash_alg, force_bytes(signature)


 class GitlabWebhookReceiver(WebhookReceiverBase):
@@ -214,15 +235,73 @@ class GitlabWebhookReceiver(WebhookReceiverBase):
|
|||||||
|
|
||||||
return "{}://{}/api/v4/projects/{}/statuses/{}".format(parsed.scheme, parsed.netloc, project['id'], self.get_event_ref())
|
return "{}://{}/api/v4/projects/{}/statuses/{}".format(parsed.scheme, parsed.netloc, project['id'], self.get_event_ref())
|
||||||
|
|
||||||
def get_signature(self):
|
|
||||||
return force_bytes(self.request.META.get('HTTP_X_GITLAB_TOKEN') or '')
|
|
||||||
|
|
||||||
def check_signature(self, obj):
|
def check_signature(self, obj):
|
||||||
if not obj.webhook_key:
|
if not obj.webhook_key:
|
||||||
raise PermissionDenied
|
raise PermissionDenied
|
||||||
|
|
||||||
|
token_from_request = force_bytes(self.request.META.get('HTTP_X_GITLAB_TOKEN') or '')
|
||||||
|
|
||||||
# GitLab only returns the secret token, not an hmac hash. Use
|
# GitLab only returns the secret token, not an hmac hash. Use
|
||||||
# the hmac `compare_digest` helper function to prevent timing
|
# the hmac `compare_digest` helper function to prevent timing
|
||||||
# analysis by attackers.
|
# analysis by attackers.
|
||||||
if not hmac.compare_digest(force_bytes(obj.webhook_key), self.get_signature()):
|
if not hmac.compare_digest(force_bytes(obj.webhook_key), token_from_request):
|
||||||
raise PermissionDenied
|
raise PermissionDenied
|
||||||
|
|
||||||
|
|
||||||
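GitLab sends the shared secret itself rather than an HMAC of the request body, so the comparison target changes but the constant-time helper stays. A short illustration of why compare_digest is still used even for a plain token (values are made up):

import hmac

stored_key = b's3cret-token'
presented_token = b's3cret-token'

# A plain == on bytes can short-circuit at the first mismatching byte and
# leak timing information; compare_digest takes time independent of content.
assert hmac.compare_digest(stored_key, presented_token)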
+class BitbucketDcWebhookReceiver(WebhookReceiverBase):
+    service = 'bitbucket_dc'
+
+    ref_keys = {
+        'repo:refs_changed': 'changes.0.toHash',
+        'mirror:repo_synchronized': 'changes.0.toHash',
+        'pr:opened': 'pullRequest.toRef.latestCommit',
+        'pr:from_ref_updated': 'pullRequest.toRef.latestCommit',
+        'pr:modified': 'pullRequest.toRef.latestCommit',
+    }
+
+    def get_event_type(self):
+        return self.request.META.get('HTTP_X_EVENT_KEY')
+
+    def get_event_guid(self):
+        return self.request.META.get('HTTP_X_REQUEST_ID')
+
+    def get_event_status_api(self):
+        # https://<bitbucket-base-url>/rest/build-status/1.0/commits/<commit-hash>
+        if self.get_event_type() not in self.ref_keys.keys():
+            return
+        if self.get_event_ref() is None:
+            return
+        any_url = None
+        if 'actor' in self.request.data:
+            any_url = self.request.data['actor'].get('links', {}).get('self')
+        if any_url is None and 'repository' in self.request.data:
+            any_url = self.request.data['repository'].get('links', {}).get('self')
+        if any_url is None:
+            return
+        any_url = any_url[0].get('href')
+        if any_url is None:
+            return
+        parsed = urllib.parse.urlparse(any_url)
+
+        return "{}://{}/rest/build-status/1.0/commits/{}".format(parsed.scheme, parsed.netloc, self.get_event_ref())
+
+    def is_ignored_request(self):
+        return self.get_event_type() not in [
+            'repo:refs_changed',
+            'mirror:repo_synchronized',
+            'pr:opened',
+            'pr:from_ref_updated',
+            'pr:modified',
+        ]
+
+    def must_check_signature(self):
+        # Bitbucket does not sign ping requests...
+        return self.get_event_type() != 'diagnostics:ping'
+
+    def get_signature(self):
+        header_sig = self.request.META.get('HTTP_X_HUB_SIGNATURE')
+        if not header_sig:
+            logger.debug("Expected signature missing from header key HTTP_X_HUB_SIGNATURE")
+            raise PermissionDenied
+        hash_alg, signature = header_sig.split('=')
+        return hash_alg, force_bytes(signature)
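The new receiver's ref_keys table maps each Bitbucket Data Center event type to a dotted path into the JSON payload (e.g. 'changes.0.toHash'). A hypothetical helper showing how such a path can be resolved; the diff itself does not include this function, which presumably lives behind the base class's get_event_ref:

from functools import reduce

def dig(data, dotted_path):
    # Walk a parsed JSON payload; numeric segments index into lists.
    def step(node, key):
        if node is None:
            return None
        if isinstance(node, list):
            return node[int(key)] if key.isdigit() and int(key) < len(node) else None
        return node.get(key)
    return reduce(step, dotted_path.split('.'), data)

payload = {'changes': [{'toHash': 'abc123'}]}
assert dig(payload, 'changes.0.toHash') == 'abc123'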
@@ -55,6 +55,7 @@ register(
    # Optional; category_slug will be slugified version of category if not
    # explicitly provided.
    category_slug='cows',
+    hidden=True,
)


@@ -61,6 +61,10 @@ class StringListBooleanField(ListField):

    def to_representation(self, value):
        try:
+            if isinstance(value, str):
+                # https://github.com/encode/django-rest-framework/commit/a180bde0fd965915718b070932418cabc831cee1
+                # DRF changed truthy and falsy lists to be capitalized
+                value = value.lower()
            if isinstance(value, (list, tuple)):
                return super(StringListBooleanField, self).to_representation(value)
            elif value in BooleanField.TRUE_VALUES:
@@ -78,6 +82,8 @@ class StringListBooleanField(ListField):

    def to_internal_value(self, data):
        try:
+            if isinstance(data, str):
+                data = data.lower()
            if isinstance(data, (list, tuple)):
                return super(StringListBooleanField, self).to_internal_value(data)
            elif data in BooleanField.TRUE_VALUES:
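The lowercasing guards against the DRF change referenced in the comment, where the canonical truthy/falsy value lists gained capitalized entries. A tiny standalone illustration of the normalization (the TRUE_VALUES set here is an abbreviated stand-in for DRF's):

TRUE_VALUES = {'t', 'true', '1', 1, True}  # abbreviated stand-in

def normalize(value):
    return value.lower() if isinstance(value, str) else value

assert normalize('TRUE') in TRUE_VALUES
assert normalize('True') in TRUE_VALUES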
@@ -7,8 +7,10 @@ import json
# Django
from django.db import models

+from ansible_base.lib.utils.models import prevent_search
+
# AWX
-from awx.main.models.base import CreatedModifiedModel, prevent_search
+from awx.main.models.base import CreatedModifiedModel
from awx.main.utils import encrypt_field
from awx.conf import settings_registry

@@ -127,6 +127,8 @@ class SettingsRegistry(object):
        encrypted = bool(field_kwargs.pop('encrypted', False))
        defined_in_file = bool(field_kwargs.pop('defined_in_file', False))
        unit = field_kwargs.pop('unit', None)
+        hidden = field_kwargs.pop('hidden', False)
+        warning_text = field_kwargs.pop('warning_text', None)
        if getattr(field_kwargs.get('child', None), 'source', None) is not None:
            field_kwargs['child'].source = None
        field_instance = field_class(**field_kwargs)
@@ -134,12 +136,14 @@ class SettingsRegistry(object):
        field_instance.category = category
        field_instance.depends_on = depends_on
        field_instance.unit = unit
+        field_instance.hidden = hidden
        if placeholder is not empty:
            field_instance.placeholder = placeholder
        field_instance.defined_in_file = defined_in_file
        if field_instance.defined_in_file:
            field_instance.help_text = str(_('This value has been set manually in a settings file.')) + '\n\n' + str(field_instance.help_text)
        field_instance.encrypted = encrypted
+        field_instance.warning_text = warning_text
        original_field_instance = field_instance
        if field_class != original_field_class:
            original_field_instance = original_field_class(**field_kwargs)
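With the registry now popping hidden and warning_text, a registration call can carry both. A hypothetical register() invocation in the style of the one above (the setting name and texts are invented for illustration):

register(
    'AWX_EXAMPLE_SETTING',  # hypothetical setting name
    field_class=fields.CharField,
    category=_('System'),
    category_slug='system',
    hidden=True,  # omit from UI listings
    warning_text=_('Changing this value may require a service restart.'),
)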
@@ -1,6 +1,7 @@
# Python
import contextlib
import logging
+import psycopg
import threading
import time
import os
@@ -13,7 +14,7 @@ from django.conf import settings, UserSettingsHolder
from django.core.cache import cache as django_cache
from django.core.exceptions import ImproperlyConfigured, SynchronousOnlyOperation
from django.db import transaction, connection
-from django.db.utils import Error as DBError, ProgrammingError
+from django.db.utils import DatabaseError, ProgrammingError
from django.utils.functional import cached_property

# Django REST Framework
@@ -80,18 +81,26 @@ def _ctit_db_wrapper(trans_safe=False):
            logger.debug('Obtaining database settings in spite of broken transaction.')
            transaction.set_rollback(False)
        yield
-    except DBError as exc:
+    except ProgrammingError as e:
+        # Exception raised for programming errors
+        # Examples may be table not found or already exists,
+        # this generally means we can't fetch Tower configuration
+        # because the database hasn't actually finished migrating yet;
+        # this is usually a sign that a service in a container (such as ws_broadcast)
+        # has come up *before* the database has finished migrating, and
+        # especially that the conf.settings table doesn't exist yet
+        # syntax error in the SQL statement, wrong number of parameters specified, etc.
        if trans_safe:
-            level = logger.warning
-            if isinstance(exc, ProgrammingError):
-                if 'relation' in str(exc) and 'does not exist' in str(exc):
-                    # this generally means we can't fetch Tower configuration
-                    # because the database hasn't actually finished migrating yet;
-                    # this is usually a sign that a service in a container (such as ws_broadcast)
-                    # has come up *before* the database has finished migrating, and
-                    # especially that the conf.settings table doesn't exist yet
-                    level = logger.debug
-            level(f'Database settings are not available, using defaults. error: {str(exc)}')
+            logger.debug(f'Database settings are not available, using defaults. error: {str(e)}')
+        else:
+            logger.exception('Error modifying something related to database settings.')
+    except DatabaseError as e:
+        if trans_safe:
+            cause = e.__cause__
+            if cause and hasattr(cause, 'sqlstate'):
+                sqlstate = cause.sqlstate
+                sqlstate_str = psycopg.errors.lookup(sqlstate)
+                logger.error('SQL Error state: {} - {}'.format(sqlstate, sqlstate_str))
        else:
            logger.exception('Error modifying something related to database settings.')
    finally:
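The new DatabaseError branch digs the five-character SQLSTATE out of the underlying psycopg exception and resolves it to a readable name with psycopg.errors.lookup. A minimal sketch, assuming psycopg 3 and an illustrative connection string:

import psycopg

try:
    with psycopg.connect('dbname=awx') as conn:  # connection string is illustrative
        conn.execute('SELECT * FROM missing_table')
except psycopg.Error as exc:
    if exc.sqlstate:  # e.g. '42P01' for an undefined table
        # lookup() maps the SQLSTATE code back to its exception class
        print(exc.sqlstate, psycopg.errors.lookup(exc.sqlstate).__name__)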
@@ -130,9 +130,9 @@ def test_default_setting(settings, mocker):
    settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system', default='DEFAULT')

    settings_to_cache = mocker.Mock(**{'order_by.return_value': []})
-    with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=settings_to_cache):
+    mocker.patch('awx.conf.models.Setting.objects.filter', return_value=settings_to_cache)
    assert settings.AWX_SOME_SETTING == 'DEFAULT'
    assert settings.cache.get('AWX_SOME_SETTING') == 'DEFAULT'


@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
@@ -146,9 +146,9 @@ def test_setting_is_not_from_setting_file(settings, mocker):
    settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system', default='DEFAULT')

    settings_to_cache = mocker.Mock(**{'order_by.return_value': []})
-    with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=settings_to_cache):
+    mocker.patch('awx.conf.models.Setting.objects.filter', return_value=settings_to_cache)
    assert settings.AWX_SOME_SETTING == 'DEFAULT'
    assert settings.registry.get_setting_field('AWX_SOME_SETTING').defined_in_file is False


def test_empty_setting(settings, mocker):
@@ -156,10 +156,10 @@ def test_empty_setting(settings, mocker):
    settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system')

    mocks = mocker.Mock(**{'order_by.return_value': mocker.Mock(**{'__iter__': lambda self: iter([]), 'first.return_value': None})})
-    with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks):
+    mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks)
    with pytest.raises(AttributeError):
        settings.AWX_SOME_SETTING
    assert settings.cache.get('AWX_SOME_SETTING') == SETTING_CACHE_NOTSET


def test_setting_from_db(settings, mocker):
@@ -168,9 +168,9 @@ def test_setting_from_db(settings, mocker):

    setting_from_db = mocker.Mock(key='AWX_SOME_SETTING', value='FROM_DB')
    mocks = mocker.Mock(**{'order_by.return_value': mocker.Mock(**{'__iter__': lambda self: iter([setting_from_db]), 'first.return_value': setting_from_db})})
-    with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks):
+    mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks)
    assert settings.AWX_SOME_SETTING == 'FROM_DB'
    assert settings.cache.get('AWX_SOME_SETTING') == 'FROM_DB'


@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
@@ -205,8 +205,8 @@ def test_db_setting_update(settings, mocker):

    existing_setting = mocker.Mock(key='AWX_SOME_SETTING', value='FROM_DB')
    setting_list = mocker.Mock(**{'order_by.return_value.first.return_value': existing_setting})
-    with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=setting_list):
+    mocker.patch('awx.conf.models.Setting.objects.filter', return_value=setting_list)
    settings.AWX_SOME_SETTING = 'NEW-VALUE'

    assert existing_setting.value == 'NEW-VALUE'
    existing_setting.save.assert_called_with(update_fields=['value'])
@@ -217,8 +217,8 @@ def test_db_setting_deletion(settings, mocker):
    settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system')

    existing_setting = mocker.Mock(key='AWX_SOME_SETTING', value='FROM_DB')
-    with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=[existing_setting]):
+    mocker.patch('awx.conf.models.Setting.objects.filter', return_value=[existing_setting])
    del settings.AWX_SOME_SETTING

    assert existing_setting.delete.call_count == 1

@@ -283,10 +283,10 @@ def test_sensitive_cache_data_is_encrypted(settings, mocker):
    # use its primary key as part of the encryption key
    setting_from_db = mocker.Mock(pk=123, key='AWX_ENCRYPTED', value='SECRET!')
    mocks = mocker.Mock(**{'order_by.return_value': mocker.Mock(**{'__iter__': lambda self: iter([setting_from_db]), 'first.return_value': setting_from_db})})
-    with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks):
+    mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks)
    cache.set('AWX_ENCRYPTED', 'SECRET!')
    assert cache.get('AWX_ENCRYPTED') == 'SECRET!'
    assert native_cache.get('AWX_ENCRYPTED') == 'FRPERG!'


def test_readonly_sensitive_cache_data_is_encrypted(settings):
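These test changes drop the `with` wrapper around mocker.patch: the pytest-mock fixture applies the patch immediately and undoes it automatically at test teardown, so there is nothing for a context manager to do (recent pytest-mock releases reject the `with` form outright). A minimal sketch of the intended usage:

def test_example(mocker):
    # the patch is active from this call until the end of the test
    mocked = mocker.patch('os.getcwd', return_value='/tmp')

    import os

    assert os.getcwd() == '/tmp'
    assert mocked.call_count == 1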
@@ -20,11 +20,15 @@ from rest_framework.exceptions import ParseError, PermissionDenied
# Django OAuth Toolkit
from awx.main.models.oauth import OAuth2Application, OAuth2AccessToken

+# django-ansible-base
+from ansible_base.lib.utils.validation import to_python_boolean
+from ansible_base.rbac.models import RoleEvaluation
+from ansible_base.rbac import permission_registry
+
# AWX
from awx.main.utils import (
    get_object_or_400,
    get_pk_from_dict,
-    to_python_boolean,
    get_licenser,
)
from awx.main.models import (
@@ -56,6 +60,7 @@ from awx.main.models import (
    Project,
    ProjectUpdate,
    ProjectUpdateEvent,
+    ReceptorAddress,
    Role,
    Schedule,
    SystemJob,
@@ -70,8 +75,6 @@ from awx.main.models import (
    WorkflowJobTemplateNode,
    WorkflowApproval,
    WorkflowApprovalTemplate,
-    ROLE_SINGLETON_SYSTEM_ADMINISTRATOR,
-    ROLE_SINGLETON_SYSTEM_AUDITOR,
)
from awx.main.models.mixins import ResourceMixin

@@ -79,7 +82,6 @@ __all__ = [
    'get_user_queryset',
    'check_user_access',
    'check_user_access_with_errors',
-    'user_accessible_objects',
    'consumer_access',
]

@@ -136,10 +138,6 @@ def register_access(model_class, access_class):
    access_registry[model_class] = access_class


-def user_accessible_objects(user, role_name):
-    return ResourceMixin._accessible_objects(User, user, role_name)
-
-
def get_user_queryset(user, model_class):
    """
    Return a queryset for the given model_class containing only the instances
@@ -267,7 +265,11 @@ class BaseAccess(object):
        return self.can_change(obj, data)

    def can_delete(self, obj):
-        return self.user.is_superuser
+        if self.user.is_superuser:
+            return True
+        if obj._meta.model_name in [cls._meta.model_name for cls in permission_registry.all_registered_models]:
+            return self.user.has_obj_perm(obj, 'delete')
+        return False

    def can_copy(self, obj):
        return self.can_add({'reference_obj': obj})
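BaseAccess.can_delete now falls through to an object-level permission check, but only for models registered with the DAB permission registry. A simplified sketch of that gate (class and attribute names here are illustrative):

class ExampleAccess:
    def __init__(self, user, registered_model_names):
        self.user = user
        # e.g. {'inventory', 'jobtemplate'} in the real registry
        self.registered = registered_model_names

    def can_delete(self, obj):
        if self.user.is_superuser:
            return True
        if obj._meta.model_name in self.registered:
            return self.user.has_obj_perm(obj, 'delete')
        return False  # unregistered models stay superuser-only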
@@ -596,7 +598,7 @@ class InstanceGroupAccess(BaseAccess):
    - a superuser
    - admin role on the Instance group
    I can add/delete Instance Groups:
-    - a superuser(system administrator)
+    - a superuser(system administrator), because these are not org-scoped
    I can use Instance Groups when I have:
    - use_role on the instance group
    """
@@ -625,7 +627,7 @@ class InstanceGroupAccess(BaseAccess):
    def can_delete(self, obj):
        if obj.name in [settings.DEFAULT_EXECUTION_QUEUE_NAME, settings.DEFAULT_CONTROL_PLANE_QUEUE_NAME]:
            return False
-        return self.user.is_superuser
+        return self.user.has_obj_perm(obj, 'delete')


class UserAccess(BaseAccess):
@@ -642,7 +644,10 @@ class UserAccess(BaseAccess):
    """

    model = User
-    prefetch_related = ('profile',)
+    prefetch_related = (
+        'profile',
+        'resource',
+    )

    def filtered_queryset(self):
        if settings.ORG_ADMINS_CAN_SEE_ALL_USERS and (self.user.admin_of_organizations.exists() or self.user.auditor_of_organizations.exists()):
@@ -651,9 +656,7 @@ class UserAccess(BaseAccess):
            qs = (
                User.objects.filter(pk__in=Organization.accessible_objects(self.user, 'read_role').values('member_role__members'))
                | User.objects.filter(pk=self.user.id)
-                | User.objects.filter(
-                    pk__in=Role.objects.filter(singleton_name__in=[ROLE_SINGLETON_SYSTEM_ADMINISTRATOR, ROLE_SINGLETON_SYSTEM_AUDITOR]).values('members')
-                )
+                | User.objects.filter(is_superuser=True)
            ).distinct()
            return qs

@@ -711,6 +714,15 @@ class UserAccess(BaseAccess):
            if not allow_orphans:
                # in these cases only superusers can modify orphan users
                return False
+            if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
+                # Permission granted if the user has all permissions that the target user has
+                target_perms = set(
+                    RoleEvaluation.objects.filter(role__in=obj.has_roles.all()).values_list('object_id', 'content_type_id', 'codename').distinct()
+                )
+                user_perms = set(
+                    RoleEvaluation.objects.filter(role__in=self.user.has_roles.all()).values_list('object_id', 'content_type_id', 'codename').distinct()
+                )
+                return not (target_perms - user_perms)
            return not obj.roles.all().exclude(ancestors__in=self.user.roles.all()).exists()
        else:
            return self.is_all_org_admin(obj)
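The new RoleEvaluation branch grants the change when the acting user's permission set is a superset of the target user's. The test reduces to plain set arithmetic over (object_id, content_type_id, codename) tuples:

# values are invented for illustration
target_perms = {(1, 10, 'view_inventory'), (1, 10, 'change_inventory')}
user_perms = {(1, 10, 'view_inventory'), (1, 10, 'change_inventory'), (2, 12, 'delete_project')}

# an empty difference means every permission the target holds, the actor holds too
assert not (target_perms - user_perms)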
@@ -838,6 +850,7 @@ class OrganizationAccess(NotificationAttachMixin, BaseAccess):
    prefetch_related = (
        'created_by',
        'modified_by',
+        'resource',  # dab_resource_registry
    )
    # organization admin_role is not a parent of organization auditor_role
    notification_attach_roles = ['admin_role', 'auditor_role']
@@ -948,9 +961,6 @@ class InventoryAccess(BaseAccess):
    def can_update(self, obj):
        return self.user in obj.update_role

-    def can_delete(self, obj):
-        return self.can_admin(obj, None)
-
    def can_run_ad_hoc_commands(self, obj):
        return self.user in obj.adhoc_role

@@ -1306,6 +1316,7 @@ class TeamAccess(BaseAccess):
        'created_by',
        'modified_by',
        'organization',
+        'resource',  # dab_resource_registry
    )

    def filtered_queryset(self):
@@ -1376,12 +1387,11 @@ class TeamAccess(BaseAccess):
class ExecutionEnvironmentAccess(BaseAccess):
    """
    I can see an execution environment when:
-    - I'm a superuser
-    - I'm a member of the same organization
-    - it is a global ExecutionEnvironment
+    - I can see its organization
+    - It is a global ExecutionEnvironment
    I can create/change an execution environment when:
    - I'm a superuser
-    - I'm an admin for the organization(s)
+    - I have an organization or object role that gives access
    """

    model = ExecutionEnvironment
@@ -1390,7 +1400,9 @@ class ExecutionEnvironmentAccess(BaseAccess):

    def filtered_queryset(self):
        return ExecutionEnvironment.objects.filter(
-            Q(organization__in=Organization.accessible_pk_qs(self.user, 'read_role')) | Q(organization__isnull=True)
+            Q(organization__in=Organization.accessible_pk_qs(self.user, 'read_role'))
+            | Q(organization__isnull=True)
+            | Q(id__in=ExecutionEnvironment.access_ids_qs(self.user, 'change'))
        ).distinct()

    @check_superuser
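The widened filtered_queryset ORs a third Q clause into the filter so that object-level 'change' grants also make an execution environment visible. Q objects compose before any query runs, which can be seen without a database (requires only Django installed; the field names are illustrative):

from django.db.models import Q

q = Q(organization__isnull=True) | Q(organization_id__in=[1, 2, 3])
# prints the combined expression tree: (OR: ('organization__isnull', True), ...)
print(q)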
@@ -1403,13 +1415,19 @@ class ExecutionEnvironmentAccess(BaseAccess):
    def can_change(self, obj, data):
        if obj and obj.organization_id is None:
            raise PermissionDenied
-        if self.user not in obj.organization.execution_environment_admin_role:
-            raise PermissionDenied
-        if data and 'organization' in data:
-            new_org = get_object_from_data('organization', Organization, data, obj=obj)
-            if not new_org or self.user not in new_org.execution_environment_admin_role:
-                return False
-        return self.check_related('organization', Organization, data, obj=obj, mandatory=True, role_field='execution_environment_admin_role')
+        if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
+            if not self.user.has_obj_perm(obj, 'change'):
+                return False
+        else:
+            if self.user not in obj.organization.execution_environment_admin_role:
+                raise PermissionDenied
+        if not self.check_related('organization', Organization, data, obj=obj, role_field='execution_environment_admin_role'):
+            return False
+        # Special case that check_related does not catch, org users can not remove the organization from the EE
+        if data and ('organization' in data or 'organization_id' in data):
+            if (not data.get('organization')) and (not data.get('organization_id')):
+                return False
+        return True

    def can_delete(self, obj):
        if obj.managed:
@@ -1581,6 +1599,8 @@ class JobTemplateAccess(NotificationAttachMixin, UnifiedCredentialsMixin, BaseAc
        inventory = get_value(Inventory, 'inventory')
        if inventory:
            if self.user not in inventory.use_role:
+                if self.save_messages:
+                    self.messages['inventory'] = [_('You do not have use permission on Inventory')]
                return False

        if not self.check_related('execution_environment', ExecutionEnvironment, data, role_field='read_role'):
@@ -1589,11 +1609,16 @@ class JobTemplateAccess(NotificationAttachMixin, UnifiedCredentialsMixin, BaseAc
        project = get_value(Project, 'project')
        # If the user has admin access to the project (as an org admin), should
        # be able to proceed without additional checks.
-        if project:
-            return self.user in project.use_role
-        else:
+        if not project:
            return False

+        if self.user not in project.use_role:
+            if self.save_messages:
+                self.messages['project'] = [_('You do not have use permission on Project')]
+            return False
+
+        return True

    @check_superuser
    def can_copy_related(self, obj):
        """
@@ -2077,11 +2102,23 @@ class WorkflowJobTemplateAccess(NotificationAttachMixin, BaseAccess):
        if not data:  # So the browseable API will work
            return Organization.accessible_objects(self.user, 'workflow_admin_role').exists()

-        return bool(
-            self.check_related('organization', Organization, data, role_field='workflow_admin_role', mandatory=True)
-            and self.check_related('inventory', Inventory, data, role_field='use_role')
-            and self.check_related('execution_environment', ExecutionEnvironment, data, role_field='read_role')
-        )
+        if not self.check_related('organization', Organization, data, role_field='workflow_admin_role', mandatory=True):
+            if data.get('organization', None) is None:
+                if self.save_messages:
+                    self.messages['organization'] = [_('An organization is required to create a workflow job template for normal user')]
+            return False
+
+        if not self.check_related('inventory', Inventory, data, role_field='use_role'):
+            if self.save_messages:
+                self.messages['inventory'] = [_('You do not have use_role to the inventory')]
+            return False
+
+        if not self.check_related('execution_environment', ExecutionEnvironment, data, role_field='read_role'):
+            if self.save_messages:
+                self.messages['execution_environment'] = [_('You do not have read_role to the execution environment')]
+            return False
+
+        return True

    def can_copy(self, obj):
        if self.save_messages:
@@ -2434,6 +2471,29 @@ class InventoryUpdateEventAccess(BaseAccess):
        return False


+class ReceptorAddressAccess(BaseAccess):
+    """
+    I can see receptor address records whenever I can access the instance
+    """
+
+    model = ReceptorAddress
+
+    def filtered_queryset(self):
+        return self.model.objects.filter(Q(instance__in=Instance.accessible_pk_qs(self.user, 'read_role')))
+
+    @check_superuser
+    def can_add(self, data):
+        return False
+
+    @check_superuser
+    def can_change(self, obj, data):
+        return False
+
+    @check_superuser
+    def can_delete(self, obj):
+        return False
+
+
class SystemJobEventAccess(BaseAccess):
    """
    I can only see manage System Jobs events if I'm a super user
@@ -2567,6 +2627,8 @@ class ScheduleAccess(UnifiedCredentialsMixin, BaseAccess):
        if not JobLaunchConfigAccess(self.user).can_add(data):
            return False
        if not data:
+            if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
+                return self.user.has_roles.filter(permission_partials__codename__in=['execute_jobtemplate', 'update_project', 'update_inventory']).exists()
            return Role.objects.filter(role_field__in=['update_role', 'execute_role'], ancestors__in=self.user.roles.all()).exists()

        return self.check_related('unified_job_template', UnifiedJobTemplate, data, role_field='execute_role', mandatory=True)
@@ -2588,13 +2650,15 @@ class ScheduleAccess(UnifiedCredentialsMixin, BaseAccess):

class NotificationTemplateAccess(BaseAccess):
    """
-    I can see/use a notification_template if I have permission to
+    Run standard logic from DAB RBAC
    """

    model = NotificationTemplate
    prefetch_related = ('created_by', 'modified_by', 'organization')

    def filtered_queryset(self):
+        if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
+            return self.model.access_qs(self.user, 'view')
        return self.model.objects.filter(
            Q(organization__in=Organization.accessible_objects(self.user, 'notification_admin_role')) | Q(organization__in=self.user.auditor_of_organizations)
        ).distinct()
@@ -2607,10 +2671,7 @@ class NotificationTemplateAccess(BaseAccess):

    @check_superuser
    def can_change(self, obj, data):
-        if obj.organization is None:
-            # only superusers are allowed to edit orphan notification templates
-            return False
-        return self.check_related('organization', Organization, data, obj=obj, role_field='notification_admin_role', mandatory=True)
+        return self.user.has_obj_perm(obj, 'change') and self.check_related('organization', Organization, data, obj=obj, role_field='notification_admin_role')

    def can_admin(self, obj, data):
        return self.can_change(obj, data)
@@ -2620,9 +2681,7 @@ class NotificationTemplateAccess(BaseAccess):

    @check_superuser
    def can_start(self, obj, validate_license=True):
-        if obj.organization is None:
-            return False
-        return self.user in obj.organization.notification_admin_role
+        return self.can_change(obj, None)


class NotificationAccess(BaseAccess):
@@ -2763,7 +2822,7 @@ class ActivityStreamAccess(BaseAccess):
                | Q(notification_template__organization__in=auditing_orgs)
                | Q(notification__notification_template__organization__in=auditing_orgs)
                | Q(label__organization__in=auditing_orgs)
-                | Q(role__in=Role.objects.filter(ancestors__in=self.user.roles.all()) if auditing_orgs else [])
+                | Q(role__in=Role.visible_roles(self.user) if auditing_orgs else [])
            )

            project_set = Project.accessible_pk_qs(self.user, 'read_role')
@@ -2820,13 +2879,10 @@ class RoleAccess(BaseAccess):

    def filtered_queryset(self):
        result = Role.visible_roles(self.user)
-        # Sanity check: is the requesting user an orphaned non-admin/auditor?
-        # if yes, make system admin/auditor mandatorily visible.
-        if not self.user.is_superuser and not self.user.is_system_auditor and not self.user.organizations.exists():
-            mandatories = ('system_administrator', 'system_auditor')
-            super_qs = Role.objects.filter(singleton_name__in=mandatories)
-            result = result | super_qs
-        return result
+        # Make system admin/auditor mandatorily visible.
+        mandatories = ('system_administrator', 'system_auditor')
+        super_qs = Role.objects.filter(singleton_name__in=mandatories)
+        return result | super_qs

    def can_add(self, obj, data):
        # Unsupported for now
@@ -2,7 +2,7 @@
import logging

# AWX
-from awx.main.analytics.subsystem_metrics import Metrics
+from awx.main.analytics.subsystem_metrics import DispatcherMetrics, CallbackReceiverMetrics
from awx.main.dispatch.publish import task
from awx.main.dispatch import get_task_queuename

@@ -11,4 +11,5 @@ logger = logging.getLogger('awx.main.scheduler')

@task(queue=get_task_queuename)
def send_subsystem_metrics():
-    Metrics().send_metrics()
+    DispatcherMetrics().send_metrics()
+    CallbackReceiverMetrics().send_metrics()
@@ -66,10 +66,8 @@ class FixedSlidingWindow:


class RelayWebsocketStatsManager:
-    def __init__(self, event_loop, local_hostname):
+    def __init__(self, local_hostname):
        self._local_hostname = local_hostname
-
-        self._event_loop = event_loop
        self._stats = dict()
        self._redis_key = BROADCAST_WEBSOCKET_REDIS_KEY_NAME

@@ -94,7 +92,10 @@ class RelayWebsocketStatsManager:
            self.start()

    def start(self):
-        self.async_task = self._event_loop.create_task(self.run_loop())
+        self.async_task = asyncio.get_running_loop().create_task(
+            self.run_loop(),
+            name='RelayWebsocketStatsManager.run_loop',
+        )
        return self.async_task

    @classmethod
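Dropping the stored event loop in favor of asyncio.get_running_loop() means the manager no longer needs a loop threaded through its constructor, and naming the task makes it easier to spot in debug output. A runnable sketch of the pattern (the coroutine and task names are illustrative):

import asyncio

async def run_loop():
    await asyncio.sleep(0)  # stand-in for the real stats loop

async def main():
    # get_running_loop() must be called from within a coroutine
    task = asyncio.get_running_loop().create_task(run_loop(), name='stats.run_loop')
    await task
    print(task.get_name())  # -> stats.run_loop

asyncio.run(main())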
@@ -419,7 +419,7 @@ def _events_table(since, full_path, until, tbl, where_column, project_job_create
            resolved_action,
            resolved_role,
            -- '-' operator listed here:
-            -- https://www.postgresql.org/docs/12/functions-json.html
+            -- https://www.postgresql.org/docs/15/functions-json.html
            -- note that operator is only supported by jsonb objects
            -- https://www.postgresql.org/docs/current/datatype-json.html
            (CASE WHEN event = 'playbook_on_stats' THEN {event_data} - 'artifact_data' END) as playbook_on_stats,
@@ -1,10 +1,15 @@
+import itertools
import redis
import json
import time
import logging

+import prometheus_client
+from prometheus_client.core import GaugeMetricFamily, HistogramMetricFamily
+from prometheus_client.registry import CollectorRegistry
from django.conf import settings
-from django.apps import apps
+from django.http import HttpRequest
+from rest_framework.request import Request

from awx.main.consumers import emit_channel_notification
from awx.main.utils import is_testing
@@ -13,6 +18,30 @@ root_key = settings.SUBSYSTEM_METRICS_REDIS_KEY_PREFIX
logger = logging.getLogger('awx.main.analytics')


+class MetricsNamespace:
+    def __init__(self, namespace):
+        self._namespace = namespace
+
+
+class MetricsServerSettings(MetricsNamespace):
+    def port(self):
+        return settings.METRICS_SUBSYSTEM_CONFIG['server'][self._namespace]['port']
+
+
+class MetricsServer(MetricsServerSettings):
+    def __init__(self, namespace, registry):
+        MetricsNamespace.__init__(self, namespace)
+        self._registry = registry
+
+    def start(self):
+        try:
+            # TODO: addr for ipv6 ?
+            prometheus_client.start_http_server(self.port(), addr='localhost', registry=self._registry)
+        except Exception:
+            logger.error(f"MetricsServer failed to start for service '{self._namespace}.")
+            raise
+
+
class BaseM:
    def __init__(self, field, help_text):
        self.field = field
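MetricsServer wraps prometheus_client's built-in HTTP exporter around a per-service registry. A self-contained sketch of that exporter with an invented metric and port:

import prometheus_client
from prometheus_client import CollectorRegistry, Gauge

# a dedicated registry keeps these metrics out of the global default registry
registry = CollectorRegistry()
queue_depth = Gauge('example_queue_depth', 'Current queue depth', registry=registry)
queue_depth.set(42)

# port and bind address are illustrative
prometheus_client.start_http_server(9400, addr='localhost', registry=registry)
# GET http://localhost:9400/metrics now reports example_queue_depth 42.0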
@@ -148,76 +177,40 @@ class HistogramM(BaseM):
        return output_text


-class Metrics:
-    def __init__(self, auto_pipe_execute=False, instance_name=None):
+class Metrics(MetricsNamespace):
+    # metric name, help_text
+    METRICSLIST = []
+    _METRICSLIST = [
+        FloatM('subsystem_metrics_pipe_execute_seconds', 'Time spent saving metrics to redis'),
+        IntM('subsystem_metrics_pipe_execute_calls', 'Number of calls to pipe_execute'),
+        FloatM('subsystem_metrics_send_metrics_seconds', 'Time spent sending metrics to other nodes'),
+    ]
+
+    def __init__(self, namespace, auto_pipe_execute=False, instance_name=None, metrics_have_changed=True, **kwargs):
+        MetricsNamespace.__init__(self, namespace)
+
        self.pipe = redis.Redis.from_url(settings.BROKER_URL).pipeline()
        self.conn = redis.Redis.from_url(settings.BROKER_URL)
        self.last_pipe_execute = time.time()
        # track if metrics have been modified since last saved to redis
        # start with True so that we get an initial save to redis
-        self.metrics_have_changed = True
+        self.metrics_have_changed = metrics_have_changed
        self.pipe_execute_interval = settings.SUBSYSTEM_METRICS_INTERVAL_SAVE_TO_REDIS
        self.send_metrics_interval = settings.SUBSYSTEM_METRICS_INTERVAL_SEND_METRICS
        # auto pipe execute will commit transaction of metric data to redis
        # at a regular interval (pipe_execute_interval). If set to False,
        # the calling function should call .pipe_execute() explicitly
        self.auto_pipe_execute = auto_pipe_execute
-        Instance = apps.get_model('main', 'Instance')
        if instance_name:
            self.instance_name = instance_name
        elif is_testing():
            self.instance_name = "awx_testing"
        else:
-            self.instance_name = Instance.objects.my_hostname()
+            self.instance_name = settings.CLUSTER_HOST_ID  # Same as Instance.objects.my_hostname() BUT we do not need to import Instance

-        # metric name, help_text
-        METRICSLIST = [
-            SetIntM('callback_receiver_events_queue_size_redis', 'Current number of events in redis queue'),
-            IntM('callback_receiver_events_popped_redis', 'Number of events popped from redis'),
-            IntM('callback_receiver_events_in_memory', 'Current number of events in memory (in transfer from redis to db)'),
-            IntM('callback_receiver_batch_events_errors', 'Number of times batch insertion failed'),
-            FloatM('callback_receiver_events_insert_db_seconds', 'Total time spent saving events to database'),
-            IntM('callback_receiver_events_insert_db', 'Number of events batch inserted into database'),
-            IntM('callback_receiver_events_broadcast', 'Number of events broadcast to other control plane nodes'),
-            HistogramM(
-                'callback_receiver_batch_events_insert_db', 'Number of events batch inserted into database', settings.SUBSYSTEM_METRICS_BATCH_INSERT_BUCKETS
-            ),
-            SetFloatM('callback_receiver_event_processing_avg_seconds', 'Average processing time per event per callback receiver batch'),
-            FloatM('subsystem_metrics_pipe_execute_seconds', 'Time spent saving metrics to redis'),
-            IntM('subsystem_metrics_pipe_execute_calls', 'Number of calls to pipe_execute'),
-            FloatM('subsystem_metrics_send_metrics_seconds', 'Time spent sending metrics to other nodes'),
-            SetFloatM('task_manager_get_tasks_seconds', 'Time spent in loading tasks from db'),
-            SetFloatM('task_manager_start_task_seconds', 'Time spent starting task'),
-            SetFloatM('task_manager_process_running_tasks_seconds', 'Time spent processing running tasks'),
-            SetFloatM('task_manager_process_pending_tasks_seconds', 'Time spent processing pending tasks'),
-            SetFloatM('task_manager__schedule_seconds', 'Time spent in running the entire _schedule'),
-            IntM('task_manager__schedule_calls', 'Number of calls to _schedule, after lock is acquired'),
-            SetFloatM('task_manager_recorded_timestamp', 'Unix timestamp when metrics were last recorded'),
-            SetIntM('task_manager_tasks_started', 'Number of tasks started'),
-            SetIntM('task_manager_running_processed', 'Number of running tasks processed'),
-            SetIntM('task_manager_pending_processed', 'Number of pending tasks processed'),
-            SetIntM('task_manager_tasks_blocked', 'Number of tasks blocked from running'),
-            SetFloatM('task_manager_commit_seconds', 'Time spent in db transaction, including on_commit calls'),
-            SetFloatM('dependency_manager_get_tasks_seconds', 'Time spent loading pending tasks from db'),
-            SetFloatM('dependency_manager_generate_dependencies_seconds', 'Time spent generating dependencies for pending tasks'),
-            SetFloatM('dependency_manager__schedule_seconds', 'Time spent in running the entire _schedule'),
-            IntM('dependency_manager__schedule_calls', 'Number of calls to _schedule, after lock is acquired'),
-            SetFloatM('dependency_manager_recorded_timestamp', 'Unix timestamp when metrics were last recorded'),
-            SetIntM('dependency_manager_pending_processed', 'Number of pending tasks processed'),
-            SetFloatM('workflow_manager__schedule_seconds', 'Time spent in running the entire _schedule'),
-            IntM('workflow_manager__schedule_calls', 'Number of calls to _schedule, after lock is acquired'),
-            SetFloatM('workflow_manager_recorded_timestamp', 'Unix timestamp when metrics were last recorded'),
-            SetFloatM('workflow_manager_spawn_workflow_graph_jobs_seconds', 'Time spent spawning workflow tasks'),
-            SetFloatM('workflow_manager_get_tasks_seconds', 'Time spent loading workflow tasks from db'),
-            # dispatcher subsystem metrics
-            SetIntM('dispatcher_pool_scale_up_events', 'Number of times local dispatcher scaled up a worker since startup'),
-            SetIntM('dispatcher_pool_active_task_count', 'Number of active tasks in the worker pool when last task was submitted'),
-            SetIntM('dispatcher_pool_max_worker_count', 'Highest number of workers in worker pool in last collection interval, about 20s'),
-            SetFloatM('dispatcher_availability', 'Fraction of time (in last collection interval) dispatcher was able to receive messages'),
-        ]
+
        # turn metric list into dictionary with the metric name as a key
        self.METRICS = {}
-        for m in METRICSLIST:
+        for m in itertools.chain(self.METRICSLIST, self._METRICSLIST):
            self.METRICS[m.field] = m

        # track last time metrics were sent to other nodes
@@ -230,7 +223,7 @@ class Metrics:
            m.reset_value(self.conn)
        self.metrics_have_changed = True
        self.conn.delete(root_key + "_lock")
-        for m in self.conn.scan_iter(root_key + '_instance_*'):
+        for m in self.conn.scan_iter(root_key + '-' + self._namespace + '_instance_*'):
            self.conn.delete(m)

    def inc(self, field, value):
@@ -297,7 +290,7 @@ class Metrics:
    def send_metrics(self):
        # more than one thread could be calling this at the same time, so should
        # acquire redis lock before sending metrics
-        lock = self.conn.lock(root_key + '_lock')
+        lock = self.conn.lock(root_key + '-' + self._namespace + '_lock')
        if not lock.acquire(blocking=False):
            return
        try:
@@ -307,9 +300,10 @@ class Metrics:
            payload = {
                'instance': self.instance_name,
                'metrics': serialized_metrics,
+                'metrics_namespace': self._namespace,
            }
            # store the serialized data locally as well, so that load_other_metrics will read it
-            self.conn.set(root_key + '_instance_' + self.instance_name, serialized_metrics)
+            self.conn.set(root_key + '-' + self._namespace + '_instance_' + self.instance_name, serialized_metrics)
            emit_channel_notification("metrics", payload)

            self.previous_send_metrics.set(current_time)
@@ -331,14 +325,14 @@ class Metrics:
        instances_filter = request.query_params.getlist("node")
        # get a sorted list of instance names
        instance_names = [self.instance_name]
-        for m in self.conn.scan_iter(root_key + '_instance_*'):
+        for m in self.conn.scan_iter(root_key + '-' + self._namespace + '_instance_*'):
            instance_names.append(m.decode('UTF-8').split('_instance_')[1])
        instance_names.sort()
        # load data, including data from the this local instance
        instance_data = {}
        for instance in instance_names:
            if len(instances_filter) == 0 or instance in instances_filter:
-                instance_data_from_redis = self.conn.get(root_key + '_instance_' + instance)
+                instance_data_from_redis = self.conn.get(root_key + '-' + self._namespace + '_instance_' + instance)
                # data from other instances may not be available. That is OK.
                if instance_data_from_redis:
                    instance_data[instance] = json.loads(instance_data_from_redis.decode('UTF-8'))
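Every redis key the class touches now embeds the namespace, so dispatcher and callback-receiver metrics can no longer collide. The key scheme itself is simple string composition (prefix and names invented for illustration):

root_key = 'awx_metrics'  # stand-in for settings.SUBSYSTEM_METRICS_REDIS_KEY_PREFIX

def instance_key(namespace, instance_name):
    return root_key + '-' + namespace + '_instance_' + instance_name

assert instance_key('dispatcher', 'node1') == 'awx_metrics-dispatcher_instance_node1'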
@@ -357,6 +351,120 @@ class Metrics:
        return output_text


+class DispatcherMetrics(Metrics):
+    METRICSLIST = [
+        SetFloatM('task_manager_get_tasks_seconds', 'Time spent in loading tasks from db'),
+        SetFloatM('task_manager_start_task_seconds', 'Time spent starting task'),
+        SetFloatM('task_manager_process_running_tasks_seconds', 'Time spent processing running tasks'),
+        SetFloatM('task_manager_process_pending_tasks_seconds', 'Time spent processing pending tasks'),
+        SetFloatM('task_manager__schedule_seconds', 'Time spent in running the entire _schedule'),
+        IntM('task_manager__schedule_calls', 'Number of calls to _schedule, after lock is acquired'),
+        SetFloatM('task_manager_recorded_timestamp', 'Unix timestamp when metrics were last recorded'),
+        SetIntM('task_manager_tasks_started', 'Number of tasks started'),
+        SetIntM('task_manager_running_processed', 'Number of running tasks processed'),
+        SetIntM('task_manager_pending_processed', 'Number of pending tasks processed'),
+        SetIntM('task_manager_tasks_blocked', 'Number of tasks blocked from running'),
+        SetFloatM('task_manager_commit_seconds', 'Time spent in db transaction, including on_commit calls'),
+        SetFloatM('dependency_manager_get_tasks_seconds', 'Time spent loading pending tasks from db'),
+        SetFloatM('dependency_manager_generate_dependencies_seconds', 'Time spent generating dependencies for pending tasks'),
+        SetFloatM('dependency_manager__schedule_seconds', 'Time spent in running the entire _schedule'),
+        IntM('dependency_manager__schedule_calls', 'Number of calls to _schedule, after lock is acquired'),
+        SetFloatM('dependency_manager_recorded_timestamp', 'Unix timestamp when metrics were last recorded'),
+        SetIntM('dependency_manager_pending_processed', 'Number of pending tasks processed'),
+        SetFloatM('workflow_manager__schedule_seconds', 'Time spent in running the entire _schedule'),
+        IntM('workflow_manager__schedule_calls', 'Number of calls to _schedule, after lock is acquired'),
+        SetFloatM('workflow_manager_recorded_timestamp', 'Unix timestamp when metrics were last recorded'),
+        SetFloatM('workflow_manager_spawn_workflow_graph_jobs_seconds', 'Time spent spawning workflow tasks'),
+        SetFloatM('workflow_manager_get_tasks_seconds', 'Time spent loading workflow tasks from db'),
+        # dispatcher subsystem metrics
+        SetIntM('dispatcher_pool_scale_up_events', 'Number of times local dispatcher scaled up a worker since startup'),
+        SetIntM('dispatcher_pool_active_task_count', 'Number of active tasks in the worker pool when last task was submitted'),
+        SetIntM('dispatcher_pool_max_worker_count', 'Highest number of workers in worker pool in last collection interval, about 20s'),
+        SetFloatM('dispatcher_availability', 'Fraction of time (in last collection interval) dispatcher was able to receive messages'),
+    ]
+
+    def __init__(self, *args, **kwargs):
+        super().__init__(settings.METRICS_SERVICE_DISPATCHER, *args, **kwargs)
+
+
+class CallbackReceiverMetrics(Metrics):
+    METRICSLIST = [
+        SetIntM('callback_receiver_events_queue_size_redis', 'Current number of events in redis queue'),
+        IntM('callback_receiver_events_popped_redis', 'Number of events popped from redis'),
+        IntM('callback_receiver_events_in_memory', 'Current number of events in memory (in transfer from redis to db)'),
+        IntM('callback_receiver_batch_events_errors', 'Number of times batch insertion failed'),
+        FloatM('callback_receiver_events_insert_db_seconds', 'Total time spent saving events to database'),
+        IntM('callback_receiver_events_insert_db', 'Number of events batch inserted into database'),
|
||||||
|
IntM('callback_receiver_events_broadcast', 'Number of events broadcast to other control plane nodes'),
|
||||||
|
HistogramM(
|
||||||
|
'callback_receiver_batch_events_insert_db', 'Number of events batch inserted into database', settings.SUBSYSTEM_METRICS_BATCH_INSERT_BUCKETS
|
||||||
|
),
|
||||||
|
SetFloatM('callback_receiver_event_processing_avg_seconds', 'Average processing time per event per callback receiver batch'),
|
||||||
|
]
|
||||||
|
|
||||||
|
def __init__(self, *args, **kwargs):
|
||||||
|
super().__init__(settings.METRICS_SERVICE_CALLBACK_RECEIVER, *args, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
def metrics(request):
|
def metrics(request):
|
||||||
m = Metrics()
|
output_text = ''
|
||||||
return m.generate_metrics(request)
|
for m in [DispatcherMetrics(), CallbackReceiverMetrics()]:
|
||||||
|
output_text += m.generate_metrics(request)
|
||||||
|
return output_text
|
||||||
|
|
||||||
|
|
||||||
|
class CustomToPrometheusMetricsCollector(prometheus_client.registry.Collector):
|
||||||
|
"""
|
||||||
|
Takes the metric data from redis -> our custom metric fields -> prometheus
|
||||||
|
library metric fields.
|
||||||
|
|
||||||
|
The plan is to get rid of the use of redis, our custom metric fields, and
|
||||||
|
to switch fully to the prometheus library. At that point, this translation
|
||||||
|
code will be deleted.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, metrics_obj, *args, **kwargs):
|
||||||
|
super().__init__(*args, **kwargs)
|
||||||
|
self._metrics = metrics_obj
|
||||||
|
|
||||||
|
def collect(self):
|
||||||
|
my_hostname = settings.CLUSTER_HOST_ID
|
||||||
|
|
||||||
|
instance_data = self._metrics.load_other_metrics(Request(HttpRequest()))
|
||||||
|
if not instance_data:
|
||||||
+            logger.debug(f"No metric data found in redis for metric namespace '{self._metrics._namespace}'")
+            return None
+
+        host_metrics = instance_data.get(my_hostname)
+        for _, metric in self._metrics.METRICS.items():
+            entry = host_metrics.get(metric.field)
+            if not entry:
+                logger.debug(f"{self._metrics._namespace} metric '{metric.field}' not found in redis data payload {json.dumps(instance_data, indent=2)}")
+                continue
+            if isinstance(metric, HistogramM):
+                buckets = list(zip(metric.buckets, entry['counts']))
+                buckets = [[str(i[0]), str(i[1])] for i in buckets]
+                yield HistogramMetricFamily(metric.field, metric.help_text, buckets=buckets, sum_value=entry['sum'])
+            else:
+                yield GaugeMetricFamily(metric.field, metric.help_text, value=entry)
+
+
+class CallbackReceiverMetricsServer(MetricsServer):
+    def __init__(self):
+        registry = CollectorRegistry(auto_describe=True)
+        registry.register(CustomToPrometheusMetricsCollector(CallbackReceiverMetrics(metrics_have_changed=False)))
+        super().__init__(settings.METRICS_SERVICE_CALLBACK_RECEIVER, registry)
+
+
+class DispatcherMetricsServer(MetricsServer):
+    def __init__(self):
+        registry = CollectorRegistry(auto_describe=True)
+        registry.register(CustomToPrometheusMetricsCollector(DispatcherMetrics(metrics_have_changed=False)))
+        super().__init__(settings.METRICS_SERVICE_DISPATCHER, registry)
+
+
+class WebsocketsMetricsServer(MetricsServer):
+    def __init__(self):
+        registry = CollectorRegistry(auto_describe=True)
+        # registry.register()
+        super().__init__(settings.METRICS_SERVICE_WEBSOCKETS, registry)
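The net effect of the hunks above is that every redis key and lock the metrics code touches is now namespaced per service. A minimal sketch of the resulting key scheme, assuming illustrative values for root_key and the namespaces (the helper names are not part of the change):

    # Sketch only: mirrors the key construction used in the diff above.
    def metrics_key(root_key, namespace, instance):
        return root_key + '-' + namespace + '_instance_' + instance

    def metrics_lock_key(root_key, namespace):
        return root_key + '-' + namespace + '_lock'

    # Dispatcher and callback receiver metrics on the same node no longer collide:
    assert metrics_key('awx_metrics', 'dispatcher', 'node1') != metrics_key('awx_metrics', 'callback_receiver', 'node1')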

@@ -1,7 +1,40 @@
 from django.apps import AppConfig
 from django.utils.translation import gettext_lazy as _
+from awx.main.utils.named_url_graph import _customize_graph, generate_graph
+from awx.conf import register, fields


 class MainConfig(AppConfig):
     name = 'awx.main'
     verbose_name = _('Main')
+
+    def load_named_url_feature(self):
+        models = [m for m in self.get_models() if hasattr(m, 'get_absolute_url')]
+        generate_graph(models)
+        _customize_graph()
+        register(
+            'NAMED_URL_FORMATS',
+            field_class=fields.DictField,
+            read_only=True,
+            label=_('Formats of all available named urls'),
+            help_text=_('Read-only list of key-value pairs that shows the standard format of all available named URLs.'),
+            category=_('Named URL'),
+            category_slug='named-url',
+        )
+        register(
+            'NAMED_URL_GRAPH_NODES',
+            field_class=fields.DictField,
+            read_only=True,
+            label=_('List of all named url graph nodes.'),
+            help_text=_(
+                'Read-only list of key-value pairs that exposes named URL graph topology.'
+                ' Use this list to programmatically generate named URLs for resources'
+            ),
+            category=_('Named URL'),
+            category_slug='named-url',
+        )
+
+    def ready(self):
+        super().ready()
+
+        self.load_named_url_feature()

@@ -2,6 +2,7 @@
 import logging

 # Django
+from django.core.checks import Error
 from django.utils.translation import gettext_lazy as _

 # Django REST Framework
@@ -92,6 +93,7 @@ register(
     ),
     category=_('System'),
     category_slug='system',
+    required=False,
 )

 register(
@@ -774,6 +776,7 @@ register(
     allow_null=True,
     category=_('System'),
     category_slug='system',
+    required=False,
 )
 register(
     'AUTOMATION_ANALYTICS_LAST_ENTRIES',
@@ -815,6 +818,7 @@ register(
     help_text=_('Max jobs to allow bulk jobs to launch'),
     category=_('Bulk Actions'),
     category_slug='bulk',
+    hidden=True,
 )

 register(
@@ -825,6 +829,18 @@ register(
     help_text=_('Max number of hosts to allow to be created in a single bulk action'),
     category=_('Bulk Actions'),
     category_slug='bulk',
+    hidden=True,
+)
+
+register(
+    'BULK_HOST_MAX_DELETE',
+    field_class=fields.IntegerField,
+    default=250,
+    label=_('Max number of hosts to allow to be deleted in a single bulk action'),
+    help_text=_('Max number of hosts to allow to be deleted in a single bulk action'),
+    category=_('Bulk Actions'),
+    category_slug='bulk',
+    hidden=True,
 )

 register(
@@ -835,6 +851,7 @@ register(
     help_text=_('Enable preview of new user interface.'),
     category=_('System'),
     category_slug='system',
+    hidden=True,
 )

 register(
@@ -912,6 +929,16 @@ register(
     category_slug='debug',
 )
+
+register(
+    'RECEPTOR_KEEP_WORK_ON_ERROR',
+    field_class=fields.BooleanField,
+    label=_('Keep receptor work on error'),
+    default=False,
+    help_text=_('Prevent receptor work from being released when an error is detected'),
+    category=_('Debug'),
+    category_slug='debug',
+)


 def logging_validate(serializer, attrs):
     if not serializer.instance or not hasattr(serializer.instance, 'LOG_AGGREGATOR_HOST') or not hasattr(serializer.instance, 'LOG_AGGREGATOR_TYPE'):
@@ -938,3 +965,27 @@ def logging_validate(serializer, attrs):


 register_validate('logging', logging_validate)
+
+
+def csrf_trusted_origins_validate(serializer, attrs):
+    if not serializer.instance or not hasattr(serializer.instance, 'CSRF_TRUSTED_ORIGINS'):
+        return attrs
+    if 'CSRF_TRUSTED_ORIGINS' not in attrs:
+        return attrs
+    errors = []
+    for origin in attrs['CSRF_TRUSTED_ORIGINS']:
+        if "://" not in origin:
+            errors.append(
+                Error(
+                    "As of Django 4.0, the values in the CSRF_TRUSTED_ORIGINS "
+                    "setting must start with a scheme (usually http:// or "
+                    "https://) but found %s. See the release notes for details." % origin,
+                )
+            )
+    if errors:
+        error_messages = [error.msg for error in errors]
+        raise serializers.ValidationError(_('\n'.join(error_messages)))
+    return attrs
+
+
+register_validate('system', csrf_trusted_origins_validate)
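The new validator only enforces the Django 4.0 requirement that each trusted origin carry a scheme. A standalone sketch of the rule, with origin_is_valid as an illustrative helper rather than anything in the diff:

    def origin_is_valid(origin):
        # Django 4.0 requires CSRF_TRUSTED_ORIGINS entries to start with a scheme.
        return "://" in origin

    assert origin_is_valid("https://awx.example.com")
    assert not origin_is_valid("awx.example.com")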

@@ -14,7 +14,7 @@ __all__ = [
     'STANDARD_INVENTORY_UPDATE_ENV',
 ]

-CLOUD_PROVIDERS = ('azure_rm', 'ec2', 'gce', 'vmware', 'openstack', 'rhv', 'satellite6', 'controller', 'insights')
+CLOUD_PROVIDERS = ('azure_rm', 'ec2', 'gce', 'vmware', 'openstack', 'rhv', 'satellite6', 'controller', 'insights', 'terraform', 'openshift_virtualization')
 PRIVILEGE_ESCALATION_METHODS = [
     ('sudo', _('Sudo')),
     ('su', _('Su')),
@@ -43,6 +43,7 @@ STANDARD_INVENTORY_UPDATE_ENV = {
 }
 CAN_CANCEL = ('new', 'pending', 'waiting', 'running')
 ACTIVE_STATES = CAN_CANCEL
+ERROR_STATES = ('error',)
 MINIMAL_EVENTS = set(['playbook_on_play_start', 'playbook_on_task_start', 'playbook_on_stats', 'EOF'])
 CENSOR_VALUE = '************'
 ENV_BLOCKLIST = frozenset(
@@ -114,3 +115,28 @@ SUBSCRIPTION_USAGE_MODEL_UNIQUE_HOSTS = 'unique_managed_hosts'

 # Shared prefetch to use for creating a queryset for the purpose of writing or saving facts
 HOST_FACTS_FIELDS = ('name', 'ansible_facts', 'ansible_facts_modified', 'modified', 'inventory_id')
+
+# Data for RBAC compatibility layer
+role_name_to_perm_mapping = {
+    'adhoc_role': ['adhoc_'],
+    'approval_role': ['approve_'],
+    'auditor_role': ['audit_'],
+    'admin_role': ['change_', 'add_', 'delete_'],
+    'execute_role': ['execute_'],
+    'read_role': ['view_'],
+    'update_role': ['update_'],
+    'member_role': ['member_'],
+    'use_role': ['use_'],
+}
+
+org_role_to_permission = {
+    'notification_admin_role': 'add_notificationtemplate',
+    'project_admin_role': 'add_project',
+    'execute_role': 'execute_jobtemplate',
+    'inventory_admin_role': 'add_inventory',
+    'credential_admin_role': 'add_credential',
+    'workflow_admin_role': 'add_workflowjobtemplate',
+    'job_template_admin_role': 'change_jobtemplate',  # TODO: this doesn't really work, solution not clear
+    'execution_environment_admin_role': 'add_executionenvironment',
+    'auditor_role': 'view_project',  # TODO: also doesn't really work
+}
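These tables back the RBAC compatibility layer: an old-style role name expands to new-style permission codenames by prefixing the model name. A hedged sketch, with expand_old_role and the model name as illustrative assumptions (a two-entry subset of the full table above stands in for it):

    role_name_to_perm_mapping = {'admin_role': ['change_', 'add_', 'delete_'], 'read_role': ['view_']}

    def expand_old_role(role_name, model_name):
        # e.g. 'admin_role' on 'inventory' -> change_inventory, add_inventory, delete_inventory
        return [prefix + model_name for prefix in role_name_to_perm_mapping.get(role_name, [])]

    print(expand_old_role('admin_role', 'inventory'))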

@@ -106,7 +106,7 @@ class RelayConsumer(AsyncJsonWebsocketConsumer):
         if group == "metrics":
             message = json.loads(message['text'])
             conn = redis.Redis.from_url(settings.BROKER_URL)
-            conn.set(settings.SUBSYSTEM_METRICS_REDIS_KEY_PREFIX + "_instance_" + message['instance'], message['metrics'])
+            conn.set(settings.SUBSYSTEM_METRICS_REDIS_KEY_PREFIX + "-" + message['metrics_namespace'] + "_instance_" + message['instance'], message['metrics'])
         else:
             await self.channel_layer.group_send(group, message)

@@ -58,7 +58,7 @@ aim_inputs = {
         'id': 'object_property',
         'label': _('Object Property'),
         'type': 'string',
-        'help_text': _('The property of the object to return. Default: Content Ex: Username, Address, etc.'),
+        'help_text': _('The property of the object to return. Available properties: Username, Password and Address.'),
     },
     {
         'id': 'reason',
@@ -111,8 +111,12 @@ def aim_backend(**kwargs):
         object_property = 'Content'
     elif object_property.lower() == 'username':
         object_property = 'UserName'
+    elif object_property.lower() == 'password':
+        object_property = 'Content'
+    elif object_property.lower() == 'address':
+        object_property = 'Address'
     elif object_property not in res:
-        raise KeyError('Property {} not found in object'.format(object_property))
+        raise KeyError('Property {} not found in object, available properties: Username, Password and Address'.format(object_property))
     else:
         object_property = object_property.capitalize()
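The new branches make the property lookup case-insensitive for the three supported names, with 'password' mapping to 'Content' because CyberArk AIM returns the secret value itself under the Content property. Read as a table, this is a sketch only, ignoring the membership check against res:

    # Illustrative condensation of the elif chain above; not part of the diff.
    PROPERTY_MAP = {'': 'Content', 'username': 'UserName', 'password': 'Content', 'address': 'Address'}

    def normalize_property(object_property):
        return PROPERTY_MAP.get(object_property.lower(), object_property.capitalize())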

@@ -1,9 +1,10 @@
+from azure.keyvault.secrets import SecretClient
+from azure.identity import ClientSecretCredential
+from msrestazure import azure_cloud
+
 from .plugin import CredentialPlugin

 from django.utils.translation import gettext_lazy as _
-from azure.keyvault import KeyVaultClient, KeyVaultAuthentication
-from azure.common.credentials import ServicePrincipalCredentials
-from msrestazure import azure_cloud


 # https://github.com/Azure/msrestazure-for-python/blob/master/msrestazure/azure_cloud.py
@@ -54,22 +55,9 @@ azure_keyvault_inputs = {


 def azure_keyvault_backend(**kwargs):
-    url = kwargs['url']
-    [cloud] = [c for c in clouds if c.name == kwargs.get('cloud_name', default_cloud.name)]
-
-    def auth_callback(server, resource, scope):
-        credentials = ServicePrincipalCredentials(
-            url=url,
-            client_id=kwargs['client'],
-            secret=kwargs['secret'],
-            tenant=kwargs['tenant'],
-            resource=f"https://{cloud.suffixes.keyvault_dns.split('.', 1).pop()}",
-        )
-        token = credentials.token
-        return token['token_type'], token['access_token']
-
-    kv = KeyVaultClient(KeyVaultAuthentication(auth_callback))
-    return kv.get_secret(url, kwargs['secret_field'], kwargs.get('secret_version', '')).value
+    csc = ClientSecretCredential(tenant_id=kwargs['tenant'], client_id=kwargs['client'], client_secret=kwargs['secret'])
+    kv = SecretClient(credential=csc, vault_url=kwargs['url'])
+    return kv.get_secret(name=kwargs['secret_field'], version=kwargs.get('secret_version', '')).value


 azure_keyvault_plugin = CredentialPlugin('Microsoft Azure Key Vault', inputs=azure_keyvault_inputs, backend=azure_keyvault_backend)
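The rewrite moves from the retired azure.keyvault/msrestazure auth-callback flow to the current SDK pair azure-identity plus azure-keyvault-secrets. Standalone, the new flow looks like this (tenant, client, and vault values are placeholders, not taken from the diff):

    from azure.identity import ClientSecretCredential
    from azure.keyvault.secrets import SecretClient

    # Placeholder values for illustration only.
    credential = ClientSecretCredential(tenant_id='<tenant>', client_id='<client>', client_secret='<secret>')
    client = SecretClient(vault_url='https://example.vault.azure.net', credential=credential)
    print(client.get_secret('my-secret').value)  # latest version when no version is given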

@@ -3,6 +3,7 @@ from .plugin import CredentialPlugin
 from django.conf import settings
 from django.utils.translation import gettext_lazy as _
 from delinea.secrets.vault import PasswordGrantAuthorizer, SecretsVault
+from base64 import b64decode

 dsv_inputs = {
     'fields': [
@@ -44,8 +45,16 @@ dsv_inputs = {
             'help_text': _('The field to extract from the secret'),
             'type': 'string',
         },
+        {
+            'id': 'secret_decoding',
+            'label': _('Should the secret be base64 decoded?'),
+            'help_text': _('Specify whether the secret should be base64 decoded, typically used for storing files, such as SSH keys'),
+            'choices': ['No Decoding', 'Decode Base64'],
+            'type': 'string',
+            'default': 'No Decoding',
+        },
     ],
-    'required': ['tenant', 'client_id', 'client_secret', 'path', 'secret_field'],
+    'required': ['tenant', 'client_id', 'client_secret', 'path', 'secret_field', 'secret_decoding'],
 }

 if settings.DEBUG:
@@ -67,12 +76,18 @@ def dsv_backend(**kwargs):
     client_secret = kwargs['client_secret']
     secret_path = kwargs['path']
     secret_field = kwargs['secret_field']
+    # providing a default value to remain backward compatible for secrets that have not specified this option
+    secret_decoding = kwargs.get('secret_decoding', 'No Decoding')

     tenant_url = tenant_url_template.format(tenant_name, tenant_tld.strip("."))

     authorizer = PasswordGrantAuthorizer(tenant_url, client_id, client_secret)
     dsv_secret = SecretsVault(tenant_url, authorizer).get_secret(secret_path)

+    # files can be uploaded base64 encoded to DSV, so decode only when asked to
+    if secret_decoding == 'Decode Base64':
+        return b64decode(dsv_secret['data'][secret_field]).decode()
+
     return dsv_secret['data'][secret_field]
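One detail worth noting on the decode branch: b64decode returns bytes, so the trailing .decode() is what turns a stored file back into the text the credential machinery injects. A quick round trip:

    from base64 import b64encode, b64decode

    stored = b64encode(b'-----BEGIN OPENSSH PRIVATE KEY-----\n...').decode()  # as uploaded to DSV
    assert b64decode(stored).decode().startswith('-----BEGIN')               # as returned by the plugin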

@@ -41,6 +41,34 @@ base_inputs = {
         'secret': True,
         'help_text': _('The Secret ID for AppRole Authentication'),
     },
+    {
+        'id': 'client_cert_public',
+        'label': _('Client Certificate'),
+        'type': 'string',
+        'multiline': True,
+        'help_text': _(
+            'The PEM-encoded client certificate used for TLS client authentication.'
+            ' This should include the certificate and any intermediate certificates.'
+        ),
+    },
+    {
+        'id': 'client_cert_private',
+        'label': _('Client Certificate Key'),
+        'type': 'string',
+        'multiline': True,
+        'secret': True,
+        'help_text': _('The certificate private key used for TLS client authentication.'),
+    },
+    {
+        'id': 'client_cert_role',
+        'label': _('TLS Authentication Role'),
+        'type': 'string',
+        'multiline': False,
+        'help_text': _(
+            'The role configured in Hashicorp Vault for TLS client authentication.'
+            ' If not provided, Hashicorp Vault may assign roles based on the certificate used.'
+        ),
+    },
     {
         'id': 'namespace',
         'label': _('Namespace name (Vault Enterprise only)'),
@@ -59,6 +87,20 @@ base_inputs = {
             ' see https://www.vaultproject.io/docs/auth/kubernetes#configuration'
         ),
     },
+    {
+        'id': 'username',
+        'label': _('Username'),
+        'type': 'string',
+        'secret': False,
+        'help_text': _('Username for user authentication.'),
+    },
+    {
+        'id': 'password',
+        'label': _('Password'),
+        'type': 'string',
+        'secret': True,
+        'help_text': _('Password for user authentication.'),
+    },
     {
         'id': 'default_auth_path',
         'label': _('Path to Auth'),
@@ -157,19 +199,25 @@ hashi_ssh_inputs['required'].extend(['public_key', 'role'])

 def handle_auth(**kwargs):
     token = None

     if kwargs.get('token'):
         token = kwargs['token']
+    elif kwargs.get('username') and kwargs.get('password'):
+        token = method_auth(**kwargs, auth_param=userpass_auth(**kwargs))
     elif kwargs.get('role_id') and kwargs.get('secret_id'):
         token = method_auth(**kwargs, auth_param=approle_auth(**kwargs))
     elif kwargs.get('kubernetes_role'):
         token = method_auth(**kwargs, auth_param=kubernetes_auth(**kwargs))
+    elif kwargs.get('client_cert_public') and kwargs.get('client_cert_private'):
+        token = method_auth(**kwargs, auth_param=client_cert_auth(**kwargs))
     else:
-        raise Exception('Either token or AppRole/Kubernetes authentication parameters must be set')
+        raise Exception('Token, Username/Password, AppRole, Kubernetes, or TLS authentication parameters must be set')

     return token


+def userpass_auth(**kwargs):
+    return {'username': kwargs['username'], 'password': kwargs['password']}
+
+
 def approle_auth(**kwargs):
     return {'role_id': kwargs['role_id'], 'secret_id': kwargs['secret_id']}

@@ -181,6 +229,10 @@ def kubernetes_auth(**kwargs):
     return {'role': kwargs['kubernetes_role'], 'jwt': jwt}


+def client_cert_auth(**kwargs):
+    return {'name': kwargs.get('client_cert_role')}
+
+
 def method_auth(**kwargs):
     # get auth method specific params
     request_kwargs = {'json': kwargs['auth_param'], 'timeout': 30}
@@ -193,13 +245,25 @@ def method_auth(**kwargs):
     cacert = kwargs.get('cacert', None)

     sess = requests.Session()
+    sess.mount(url, requests.adapters.HTTPAdapter(max_retries=5))
+
     # Namespace support
     if kwargs.get('namespace'):
         sess.headers['X-Vault-Namespace'] = kwargs['namespace']
     request_url = '/'.join([url, 'auth', auth_path, 'login']).rstrip('/')
+    if kwargs['auth_param'].get('username'):
+        request_url = request_url + '/' + (kwargs['username'])
     with CertFiles(cacert) as cert:
         request_kwargs['verify'] = cert
-        resp = sess.post(request_url, **request_kwargs)
+        # TLS client certificate support
+        if kwargs.get('client_cert_public') and kwargs.get('client_cert_private'):
+            # Add client cert to requests Session before making call
+            with CertFiles(kwargs['client_cert_public'], key=kwargs['client_cert_private']) as client_cert:
+                sess.cert = client_cert
+                resp = sess.post(request_url, **request_kwargs)
+        else:
+            # Make call without client certificate
+            resp = sess.post(request_url, **request_kwargs)
     resp.raise_for_status()
     token = resp.json()['auth']['client_token']
     return token
@@ -220,6 +284,7 @@ def kv_backend(**kwargs):
     }

     sess = requests.Session()
+    sess.mount(url, requests.adapters.HTTPAdapter(max_retries=5))
     sess.headers['Authorization'] = 'Bearer {}'.format(token)
     # Compatibility header for older installs of Hashicorp Vault
     sess.headers['X-Vault-Token'] = token
@@ -290,6 +355,7 @@ def ssh_backend(**kwargs):
     request_kwargs['json']['valid_principals'] = kwargs['valid_principals']

     sess = requests.Session()
+    sess.mount(url, requests.adapters.HTTPAdapter(max_retries=5))
     sess.headers['Authorization'] = 'Bearer {}'.format(token)
     if kwargs.get('namespace'):
         sess.headers['X-Vault-Namespace'] = kwargs['namespace']
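Stripped of the plugin plumbing, the new TLS branch is a plain POST to Vault's cert auth login endpoint with the client certificate attached to the session. A sketch against a hypothetical Vault host (URL, file paths, and role name are placeholders):

    import requests

    sess = requests.Session()
    sess.cert = ('/path/to/client.pem', '/path/to/client.key')  # what CertFiles produces above
    resp = sess.post(
        'https://vault.example.com:8200/v1/auth/cert/login',
        json={'name': 'my-tls-role'},  # optional role, like client_cert_role
        timeout=30,
    )
    resp.raise_for_status()
    token = resp.json()['auth']['client_token']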

@@ -1,6 +1,7 @@
 import os
 import psycopg
 import select
+from copy import deepcopy

 from contextlib import contextmanager

@@ -93,6 +94,27 @@ class PubSub(object):
         self.conn.close()


+def create_listener_connection():
+    conf = deepcopy(settings.DATABASES['default'])
+    conf['OPTIONS'] = deepcopy(conf.get('OPTIONS', {}))
+    # Modify the application name to distinguish from other connections the process might use
+    conf['OPTIONS']['application_name'] = get_application_name(settings.CLUSTER_HOST_ID, function='listener')
+
+    # Apply overrides specifically for the listener connection
+    for k, v in settings.LISTENER_DATABASES.get('default', {}).items():
+        if k != 'OPTIONS':
+            conf[k] = v
+    for k, v in settings.LISTENER_DATABASES.get('default', {}).get('OPTIONS', {}).items():
+        conf['OPTIONS'][k] = v
+
+    # Allow password-less authentication
+    if 'PASSWORD' in conf:
+        conf['OPTIONS']['password'] = conf.pop('PASSWORD')
+
+    connection_data = f"dbname={conf['NAME']} host={conf['HOST']} user={conf['USER']} port={conf['PORT']}"
+    return psycopg.connect(connection_data, autocommit=True, **conf['OPTIONS'])
+
+
 @contextmanager
 def pg_bus_conn(new_connection=False, select_timeout=None):
     '''
@@ -106,12 +128,7 @@ def pg_bus_conn(new_connection=False, select_timeout=None):
     '''

     if new_connection:
-        conf = settings.DATABASES['default'].copy()
-        conf['OPTIONS'] = conf.get('OPTIONS', {}).copy()
-        # Modify the application name to distinguish from other connections the process might use
-        conf['OPTIONS']['application_name'] = get_application_name(settings.CLUSTER_HOST_ID, function='listener')
-        connection_data = f"dbname={conf['NAME']} host={conf['HOST']} user={conf['USER']} password={conf['PASSWORD']} port={conf['PORT']}"
-        conn = psycopg.connect(connection_data, autocommit=True, **conf['OPTIONS'])
+        conn = create_listener_connection()
     else:
         if pg_connection.connection is None:
             pg_connection.connect()
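The override handling is a shallow two-level merge: top-level keys from LISTENER_DATABASES replace their DATABASES counterparts wholesale, while OPTIONS is merged key by key. A self-contained sketch with illustrative settings values:

    from copy import deepcopy

    DATABASES = {'default': {'USER': 'awx', 'OPTIONS': {'sslmode': 'prefer'}}}
    LISTENER_DATABASES = {'default': {'USER': 'awx_listener', 'OPTIONS': {'keepalives': 1}}}

    conf = deepcopy(DATABASES['default'])
    for k, v in LISTENER_DATABASES['default'].items():
        if k != 'OPTIONS':
            conf[k] = v  # top-level keys replace wholesale
    for k, v in LISTENER_DATABASES['default'].get('OPTIONS', {}).items():
        conf['OPTIONS'][k] = v  # OPTIONS keys merge instead of replace

    assert conf == {'USER': 'awx_listener', 'OPTIONS': {'sslmode': 'prefer', 'keepalives': 1}}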

@@ -162,13 +162,13 @@ class AWXConsumerRedis(AWXConsumerBase):
 class AWXConsumerPG(AWXConsumerBase):
     def __init__(self, *args, schedule=None, **kwargs):
         super().__init__(*args, **kwargs)
-        self.pg_max_wait = settings.DISPATCHER_DB_DOWNTIME_TOLERANCE
+        self.pg_max_wait = getattr(settings, 'DISPATCHER_DB_DOWNTOWN_TOLLERANCE', settings.DISPATCHER_DB_DOWNTIME_TOLERANCE)
         # if no successful loops have ran since startup, then we should fail right away
         self.pg_is_down = True  # set so that we fail if we get database errors on startup
         init_time = time.time()
         self.pg_down_time = init_time - self.pg_max_wait  # allow no grace period
         self.last_cleanup = init_time
-        self.subsystem_metrics = s_metrics.Metrics(auto_pipe_execute=False)
+        self.subsystem_metrics = s_metrics.DispatcherMetrics(auto_pipe_execute=False)
         self.last_metrics_gather = init_time
         self.listen_cumulative_time = 0.0
         if schedule:
@@ -214,7 +214,10 @@ class AWXConsumerPG(AWXConsumerBase):
                     # bypasses pg_notify for scheduled tasks
                     self.dispatch_task(body)

-                self.pg_is_down = False
+                if self.pg_is_down:
+                    logger.info('Dispatcher listener connection established')
+                    self.pg_is_down = False

                 self.listen_start = time.time()

                 return self.scheduler.time_until_next_run()
@@ -256,6 +259,12 @@ class AWXConsumerPG(AWXConsumerBase):
                 current_downtime = time.time() - self.pg_down_time
                 if current_downtime > self.pg_max_wait:
                     logger.exception(f"Postgres event consumer has not recovered in {current_downtime} s, exiting")
+                    # Sending QUIT to multiprocess queue to signal workers to exit
+                    for worker in self.pool.workers:
+                        try:
+                            worker.quit()
+                        except Exception:
+                            logger.exception(f"Error sending QUIT to worker {worker}")
                     raise
                 # Wait for a second before next attempt, but still listen for any shutdown signals
                 for i in range(10):
@@ -267,6 +276,12 @@ class AWXConsumerPG(AWXConsumerBase):
         except Exception:
             # Log unanticipated exception in addition to writing to stderr to get timestamps and other metadata
             logger.exception('Encountered unhandled error in dispatcher main loop')
+            # Sending QUIT to multiprocess queue to signal workers to exit
+            for worker in self.pool.workers:
+                try:
+                    worker.quit()
+                except Exception:
+                    logger.exception(f"Error sending QUIT to worker {worker}")
             raise
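The 'allow no grace period' line is doing real work here: by backdating pg_down_time by the full tolerance, a database error on the very first loop already exceeds the budget and the consumer exits instead of spinning. In miniature, with an illustrative tolerance value:

    import time

    pg_max_wait = 40.0                        # illustrative tolerance in seconds
    pg_down_time = time.time() - pg_max_wait  # backdated at startup

    # An immediate failure is already over budget:
    assert time.time() - pg_down_time >= pg_max_wait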

@@ -72,7 +72,7 @@ class CallbackBrokerWorker(BaseWorker):
     def __init__(self):
         self.buff = {}
         self.redis = redis.Redis.from_url(settings.BROKER_URL)
-        self.subsystem_metrics = s_metrics.Metrics(auto_pipe_execute=False)
+        self.subsystem_metrics = s_metrics.CallbackReceiverMetrics(auto_pipe_execute=False)
         self.queue_pop = 0
         self.queue_name = settings.CALLBACK_QUEUE
         self.prof = AWXProfiler("CallbackBrokerWorker")

@@ -5,6 +5,7 @@
 import copy
 import json
 import re
+import sys
 import urllib.parse

 from jinja2 import sandbox, StrictUndefined
@@ -251,7 +252,7 @@ class ImplicitRoleField(models.ForeignKey):
         kwargs.setdefault('related_name', '+')
         kwargs.setdefault('null', 'True')
         kwargs.setdefault('editable', False)
-        kwargs.setdefault('on_delete', models.CASCADE)
+        kwargs.setdefault('on_delete', models.SET_NULL)
         super(ImplicitRoleField, self).__init__(*args, **kwargs)

     def deconstruct(self):
@@ -406,11 +407,13 @@ class SmartFilterField(models.TextField):
         # https://docs.python.org/2/library/stdtypes.html#truth-value-testing
         if not value:
             return None
-        value = urllib.parse.unquote(value)
-        try:
-            SmartFilter().query_from_string(value)
-        except RuntimeError as e:
-            raise models.base.ValidationError(e)
+        # avoid doing too much during migrations
+        if 'migrate' not in sys.argv:
+            value = urllib.parse.unquote(value)
+            try:
+                SmartFilter().query_from_string(value)
+            except RuntimeError as e:
+                raise models.base.ValidationError(e)
         return super(SmartFilterField, self).get_prep_value(value)

new file: awx/main/management/commands/add_receptor_address.py (+53)
@@ -0,0 +1,53 @@
+# Copyright (c) 2015 Ansible, Inc.
+# All Rights Reserved
+
+from django.core.management.base import BaseCommand
+
+from awx.main.models import Instance, ReceptorAddress
+
+
+def add_address(**kwargs):
+    try:
+        instance = Instance.objects.get(hostname=kwargs.pop('instance'))
+        kwargs['instance'] = instance
+
+        if kwargs.get('canonical') and instance.receptor_addresses.filter(canonical=True).exclude(address=kwargs['address']).exists():
+            print(f"Instance {instance.hostname} already has a canonical address, skipping")
+            return False
+        # if ReceptorAddress already exists with address, just update
+        # otherwise, create new ReceptorAddress
+        addr, _ = ReceptorAddress.objects.update_or_create(address=kwargs.pop('address'), defaults=kwargs)
+        print(f"Successfully added receptor address {addr.get_full_address()}")
+        return True
+    except Exception as e:
+        print(f"Error adding receptor address: {e}")
+        return False
+
+
+class Command(BaseCommand):
+    """
+    Internal controller command.
+    Register receptor address to an already-registered instance.
+    """
+
+    help = "Add receptor address to an instance."
+
+    def add_arguments(self, parser):
+        parser.add_argument('--instance', dest='instance', required=True, type=str, help="Instance hostname this address is added to")
+        parser.add_argument('--address', dest='address', required=True, type=str, help="Receptor address")
+        parser.add_argument('--port', dest='port', type=int, help="Receptor listener port")
+        parser.add_argument('--websocket_path', dest='websocket_path', type=str, default="", help="Path for websockets")
+        parser.add_argument('--is_internal', action='store_true', help="If true, address only resolvable within the Kubernetes cluster")
+        parser.add_argument('--protocol', type=str, default='tcp', choices=['tcp', 'ws', 'wss'], help="Protocol to use for the Receptor listener")
+        parser.add_argument('--canonical', action='store_true', help="If true, address is the canonical address for the instance")
+        parser.add_argument('--peers_from_control_nodes', action='store_true', help="If true, control nodes will peer to this address")
+
+    def handle(self, **options):
+        address_options = {
+            k: options[k]
+            for k in ('instance', 'address', 'port', 'websocket_path', 'is_internal', 'protocol', 'peers_from_control_nodes', 'canonical')
+            if options[k]
+        }
+        changed = add_address(**address_options)
+        if changed:
+            print("(changed: True)")
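A hedged usage sketch of the new command via Django's call_command; the hostname and address are illustrative:

    from django.core.management import call_command

    # --canonical keeps at most one canonical address per instance (see add_address above).
    call_command('add_receptor_address', instance='hop1.example.org', address='10.0.0.5', port=27199, protocol='tcp', canonical=True)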

new file: awx/main/management/commands/check_instance_ready.py (+12)
@@ -0,0 +1,12 @@
+from django.core.management.base import BaseCommand, CommandError
+from awx.main.models.ha import Instance
+
+
+class Command(BaseCommand):
+    help = 'Check if the task manager instance is ready; throws an error if not. Can be used as a readiness probe for k8s.'
+
+    def handle(self, *args, **options):
+        if Instance.objects.me().node_state != Instance.States.READY:
+            raise CommandError('Instance is not ready')  # so that return code is not 0
+
+        return

@@ -2,6 +2,7 @@
 # All Rights Reserved

 from django.core.management.base import BaseCommand
+from django.db import transaction
 from crum import impersonate
 from awx.main.models import User, Organization, Project, Inventory, CredentialType, Credential, Host, JobTemplate
 from awx.main.signals import disable_computed_fields
@@ -13,6 +14,12 @@ class Command(BaseCommand):
     help = 'Creates a preload tower data if there is none.'

     def handle(self, *args, **kwargs):
+        # Wrap the operation in an atomic block, so we do not by accident
+        # create the organization but not create the project, etc.
+        with transaction.atomic():
+            self._handle()
+
+    def _handle(self):
         changed = False

         # Create a default organization as the first superuser found.
@@ -43,10 +50,11 @@ class Command(BaseCommand):

         ssh_type = CredentialType.objects.filter(namespace='ssh').first()
         c, _ = Credential.objects.get_or_create(
-            credential_type=ssh_type, name='Demo Credential', inputs={'username': superuser.username}, created_by=superuser
+            credential_type=ssh_type, name='Demo Credential', inputs={'username': getattr(superuser, 'username', 'null')}, created_by=superuser
         )

-        c.admin_role.members.add(superuser)
+        if superuser:
+            c.admin_role.members.add(superuser)

         public_galaxy_credential, _ = Credential.objects.get_or_create(
             name='Ansible Galaxy',

new file: awx/main/management/commands/dump_auth_config.py (+195)
@@ -0,0 +1,195 @@
+import json
+import os
+import sys
+import re
+from typing import Any
+
+from django.core.management.base import BaseCommand
+from django.conf import settings
+
+from awx.conf import settings_registry
+
+
+class Command(BaseCommand):
+    help = 'Dump the current auth configuration in django_ansible_base.authenticator format, currently supports LDAP and SAML'
+
+    DAB_SAML_AUTHENTICATOR_KEYS = {
+        "SP_ENTITY_ID": True,
+        "SP_PUBLIC_CERT": True,
+        "SP_PRIVATE_KEY": True,
+        "ORG_INFO": True,
+        "TECHNICAL_CONTACT": True,
+        "SUPPORT_CONTACT": True,
+        "SP_EXTRA": False,
+        "SECURITY_CONFIG": False,
+        "EXTRA_DATA": False,
+        "ENABLED_IDPS": True,
+        "CALLBACK_URL": False,
+    }
+
+    DAB_LDAP_AUTHENTICATOR_KEYS = {
+        "SERVER_URI": True,
+        "BIND_DN": False,
+        "BIND_PASSWORD": False,
+        "CONNECTION_OPTIONS": False,
+        "GROUP_TYPE": True,
+        "GROUP_TYPE_PARAMS": True,
+        "GROUP_SEARCH": False,
+        "START_TLS": False,
+        "USER_DN_TEMPLATE": True,
+        "USER_ATTR_MAP": True,
+        "USER_SEARCH": False,
+    }
+
+    def is_enabled(self, settings, keys):
+        missing_fields = []
+        for key, required in keys.items():
+            if required and not settings.get(key):
+                missing_fields.append(key)
+        if missing_fields:
+            return False, missing_fields
+        return True, None
+
+    def get_awx_ldap_settings(self) -> dict[str, dict[str, Any]]:
+        awx_ldap_settings = {}
+
+        for awx_ldap_setting in settings_registry.get_registered_settings(category_slug='ldap'):
+            key = awx_ldap_setting.removeprefix("AUTH_LDAP_")
+            value = getattr(settings, awx_ldap_setting, None)
+            awx_ldap_settings[key] = value
+
+        grouped_settings = {}
+
+        for key, value in awx_ldap_settings.items():
+            match = re.search(r'(\d+)', key)
+            index = int(match.group()) if match else 0
+            new_key = re.sub(r'\d+_', '', key)
+
+            if index not in grouped_settings:
+                grouped_settings[index] = {}
+
+            grouped_settings[index][new_key] = value
+            if new_key == "GROUP_TYPE" and value:
+                grouped_settings[index][new_key] = type(value).__name__
+
+            if new_key == "SERVER_URI" and value:
+                value = value.split(", ")
+                grouped_settings[index][new_key] = value
+
+            if type(value).__name__ == "LDAPSearch":
+                data = []
+                data.append(value.base_dn)
+                data.append("SCOPE_SUBTREE")
+                data.append(value.filterstr)
+                grouped_settings[index][new_key] = data
+
+        return grouped_settings
+
+    def get_awx_saml_settings(self) -> dict[str, Any]:
+        awx_saml_settings = {}
+        for awx_saml_setting in settings_registry.get_registered_settings(category_slug='saml'):
+            awx_saml_settings[awx_saml_setting.removeprefix("SOCIAL_AUTH_SAML_")] = getattr(settings, awx_saml_setting, None)
+
+        return awx_saml_settings
+
+    def format_config_data(self, enabled, awx_settings, type, keys, name):
+        config = {
+            "type": f"ansible_base.authentication.authenticator_plugins.{type}",
+            "name": name,
+            "enabled": enabled,
+            "create_objects": True,
+            "users_unique": False,
+            "remove_users": True,
+            "configuration": {},
+        }
+        for k in keys:
+            v = awx_settings.get(k)
+            config["configuration"].update({k: v})
+
+        if type == "saml":
+            idp_to_key_mapping = {
+                "url": "IDP_URL",
+                "x509cert": "IDP_X509_CERT",
+                "entity_id": "IDP_ENTITY_ID",
+                "attr_email": "IDP_ATTR_EMAIL",
+                "attr_groups": "IDP_GROUPS",
+                "attr_username": "IDP_ATTR_USERNAME",
+                "attr_last_name": "IDP_ATTR_LAST_NAME",
+                "attr_first_name": "IDP_ATTR_FIRST_NAME",
+                "attr_user_permanent_id": "IDP_ATTR_USER_PERMANENT_ID",
+            }
+            for idp_name in awx_settings.get("ENABLED_IDPS", {}):
+                for key in idp_to_key_mapping:
+                    value = awx_settings["ENABLED_IDPS"][idp_name].get(key)
+                    if value is not None:
+                        config["name"] = idp_name
+                        config["configuration"].update({idp_to_key_mapping[key]: value})
+
+        return config
+
+    def add_arguments(self, parser):
+        parser.add_argument(
+            "output_file",
+            nargs="?",
+            type=str,
+            default=None,
+            help="Output JSON file path",
+        )
+
+    def handle(self, *args, **options):
+        try:
+            data = []
+
+            # dump SAML settings
+            awx_saml_settings = self.get_awx_saml_settings()
+            awx_saml_enabled, saml_missing_fields = self.is_enabled(awx_saml_settings, self.DAB_SAML_AUTHENTICATOR_KEYS)
+            if awx_saml_enabled:
+                awx_saml_name = awx_saml_settings["ENABLED_IDPS"]
+                data.append(
+                    self.format_config_data(
+                        awx_saml_enabled,
+                        awx_saml_settings,
+                        "saml",
+                        self.DAB_SAML_AUTHENTICATOR_KEYS,
+                        awx_saml_name,
+                    )
+                )
+            else:
+                data.append({"SAML_missing_fields": saml_missing_fields})
+
+            # dump LDAP settings
+            awx_ldap_group_settings = self.get_awx_ldap_settings()
+            for awx_ldap_name, awx_ldap_settings in awx_ldap_group_settings.items():
+                awx_ldap_enabled, ldap_missing_fields = self.is_enabled(awx_ldap_settings, self.DAB_LDAP_AUTHENTICATOR_KEYS)
+                if awx_ldap_enabled:
+                    data.append(
+                        self.format_config_data(
+                            awx_ldap_enabled,
+                            awx_ldap_settings,
+                            "ldap",
+                            self.DAB_LDAP_AUTHENTICATOR_KEYS,
+                            f"LDAP_{awx_ldap_name}",
+                        )
+                    )
+                else:
+                    data.append({f"LDAP_{awx_ldap_name}_missing_fields": ldap_missing_fields})
+
+            # write to file if requested
+            if options["output_file"]:
+                # Define the path for the output JSON file
+                output_file = options["output_file"]
+
+                # Ensure the directory exists
+                os.makedirs(os.path.dirname(output_file), exist_ok=True)
+
+                # Write data to the JSON file
+                with open(output_file, "w") as f:
+                    json.dump(data, f, indent=4)
+
+                self.stdout.write(self.style.SUCCESS(f"Auth config data dumped to {output_file}"))
+            else:
+                self.stdout.write(json.dumps(data, indent=4))
+
+        except Exception as e:
+            self.stdout.write(self.style.ERROR(f"An error occurred: {str(e)}"))
+            sys.exit(1)
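The LDAP grouping hinges on the numeric infix of the setting name: AUTH_LDAP_SERVER_URI belongs to group 0, AUTH_LDAP_1_SERVER_URI to group 1, and so on. A sketch of that transformation on illustrative setting names:

    import re

    for name in ('AUTH_LDAP_SERVER_URI', 'AUTH_LDAP_1_SERVER_URI', 'AUTH_LDAP_2_BIND_DN'):
        key = name.removeprefix('AUTH_LDAP_')
        match = re.search(r'(\d+)', key)
        index = int(match.group()) if match else 0  # unnumbered settings land in group 0
        print(index, re.sub(r'\d+_', '', key))
    # 0 SERVER_URI
    # 1 SERVER_URI
    # 2 BIND_DN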
151
awx/main/management/commands/job_performance_rollup.py
Normal file
151
awx/main/management/commands/job_performance_rollup.py
Normal file
@@ -0,0 +1,151 @@
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved

# Django
from django.core.management.base import BaseCommand
from django.db import connection

import json
import re


class Command(BaseCommand):
    """
    Emits some simple statistics suitable for external monitoring
    """

    help = 'Run queries that provide an overview of the performance of the system over a given period of time'

    def add_arguments(self, parser):
        parser.add_argument('--since', action='store', dest='days', type=str, default="1", help='Max days to look back to for data')
        parser.add_argument('--limit', action='store', dest='limit', type=str, default="10", help='Max number of records for database queries (LIMIT)')

    def execute_query(self, query):
        with connection.cursor() as cursor:
            cursor.execute(query)
            rows = cursor.fetchall()
        return rows

    def jsonify(self, title, keys, values, query):
        result = []
        query = re.sub('\n', ' ', query)
        query = re.sub(r'\s{2,}', ' ', query)
        for value in values:
            result.append(dict(zip(keys, value)))
        return {title: result, 'count': len(values), 'query': query}

    def jobs_pending_duration(self, days, limit):
        """Return list of jobs sorted by time in pending within configured number of days (within limit)"""
        query = f"""
            SELECT name, id AS job_id, unified_job_template_id, created, started - created AS pending_duration
            FROM main_unifiedjob
            WHERE finished IS NOT null
            AND started IS NOT null
            AND cancel_flag IS NOT true
            AND created > NOW() - INTERVAL '{days} days'
            AND started - created > INTERVAL '0 seconds'
            ORDER BY pending_duration DESC
            LIMIT {limit};"""
        values = self.execute_query(query)
        return self.jsonify(
            title='completed_or_started_jobs_by_pending_duration',
            keys=('job_name', 'job_id', 'unified_job_template_id', 'job_created', 'pending_duration'),
            values=values,
            query=query,
        )

    def times_of_day_pending_more_than_X_min(self, days, limit, minutes_pending):
        """Return count of jobs per hour that spent more than the given number of minutes in pending, within configured number of days (within limit)"""
        query = f"""
            SELECT
            date_trunc('hour', created) as day_and_hour,
            COUNT(created) as count_jobs_pending_greater_than_{minutes_pending}_min
            FROM main_unifiedjob
            WHERE started IS NOT NULL
            AND started - created > INTERVAL '{minutes_pending} minutes'
            AND created > NOW() - INTERVAL '{days} days'
            GROUP BY date_trunc('hour', created)
            ORDER BY count_jobs_pending_greater_than_{minutes_pending}_min DESC
            LIMIT {limit};"""
        values = self.execute_query(query)
        return self.jsonify(
            title=f'times_of_day_pending_more_than_{minutes_pending}',
            keys=('day_and_hour', f'count_jobs_pending_more_than_{minutes_pending}_min'),
            values=values,
            query=query,
        )

    def pending_jobs_details(self, days, limit):
        """Return list of jobs that are in pending and list details such as reasons they may be blocked, within configured number of days and limit."""
        query = f"""
            SELECT DISTINCT ON(A.id) A.name, A.id, A.unified_job_template_id, A.created, NOW() - A.created as pending_duration, F.allow_simultaneous, B.current_job_id as current_ujt_job, I.to_unifiedjob_id as dependency_job_id, A.dependencies_processed
            FROM main_unifiedjob A
            LEFT JOIN (
                SELECT C.id, C.current_job_id FROM main_unifiedjobtemplate as C
            ) B
            ON A.unified_job_template_id = B.id
            LEFT JOIN main_job F ON A.id = F.unifiedjob_ptr_id
            LEFT JOIN (
                SELECT * FROM main_unifiedjob_dependent_jobs as G
                RIGHT JOIN main_unifiedjob H ON G.to_unifiedjob_id = H.id
            ) I
            ON A.id = I.from_unifiedjob_id
            WHERE A.status = 'pending'
            AND A.created > NOW() - INTERVAL '{days} days'
            ORDER BY id DESC
            LIMIT {limit};"""
        values = self.execute_query(query)
        return self.jsonify(
            title='pending_jobs_details',
            keys=(
                'job_name',
                'job_id',
                'unified_job_template_id',
                'job_created',
                'pending_duration',
                'allow_simultaneous',
                'current_ujt_job',
                'dependency_job_id',
                'dependencies_processed',
            ),
            values=values,
            query=query,
        )

    def jobs_by_FUNC_event_processing_time(self, func, days, limit):
        """Return list of jobs sorted by the given aggregate of job event processing time within configured number of days (within limit)"""
        if func not in ('MAX', 'MIN', 'AVG', 'SUM'):
            raise RuntimeError('Only able to assess job events grouped by job with MAX, MIN, AVG, SUM functions')

        query = f"""SELECT job_id, {func}(A.modified - A.created) as job_event_processing_delay_{func}, B.name, B.created, B.finished, B.controller_node, B.execution_node
            FROM main_jobevent A
            RIGHT JOIN (
                SELECT id, created, name, finished, controller_node, execution_node FROM
                main_unifiedjob
                WHERE created > NOW() - INTERVAL '{days} days'
                AND created IS NOT null
                AND finished IS NOT null
                AND id IS NOT null
                AND name IS NOT null
            ) B
            ON A.job_id=B.id
            WHERE A.job_id is not null
            GROUP BY job_id, B.name, B.created, B.finished, B.controller_node, B.execution_node
            ORDER BY job_event_processing_delay_{func} DESC
            LIMIT {limit};"""
        values = self.execute_query(query)
        return self.jsonify(
            title=f'jobs_by_{func}_event_processing',
            keys=('job_id', f'{func}_job_event_processing_delay', 'job_name', 'job_created_time', 'job_finished_time', 'controller_node', 'execution_node'),
            values=values,
            query=query,
        )

    def handle(self, *args, **options):
        items = []
        for func in ('MAX', 'MIN', 'AVG'):
            items.append(self.jobs_by_FUNC_event_processing_time(func, options['days'], options['limit']))
        items.append(self.jobs_pending_duration(options['days'], options['limit']))
        items.append(self.pending_jobs_details(options['days'], options['limit']))
        items.append(self.times_of_day_pending_more_than_X_min(options['days'], options['limit'], minutes_pending=10))
        self.stdout.write(json.dumps(items, indent=4, sort_keys=True, default=str))
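As a quick illustration of how jsonify above flattens cursor rows into labeled records, here is a minimal, self-contained sketch; the keys and rows are made up, and the commented invocation assumes a configured AWX Django environment:

    # Row-to-dict conversion as done by jsonify(); keys/rows are illustrative.
    keys = ('job_name', 'job_id', 'pending_duration')
    rows = [('demo', 42, '0:01:05'), ('backup', 43, '0:00:12')]
    records = [dict(zip(keys, row)) for row in rows]
    print(records)
    # [{'job_name': 'demo', 'job_id': 42, 'pending_duration': '0:01:05'}, ...]

    # Running the command itself (requires a configured AWX environment):
    # from django.core.management import call_command
    # call_command('job_performance_rollup', '--since', '7', '--limit', '5')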
@@ -55,7 +55,7 @@ class Command(BaseCommand):
             capacity = f' capacity={x.capacity}' if x.node_type != 'hop' else ''
             version = f" version={x.version or '?'}" if x.node_type != 'hop' else ''
-            heartbeat = f' heartbeat="{x.last_seen:%Y-%m-%d %H:%M:%S}"' if x.capacity or x.node_type == 'hop' else ''
+            heartbeat = f' heartbeat="{x.last_seen:%Y-%m-%d %H:%M:%S}"' if x.last_seen else ''
             print(f'\t{color}{x.hostname}{capacity} node_type={x.node_type}{version}{heartbeat}{end_color}')

         print()
@@ -25,20 +25,17 @@ class Command(BaseCommand):

     def add_arguments(self, parser):
         parser.add_argument('--hostname', dest='hostname', type=str, help="Hostname used during provisioning")
-        parser.add_argument('--listener_port', dest='listener_port', type=int, help="Receptor listener port")
         parser.add_argument('--node_type', type=str, default='hybrid', choices=['control', 'execution', 'hop', 'hybrid'], help="Instance Node type")
         parser.add_argument('--uuid', type=str, help="Instance UUID")

-    def _register_hostname(self, hostname, node_type, uuid, listener_port):
+    def _register_hostname(self, hostname, node_type, uuid):
         if not hostname:
             if not settings.AWX_AUTO_DEPROVISION_INSTANCES:
                 raise CommandError('Registering with values from settings only intended for use in K8s installs')

             from awx.main.management.commands.register_queue import RegisterQueue

-            (changed, instance) = Instance.objects.register(
-                ip_address=os.environ.get('MY_POD_IP'), listener_port=listener_port, node_type='control', node_uuid=settings.SYSTEM_UUID
-            )
+            (changed, instance) = Instance.objects.register(ip_address=os.environ.get('MY_POD_IP'), node_type='control', node_uuid=settings.SYSTEM_UUID)
             RegisterQueue(settings.DEFAULT_CONTROL_PLANE_QUEUE_NAME, 100, 0, [], is_container_group=False).register()
             RegisterQueue(
                 settings.DEFAULT_EXECUTION_QUEUE_NAME,
@@ -51,16 +48,17 @@ class Command(BaseCommand):
                 max_concurrent_jobs=settings.DEFAULT_EXECUTION_QUEUE_MAX_CONCURRENT_JOBS,
             ).register()
         else:
-            (changed, instance) = Instance.objects.register(hostname=hostname, node_type=node_type, node_uuid=uuid, listener_port=listener_port)
+            (changed, instance) = Instance.objects.register(hostname=hostname, node_type=node_type, node_uuid=uuid)
             if changed:
                 print("Successfully registered instance {}".format(hostname))
             else:
                 print("Instance already registered {}".format(instance.hostname))

         self.changed = changed

     @transaction.atomic
     def handle(self, **options):
         self.changed = False
-        self._register_hostname(options.get('hostname'), options.get('node_type'), options.get('uuid'), options.get('listener_port'))
+        self._register_hostname(options.get('hostname'), options.get('node_type'), options.get('uuid'))
         if self.changed:
             print("(changed: True)")
@@ -1,9 +1,7 @@
-import warnings
-
 from django.core.management.base import BaseCommand, CommandError
 from django.db import transaction

-from awx.main.models import Instance, InstanceLink
+from awx.main.models import Instance, InstanceLink, ReceptorAddress


 class Command(BaseCommand):
@@ -28,7 +26,9 @@ class Command(BaseCommand):
     def handle(self, **options):
         # provides a mapping of hostname to Instance objects
-        nodes = Instance.objects.in_bulk(field_name='hostname')
+        nodes = Instance.objects.all().in_bulk(field_name='hostname')
+        # provides a mapping of address to ReceptorAddress objects
+        addresses = ReceptorAddress.objects.all().in_bulk(field_name='address')

         if options['source'] not in nodes:
             raise CommandError(f"Host {options['source']} is not a registered instance.")
@@ -39,6 +39,14 @@ class Command(BaseCommand):
         if options['exact'] is not None and options['disconnect']:
             raise CommandError("The option --disconnect may not be used with --exact.")

+        # make sure each target has a receptor address
+        peers = options['peers'] or []
+        disconnect = options['disconnect'] or []
+        exact = options['exact'] or []
+        for peer in peers + disconnect + exact:
+            if peer not in addresses:
+                raise CommandError(f"Peer {peer} does not have a receptor address.")
+
         # No 1-cycles
         for collection in ('peers', 'disconnect', 'exact'):
             if options[collection] is not None and options['source'] in options[collection]:
@@ -47,9 +55,12 @@ class Command(BaseCommand):
         # No 2-cycles
         if options['peers'] or options['exact'] is not None:
             peers = set(options['peers'] or options['exact'])
-            incoming = set(InstanceLink.objects.filter(target=nodes[options['source']]).values_list('source__hostname', flat=True))
+            if options['source'] in addresses:
+                incoming = set(InstanceLink.objects.filter(target=addresses[options['source']]).values_list('source__hostname', flat=True))
+            else:
+                incoming = set()
             if peers & incoming:
-                warnings.warn(f"Source node {options['source']} should not link to nodes already peering to it: {peers & incoming}.")
+                raise CommandError(f"Source node {options['source']} should not link to nodes already peering to it: {peers & incoming}.")

         if options['peers']:
             missing_peers = set(options['peers']) - set(nodes)
@@ -60,7 +71,7 @@ class Command(BaseCommand):
             results = 0
             for target in options['peers']:
                 _, created = InstanceLink.objects.update_or_create(
-                    source=nodes[options['source']], target=nodes[target], defaults={'link_state': InstanceLink.States.ESTABLISHED}
+                    source=nodes[options['source']], target=addresses[target], defaults={'link_state': InstanceLink.States.ESTABLISHED}
                 )
                 if created:
                     results += 1
@@ -70,9 +81,9 @@ class Command(BaseCommand):
         if options['disconnect']:
             results = 0
             for target in options['disconnect']:
-                if target not in nodes:  # Be permissive, the node might have already been de-registered.
+                if target not in addresses:  # Be permissive, the node might have already been de-registered.
                     continue
-                n, _ = InstanceLink.objects.filter(source=nodes[options['source']], target=nodes[target]).delete()
+                n, _ = InstanceLink.objects.filter(source=nodes[options['source']], target=addresses[target]).delete()
                 results += n

             print(f"{results} peer links removed from the database.")
@@ -81,11 +92,11 @@ class Command(BaseCommand):
             additions = 0
             with transaction.atomic():
                 peers = set(options['exact'])
-                links = set(InstanceLink.objects.filter(source=nodes[options['source']]).values_list('target__hostname', flat=True))
-                removals, _ = InstanceLink.objects.filter(source=nodes[options['source']], target__hostname__in=links - peers).delete()
+                links = set(InstanceLink.objects.filter(source=nodes[options['source']]).values_list('target__address', flat=True))
+                removals, _ = InstanceLink.objects.filter(source=nodes[options['source']], target__instance__hostname__in=links - peers).delete()
                 for target in peers - links:
                     _, created = InstanceLink.objects.update_or_create(
-                        source=nodes[options['source']], target=nodes[target], defaults={'link_state': InstanceLink.States.ESTABLISHED}
+                        source=nodes[options['source']], target=addresses[target], defaults={'link_state': InstanceLink.States.ESTABLISHED}
                     )
                     if created:
                         additions += 1
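The heart of this change is that peer links now point at ReceptorAddress rows rather than Instance rows, so every peer named on the command line must resolve to a known address before any links are written. A minimal, self-contained sketch of that validation pattern (the addresses mapping stands in for the in_bulk() result; values are illustrative):

    # Every requested peer must resolve to a known receptor address.
    addresses = {'hop1.example.org': object(), 'exec1.example.org': object()}
    requested = ['hop1.example.org', 'missing.example.org']
    unknown = [p for p in requested if p not in addresses]
    if unknown:
        raise SystemExit(f"Peers without a receptor address: {unknown}")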
26	awx/main/management/commands/remove_receptor_address.py	Normal file
@@ -0,0 +1,26 @@
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved

from django.core.management.base import BaseCommand

from awx.main.models import ReceptorAddress


class Command(BaseCommand):
    """
    Internal controller command.
    Delete a receptor address.
    """

    help = "Remove a receptor address from an instance."

    def add_arguments(self, parser):
        parser.add_argument('--address', dest='address', type=str, help="Receptor address to remove")

    def handle(self, **options):
        deleted = ReceptorAddress.objects.filter(address=options['address']).delete()
        if deleted[0]:
            print(f"Successfully removed {options['address']}")
            print("(changed: True)")
        else:
            print(f"Did not remove {options['address']}, not found")
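For reference, the new command can also be driven programmatically; a sketch, assuming a configured AWX Django environment and an address name that is only illustrative:

    from django.core.management import call_command

    # Delete the ReceptorAddress row matching --address, if one exists.
    call_command('remove_receptor_address', '--address', 'hop1.example.org')
    # prints "Successfully removed hop1.example.org" and "(changed: True)",
    # or "Did not remove hop1.example.org, not found"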
@@ -3,6 +3,7 @@

 from django.conf import settings
 from django.core.management.base import BaseCommand
+from awx.main.analytics.subsystem_metrics import CallbackReceiverMetricsServer
 from awx.main.dispatch.control import Control
 from awx.main.dispatch.worker import AWXConsumerRedis, CallbackBrokerWorker
@@ -25,6 +26,9 @@ class Command(BaseCommand):
             print(Control('callback_receiver').status())
             return
         consumer = None
+
+        CallbackReceiverMetricsServer().start()
+
         try:
             consumer = AWXConsumerRedis(
                 'callback_receiver',
@@ -10,6 +10,7 @@ from awx.main.dispatch import get_task_queuename
 from awx.main.dispatch.control import Control
 from awx.main.dispatch.pool import AutoscalePool
 from awx.main.dispatch.worker import AWXConsumerPG, TaskWorker
+from awx.main.analytics.subsystem_metrics import DispatcherMetricsServer

 logger = logging.getLogger('awx.main.dispatch')

@@ -62,6 +63,8 @@ class Command(BaseCommand):

         consumer = None

+        DispatcherMetricsServer().start()
+
         try:
             queues = ['tower_broadcast_all', 'tower_settings_change', get_task_queuename()]
             consumer = AWXConsumerPG('dispatcher', TaskWorker(), queues, AutoscalePool(min_workers=4), schedule=settings.CELERYBEAT_SCHEDULE)
@@ -16,6 +16,7 @@ from awx.main.analytics.broadcast_websocket import (
     RelayWebsocketStatsManager,
     safe_name,
 )
+from awx.main.analytics.subsystem_metrics import WebsocketsMetricsServer
 from awx.main.wsrelay import WebSocketRelayManager


@@ -100,8 +101,9 @@ class Command(BaseCommand):
             migrating = bool(executor.migration_plan(executor.loader.graph.leaf_nodes()))
             connection.close()  # Because of async nature, main loop will use new connection, so close this
         except Exception as exc:
-            logger.warning(f'Error on startup of run_wsrelay (error: {exc}), retry in 10s...')
-            time.sleep(10)
+            time.sleep(10)  # Prevent supervisor from restarting the service too quickly, which would put it in the FATAL state
+            # sleep before logging because logging relies on settings, which require a database connection
+            logger.warning(f'Error on startup of run_wsrelay (error: {exc}), slept for 10s...')
             return

         # In containerized deployments, migrations happen in the task container,
@@ -120,13 +122,14 @@ class Command(BaseCommand):
             return

         try:
-            my_hostname = Instance.objects.my_hostname()
+            my_hostname = Instance.objects.my_hostname()  # This relies on settings.CLUSTER_HOST_ID, which requires a database connection
             logger.info('Active instance with hostname {} is registered.'.format(my_hostname))
         except RuntimeError as e:
             # the CLUSTER_HOST_ID in the task, and web instance must match and
             # ensure network connectivity between the task and web instance
-            logger.info('Unable to return currently active instance: {}, retry in 5s...'.format(e))
-            time.sleep(5)
+            time.sleep(10)  # Prevent supervisor from restarting the service too quickly, which would put it in the FATAL state
+            # sleep before logging because logging relies on settings, which require a database connection
+            logger.warning(f"Unable to return currently active instance: {e}, slept for 10s before return.")
             return

         if options.get('status'):
@@ -163,8 +166,16 @@ class Command(BaseCommand):

             return

+        WebsocketsMetricsServer().start()
+
         try:
+            logger.info('Starting Websocket Relayer...')
             websocket_relay_manager = WebSocketRelayManager()
             asyncio.run(websocket_relay_manager.run())
         except KeyboardInterrupt:
             logger.info('Terminating Websocket Relayer')
+        except BaseException as e:  # BaseException is used to catch all exceptions including asyncio.CancelledError
+            time.sleep(10)  # Prevent supervisor from restarting the service too quickly, which would put it in the FATAL state
+            # sleep before logging because logging relies on settings, which require a database connection
+            logger.warning(f"Encountered error while running Websocket Relayer {e}, slept for 10s...")
+            return
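The same startup guard recurs in all three service commands: on a startup failure, sleep first (so supervisor does not cycle the process fast enough to mark it FATAL), then log, then return. A stripped-down sketch of that ordering, self-contained and not tied to any AWX internals:

    import logging
    import time

    logger = logging.getLogger('example')

    def guarded_startup(start):
        try:
            start()
        except Exception as exc:
            time.sleep(10)  # slow the restart loop so supervisor does not give up
            logger.warning(f'Startup failed ({exc}), slept 10s before exiting')
            return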
@@ -115,7 +115,14 @@ class InstanceManager(models.Manager):
                 return node[0]
         raise RuntimeError("No instance found with the current cluster host id")

-    def register(self, node_uuid=None, hostname=None, ip_address="", listener_port=None, node_type='hybrid', defaults=None):
+    def register(
+        self,
+        node_uuid=None,
+        hostname=None,
+        ip_address="",
+        node_type='hybrid',
+        defaults=None,
+    ):
         if not hostname:
             hostname = settings.CLUSTER_HOST_ID

@@ -161,9 +168,6 @@ class InstanceManager(models.Manager):
             if instance.node_type != node_type:
                 instance.node_type = node_type
                 update_fields.append('node_type')
-            if instance.listener_port != listener_port:
-                instance.listener_port = listener_port
-                update_fields.append('listener_port')
             if update_fields:
                 instance.save(update_fields=update_fields)
             return (True, instance)
@@ -174,11 +178,13 @@ class InstanceManager(models.Manager):
         create_defaults = {
             'node_state': Instance.States.INSTALLED,
             'capacity': 0,
+            'managed': True,
         }
         if defaults is not None:
             create_defaults.update(defaults)
         uuid_option = {'uuid': node_uuid if node_uuid is not None else uuid.uuid4()}
         if node_type == 'execution' and 'version' not in create_defaults:
             create_defaults['version'] = RECEPTOR_PENDING
-        instance = self.create(hostname=hostname, ip_address=ip_address, listener_port=listener_port, node_type=node_type, **create_defaults, **uuid_option)
+        instance = self.create(hostname=hostname, ip_address=ip_address, node_type=node_type, **create_defaults, **uuid_option)

         return (True, instance)
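With listener_port dropped from the signature, a registration call now carries only the instance's identity and type; listener/peering data lives on ReceptorAddress instead. A sketch of the new call shape (commented out since it needs a configured AWX environment; hostname and version values are illustrative):

    # from awx.main.models import Instance
    # changed, instance = Instance.objects.register(
    #     hostname='exec1.example.org',
    #     node_type='execution',
    #     defaults={'version': '1.4.1'},
    # )
    # 'changed' is True when a row was created or updated.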
@@ -1,25 +1,25 @@
 # Copyright (c) 2015 Ansible, Inc.
 # All Rights Reserved.

+import functools
 import logging
 import threading
 import time
 import urllib.parse
+from pathlib import Path, PurePosixPath

 from django.conf import settings
 from django.contrib.auth import logout
-from django.contrib.auth.models import User
-from django.db.migrations.executor import MigrationExecutor
+from django.db.migrations.recorder import MigrationRecorder
 from django.db import connection
 from django.shortcuts import redirect
-from django.apps import apps
 from django.utils.deprecation import MiddlewareMixin
-from django.utils.translation import gettext_lazy as _
 from django.urls import reverse, resolve

-from awx.main.utils.named_url_graph import generate_graph, GraphNode
-from awx.conf import fields, register
+from awx.main import migrations
 from awx.main.utils.profiling import AWXProfiler
+from awx.main.utils.common import memoize
+from awx.urls import get_urlpatterns


 logger = logging.getLogger('awx.main.middleware')
@@ -97,49 +97,7 @@ class DisableLocalAuthMiddleware(MiddlewareMixin):
             logout(request)


-def _customize_graph():
-    from awx.main.models import Instance, Schedule, UnifiedJobTemplate
-
-    for model in [Schedule, UnifiedJobTemplate]:
-        if model in settings.NAMED_URL_GRAPH:
-            settings.NAMED_URL_GRAPH[model].remove_bindings()
-            settings.NAMED_URL_GRAPH.pop(model)
-    if User not in settings.NAMED_URL_GRAPH:
-        settings.NAMED_URL_GRAPH[User] = GraphNode(User, ['username'], [])
-        settings.NAMED_URL_GRAPH[User].add_bindings()
-    if Instance not in settings.NAMED_URL_GRAPH:
-        settings.NAMED_URL_GRAPH[Instance] = GraphNode(Instance, ['hostname'], [])
-        settings.NAMED_URL_GRAPH[Instance].add_bindings()
-
-
 class URLModificationMiddleware(MiddlewareMixin):
-    def __init__(self, get_response):
-        models = [m for m in apps.get_app_config('main').get_models() if hasattr(m, 'get_absolute_url')]
-        generate_graph(models)
-        _customize_graph()
-        register(
-            'NAMED_URL_FORMATS',
-            field_class=fields.DictField,
-            read_only=True,
-            label=_('Formats of all available named urls'),
-            help_text=_('Read-only list of key-value pairs that shows the standard format of all available named URLs.'),
-            category=_('Named URL'),
-            category_slug='named-url',
-        )
-        register(
-            'NAMED_URL_GRAPH_NODES',
-            field_class=fields.DictField,
-            read_only=True,
-            label=_('List of all named url graph nodes.'),
-            help_text=_(
-                'Read-only list of key-value pairs that exposes named URL graph topology.'
-                ' Use this list to programmatically generate named URLs for resources'
-            ),
-            category=_('Named URL'),
-            category_slug='named-url',
-        )
-        super().__init__(get_response)
-
     @staticmethod
     def _hijack_for_old_jt_name(node, kwargs, named_url):
         try:
@@ -180,14 +138,36 @@ class URLModificationMiddleware(MiddlewareMixin):

     @classmethod
     def _convert_named_url(cls, url_path):
-        url_units = url_path.split('/')
-        # If the identifier is an empty string, it is always invalid.
-        if len(url_units) < 6 or url_units[1] != 'api' or url_units[2] not in ['v2'] or not url_units[4]:
-            return url_path
-        resource = url_units[3]
+        default_prefix = PurePosixPath('/api/v2/')
+        optional_prefix = PurePosixPath(f'/api/{settings.OPTIONAL_API_URLPATTERN_PREFIX}/v2/')
+
+        url_path_original = url_path
+        url_path = PurePosixPath(url_path)
+
+        if set(optional_prefix.parts).issubset(set(url_path.parts)):
+            url_prefix = optional_prefix
+        elif set(default_prefix.parts).issubset(set(url_path.parts)):
+            url_prefix = default_prefix
+        else:
+            return url_path_original
+
+        # Remove prefix
+        url_path = PurePosixPath(*url_path.parts[len(url_prefix.parts) :])
+        try:
+            resource_path = PurePosixPath(url_path.parts[0])
+            name = url_path.parts[1]
+            url_suffix = PurePosixPath(*url_path.parts[2:])  # remove name and resource
+        except IndexError:
+            return url_path_original
+
+        resource = resource_path.parts[0]
         if resource in settings.NAMED_URL_MAPPINGS:
-            url_units[4] = cls._named_url_to_pk(settings.NAMED_URL_GRAPH[settings.NAMED_URL_MAPPINGS[resource]], resource, url_units[4])
-        return '/'.join(url_units)
+            pk = PurePosixPath(cls._named_url_to_pk(settings.NAMED_URL_GRAPH[settings.NAMED_URL_MAPPINGS[resource]], resource, name))
+        else:
+            return url_path_original
+
+        parts = url_prefix.parts + resource_path.parts + pk.parts + url_suffix.parts
+        return PurePosixPath(*parts).as_posix() + '/'

     def process_request(self, request):
         old_path = request.path_info
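A worked example of the prefix-and-rebuild flow above, runnable on its own; the lookup step (_named_url_to_pk) is faked with a fixed pk, and the URL is illustrative:

    from pathlib import PurePosixPath

    url = PurePosixPath('/api/v2/organizations/Default/teams/')
    prefix = PurePosixPath('/api/v2/')
    rest = PurePosixPath(*url.parts[len(prefix.parts):])  # organizations/Default/teams
    resource, name = rest.parts[0], rest.parts[1]         # 'organizations', 'Default'
    suffix = PurePosixPath(*rest.parts[2:])               # 'teams'
    pk = PurePosixPath('1')  # pretend the named URL 'Default' resolves to pk 1
    rebuilt = PurePosixPath(*(prefix.parts + (resource,) + pk.parts + suffix.parts))
    print(rebuilt.as_posix() + '/')  # /api/v2/organizations/1/teams/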
@@ -198,9 +178,46 @@ class URLModificationMiddleware(MiddlewareMixin):
             request.path_info = new_path


+@memoize(ttl=20)
+def is_migrating():
+    latest_number = 0
+    latest_name = ''
+    for migration_path in Path(migrations.__path__[0]).glob('[0-9]*.py'):
+        try:
+            migration_number = int(migration_path.name.split('_', 1)[0])
+        except ValueError:
+            continue
+        if migration_number > latest_number:
+            latest_number = migration_number
+            latest_name = migration_path.name[: -len('.py')]
+    return not MigrationRecorder(connection).migration_qs.filter(app='main', name=latest_name).exists()
+
+
 class MigrationRanCheckMiddleware(MiddlewareMixin):
     def process_request(self, request):
-        executor = MigrationExecutor(connection)
-        plan = executor.migration_plan(executor.loader.graph.leaf_nodes())
-        if bool(plan) and getattr(resolve(request.path), 'url_name', '') != 'migrations_notran':
+        if is_migrating() and getattr(resolve(request.path), 'url_name', '') != 'migrations_notran':
             return redirect(reverse("ui:migrations_notran"))
+
+
+class OptionalURLPrefixPath(MiddlewareMixin):
+    @functools.lru_cache
+    def _url_optional(self, prefix):
+        # Relevant Django code path: https://github.com/django/django/blob/stable/4.2.x/django/core/handlers/base.py#L300
+        #
+        # resolve_request(request)
+        #   get_resolver(request.urlconf)
+        #     _get_cached_resolver(request.urlconf)  <-- cached via @functools.cache
+        #
+        # Django will attempt to cache the value(s) of request.urlconf.
+        # Being hashable is a prerequisite for being cacheable.
+        # tuple() is hashable; list() is not. Hence the tuple(list()) wrap.
+        return tuple(get_urlpatterns(prefix=prefix))
+
+    def process_request(self, request):
+        prefix = settings.OPTIONAL_API_URLPATTERN_PREFIX
+
+        if request.path.startswith(f"/api/{prefix}"):
+            request.urlconf = self._url_optional(prefix)
+        else:
+            request.urlconf = 'awx.urls'
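The hashability note in the middleware comments can be demonstrated in isolation: Django caches URL resolvers keyed by the urlconf value, and a cache key must be hashable. A minimal sketch (get_resolver_for is a stand-in for Django's internal cached resolver, not a real Django API):

    import functools

    @functools.cache
    def get_resolver_for(urlconf):  # stand-in for Django's _get_cached_resolver
        return f'resolver for {urlconf!r}'

    get_resolver_for(('pattern-a', 'pattern-b'))    # fine: tuples hash
    # get_resolver_for(['pattern-a', 'pattern-b'])  # TypeError: unhashable type: 'list'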
@@ -9,6 +9,7 @@ from django.conf import settings
 # AWX
 import awx.main.fields
 from awx.main.models import Host
+from ._sqlite_helper import dbawaremigrations


 def replaces():
@@ -131,9 +132,11 @@ class Migration(migrations.Migration):
                 help_text='If enabled, Tower will act as an Ansible Fact Cache Plugin; persisting facts at the end of a playbook run to the database and caching facts for use by Ansible.',
             ),
         ),
-        migrations.RunSQL(
+        dbawaremigrations.RunSQL(
             sql="CREATE INDEX host_ansible_facts_default_gin ON {} USING gin(ansible_facts jsonb_path_ops);".format(Host._meta.db_table),
             reverse_sql='DROP INDEX host_ansible_facts_default_gin;',
+            sqlite_sql=dbawaremigrations.RunSQL.noop,
+            sqlite_reverse_sql=dbawaremigrations.RunSQL.noop,
         ),
         # SCM file-based inventories
         migrations.AddField(
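The _sqlite_helper module itself is not shown in this diff, so the following is only a plausible, hypothetical shape for a db-aware RunSQL: pick which SQL to run based on the connection vendor, falling back to the PostgreSQL statements otherwise.

    # Illustrative only; the real dbawaremigrations helper may differ.
    from django.db import connection, migrations

    class VendorAwareRunSQL(migrations.RunSQL):
        def __init__(self, sql, sqlite_sql=None, **kwargs):
            # On SQLite, substitute the SQLite-safe statements (or a noop).
            if connection.vendor == 'sqlite' and sqlite_sql is not None:
                sql = sqlite_sql
            super().__init__(sql, **kwargs)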
@@ -17,49 +17,49 @@ class Migration(migrations.Migration):
             model_name='organization',
             name='execute_role',
             field=awx.main.fields.ImplicitRoleField(
-                null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
+                null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
             ),
         ),
         migrations.AddField(
             model_name='organization',
             name='job_template_admin_role',
             field=awx.main.fields.ImplicitRoleField(
-                editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
+                editable=False, null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
             ),
         ),
         migrations.AddField(
             model_name='organization',
             name='credential_admin_role',
             field=awx.main.fields.ImplicitRoleField(
-                null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
+                null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
             ),
         ),
         migrations.AddField(
             model_name='organization',
             name='inventory_admin_role',
             field=awx.main.fields.ImplicitRoleField(
-                null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
+                null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
             ),
         ),
         migrations.AddField(
             model_name='organization',
             name='project_admin_role',
             field=awx.main.fields.ImplicitRoleField(
-                null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
+                null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
             ),
         ),
         migrations.AddField(
             model_name='organization',
             name='workflow_admin_role',
             field=awx.main.fields.ImplicitRoleField(
-                null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
+                null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
             ),
         ),
         migrations.AddField(
             model_name='organization',
             name='notification_admin_role',
             field=awx.main.fields.ImplicitRoleField(
-                null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
+                null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
             ),
         ),
         migrations.AlterField(
@@ -67,7 +67,7 @@ class Migration(migrations.Migration):
             name='admin_role',
             field=awx.main.fields.ImplicitRoleField(
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=['singleton:system_administrator', 'organization.credential_admin_role'],
                 related_name='+',
                 to='main.Role',
@@ -77,7 +77,7 @@ class Migration(migrations.Migration):
             model_name='inventory',
             name='admin_role',
             field=awx.main.fields.ImplicitRoleField(
-                null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='organization.inventory_admin_role', related_name='+', to='main.Role'
+                null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='organization.inventory_admin_role', related_name='+', to='main.Role'
             ),
         ),
         migrations.AlterField(
@@ -85,7 +85,7 @@ class Migration(migrations.Migration):
             name='admin_role',
             field=awx.main.fields.ImplicitRoleField(
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=['organization.project_admin_role', 'singleton:system_administrator'],
                 related_name='+',
                 to='main.Role',
@@ -96,7 +96,7 @@ class Migration(migrations.Migration):
             name='admin_role',
             field=awx.main.fields.ImplicitRoleField(
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=['singleton:system_administrator', 'organization.workflow_admin_role'],
                 related_name='+',
                 to='main.Role',
@@ -107,7 +107,7 @@ class Migration(migrations.Migration):
             name='execute_role',
             field=awx.main.fields.ImplicitRoleField(
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=['admin_role', 'organization.execute_role'],
                 related_name='+',
                 to='main.Role',
@@ -119,7 +119,7 @@ class Migration(migrations.Migration):
             field=awx.main.fields.ImplicitRoleField(
                 editable=False,
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=['project.organization.job_template_admin_role', 'inventory.organization.job_template_admin_role'],
                 related_name='+',
                 to='main.Role',
@@ -130,7 +130,7 @@ class Migration(migrations.Migration):
             name='execute_role',
             field=awx.main.fields.ImplicitRoleField(
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=['admin_role', 'project.organization.execute_role', 'inventory.organization.execute_role'],
                 related_name='+',
                 to='main.Role',
@@ -142,7 +142,7 @@ class Migration(migrations.Migration):
             field=awx.main.fields.ImplicitRoleField(
                 editable=False,
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=[
                     'admin_role',
                     'execute_role',

@@ -18,7 +18,7 @@ class Migration(migrations.Migration):
             model_name='organization',
             name='member_role',
             field=awx.main.fields.ImplicitRoleField(
-                editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['admin_role'], related_name='+', to='main.Role'
+                editable=False, null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role=['admin_role'], related_name='+', to='main.Role'
             ),
         ),
         migrations.AlterField(
@@ -27,7 +27,7 @@ class Migration(migrations.Migration):
             field=awx.main.fields.ImplicitRoleField(
                 editable=False,
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=[
                     'member_role',
                     'auditor_role',
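All of these historical migrations flip the implicit role foreign keys from CASCADE to SET_NULL, so deleting a referenced Role now clears the pointer instead of deleting the row that holds it. The behavioral difference in a minimal illustrative Django model (not an AWX model; it just shows the on_delete semantics):

    from django.db import models

    class Example(models.Model):
        # With SET_NULL (and null=True), deleting the referenced Role sets
        # this column to NULL; with CASCADE, the Example row itself would
        # have been deleted.
        role = models.ForeignKey('main.Role', null=True, on_delete=models.SET_NULL, related_name='+')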
@@ -3,24 +3,27 @@ from __future__ import unicode_literals

 from django.db import migrations

+from ._sqlite_helper import dbawaremigrations
+
+tables_to_drop = [
+    'celery_taskmeta',
+    'celery_tasksetmeta',
+    'djcelery_crontabschedule',
+    'djcelery_intervalschedule',
+    'djcelery_periodictask',
+    'djcelery_periodictasks',
+    'djcelery_taskstate',
+    'djcelery_workerstate',
+    'djkombu_message',
+    'djkombu_queue',
+]
+
+postgres_sql = ([("DROP TABLE IF EXISTS {} CASCADE;".format(table))] for table in tables_to_drop)
+sqlite_sql = ([("DROP TABLE IF EXISTS {};".format(table))] for table in tables_to_drop)
+

 class Migration(migrations.Migration):
     dependencies = [
         ('main', '0049_v330_validate_instance_capacity_adjustment'),
     ]

-    operations = [
-        migrations.RunSQL([("DROP TABLE IF EXISTS {} CASCADE;".format(table))])
-        for table in (
-            'celery_taskmeta',
-            'celery_tasksetmeta',
-            'djcelery_crontabschedule',
-            'djcelery_intervalschedule',
-            'djcelery_periodictask',
-            'djcelery_periodictasks',
-            'djcelery_taskstate',
-            'djcelery_workerstate',
-            'djkombu_message',
-            'djkombu_queue',
-        )
-    ]
+    operations = [dbawaremigrations.RunSQL(p, sqlite_sql=s) for p, s in zip(postgres_sql, sqlite_sql)]
@@ -36,7 +36,7 @@ class Migration(migrations.Migration):
             model_name='organization',
             name='approval_role',
             field=awx.main.fields.ImplicitRoleField(
-                editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
+                editable=False, null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
             ),
             preserve_default='True',
         ),
@@ -46,7 +46,7 @@ class Migration(migrations.Migration):
             field=awx.main.fields.ImplicitRoleField(
                 editable=False,
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=['organization.approval_role', 'admin_role'],
                 related_name='+',
                 to='main.Role',
@@ -116,7 +116,7 @@ class Migration(migrations.Migration):
             field=awx.main.fields.ImplicitRoleField(
                 editable=False,
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=[
                     'member_role',
                     'auditor_role',
@@ -139,7 +139,7 @@ class Migration(migrations.Migration):
             field=awx.main.fields.ImplicitRoleField(
                 editable=False,
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=['singleton:system_auditor', 'organization.auditor_role', 'execute_role', 'admin_role', 'approval_role'],
                 related_name='+',
                 to='main.Role',
@@ -80,7 +80,7 @@ class Migration(migrations.Migration):
             field=awx.main.fields.ImplicitRoleField(
                 editable=False,
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=['organization.job_template_admin_role'],
                 related_name='+',
                 to='main.Role',
@@ -92,7 +92,7 @@ class Migration(migrations.Migration):
             field=awx.main.fields.ImplicitRoleField(
                 editable=False,
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=['admin_role', 'organization.execute_role'],
                 related_name='+',
                 to='main.Role',
@@ -104,7 +104,7 @@ class Migration(migrations.Migration):
             field=awx.main.fields.ImplicitRoleField(
                 editable=False,
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=['organization.auditor_role', 'inventory.organization.auditor_role', 'execute_role', 'admin_role'],
                 related_name='+',
                 to='main.Role',
@@ -2,6 +2,8 @@

 from django.db import migrations, models, connection

+from ._sqlite_helper import dbawaremigrations
+

 def migrate_event_data(apps, schema_editor):
     # see: https://github.com/ansible/awx/issues/6010
@@ -24,6 +26,11 @@ def migrate_event_data(apps, schema_editor):
             cursor.execute(f'ALTER TABLE {tblname} ALTER COLUMN id TYPE bigint USING id::bigint;')


+def migrate_event_data_sqlite(apps, schema_editor):
+    # TODO: cmeyers fill this in
+    return
+
+
 class FakeAlterField(migrations.AlterField):
     def database_forwards(self, *args):
         # this is intentionally left blank, because we're
@@ -37,7 +44,7 @@ class Migration(migrations.Migration):
     ]

     operations = [
-        migrations.RunPython(migrate_event_data),
+        dbawaremigrations.RunPython(migrate_event_data, sqlite_code=migrate_event_data_sqlite),
         FakeAlterField(
             model_name='adhoccommandevent',
             name='id',
@@ -26,7 +26,7 @@ class Migration(migrations.Migration):
             model_name='organization',
             name='execution_environment_admin_role',
             field=awx.main.fields.ImplicitRoleField(
-                editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
+                editable=False, null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
             ),
             preserve_default='True',
         ),
@@ -17,7 +17,7 @@ class Migration(migrations.Migration):
             field=awx.main.fields.ImplicitRoleField(
                 editable=False,
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=[
                     'member_role',
                     'auditor_role',
@@ -1,5 +1,7 @@
 from django.db import migrations, models, connection

+from ._sqlite_helper import dbawaremigrations
+

 def migrate_event_data(apps, schema_editor):
     # see: https://github.com/ansible/awx/issues/9039
@@ -59,6 +61,10 @@ def migrate_event_data(apps, schema_editor):
         cursor.execute('DROP INDEX IF EXISTS main_jobevent_job_id_idx')


+def migrate_event_data_sqlite(apps, schema_editor):
+    return None
+
+
 class FakeAddField(migrations.AddField):
     def database_forwards(self, *args):
         # this is intentionally left blank, because we're
@@ -72,7 +78,7 @@ class Migration(migrations.Migration):
     ]

     operations = [
-        migrations.RunPython(migrate_event_data),
+        dbawaremigrations.RunPython(migrate_event_data, sqlite_code=migrate_event_data_sqlite),
         FakeAddField(
             model_name='jobevent',
             name='job_created',
@@ -17,7 +17,7 @@ class Migration(migrations.Migration):
             field=awx.main.fields.ImplicitRoleField(
                 editable=False,
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=['singleton:system_administrator'],
                 related_name='+',
                 to='main.role',
@@ -30,7 +30,7 @@ class Migration(migrations.Migration):
             field=awx.main.fields.ImplicitRoleField(
                 editable=False,
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=['singleton:system_auditor', 'use_role', 'admin_role'],
                 related_name='+',
                 to='main.role',
@@ -41,7 +41,7 @@ class Migration(migrations.Migration):
             model_name='instancegroup',
             name='use_role',
             field=awx.main.fields.ImplicitRoleField(
-                editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['admin_role'], related_name='+', to='main.role'
+                editable=False, null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role=['admin_role'], related_name='+', to='main.role'
             ),
             preserve_default='True',
         ),
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user