Mirror of https://github.com/ansible/awx.git (synced 2026-02-07 04:28:23 -03:30)

Compare commits (537 commits)
The commit list spans 537 commits, from 9bf721665d (newest) to d49a61b63e (oldest); only the SHA1 column of the table is populated in this mirror, with no author, date, or message metadata.
**.gitignore** (vendored): 3 lines changed
```diff
@@ -34,8 +34,6 @@ awx/ui_next/coverage/
 awx/ui_next/build
 awx/ui_next/.env.local
 rsyslog.pid
-/tower-license
-/tower-license/**
 tools/prometheus/data
 tools/docker-compose/Dockerfile

@@ -147,3 +145,4 @@ use_dev_supervisor.txt
 .idea/*
 *.unison.tmp
 *.#
+/tools/docker-compose/overrides/
```
**CHANGELOG.md**: 18 lines changed
```diff
@@ -2,6 +2,22 @@

 This is a list of high-level changes for each release of AWX. A full list of commits can be found at `https://github.com/ansible/awx/releases/tag/<version>`.

+## 16.0.0 (December 10, 2020)
+- AWX now ships with a reimagined user interface. **Please read this before upgrading:** https://groups.google.com/g/awx-project/c/KuT5Ao92HWo
+- Removed support for syncing inventory from Red Hat CloudForms - https://github.com/ansible/awx/commit/0b701b3b2
+- Removed support for Mercurial-based project updates - https://github.com/ansible/awx/issues/7932
+- Upgraded NodeJS to actively maintained LTS 14.15.1 - https://github.com/ansible/awx/pull/8766
+- Added Git-LFS to the default image build - https://github.com/ansible/awx/pull/8700
+- Added the ability to specify `metadata.labels` in the podspec for container groups - https://github.com/ansible/awx/issues/8486
+- Added support for Kubernetes pod annotations - https://github.com/ansible/awx/pull/8434
+- Added the ability to label the web container in local Docker installs - https://github.com/ansible/awx/pull/8449
+- Added additional metadata (as an extra var) to playbook runs to report the SCM branch name - https://github.com/ansible/awx/pull/8433
+- Fixed a bug that caused k8s installations to fail due to an incorrect Helm repo - https://github.com/ansible/awx/issues/8715
+- Fixed a bug that prevented certain Workflow Approval resources from being deleted - https://github.com/ansible/awx/pull/8612
+- Fixed a bug that prevented the deletion of inventories stuck in "pending deletion" state - https://github.com/ansible/awx/issues/8525
+- Fixed a display bug in webhook notifications with certain unicode characters - https://github.com/ansible/awx/issues/7400
+- Improved support for exporting dependent objects (Inventory Hosts and Groups) in the `awx export` CLI tool - https://github.com/ansible/awx/commit/607bc0788
+
 ## 15.0.1 (October 20, 2020)
 - Added several optimizations to improve performance for a variety of high-load simultaneous job launch use cases https://github.com/ansible/awx/pull/8403
 - Added the ability to source roles and collections from requirements.yaml files (not just requirements.yml) - https://github.com/ansible/awx/issues/4540
@@ -88,7 +104,7 @@
 - Fixed a bug that caused rsyslogd's configuration file to have world-readable file permissions, potentially leaking secrets (CVE-2020-10782)

 ## 12.0.0 (Jun 9, 2020)
 - Removed memcached as a dependency of AWX (https://github.com/ansible/awx/pull/7240)
 - Moved to a single container image build instead of separate awx_web and awx_task images. The container image is just `awx` (https://github.com/ansible/awx/pull/7228)
 - Official AWX container image builds now use a two-stage container build process that notably reduces the size of our published images (https://github.com/ansible/awx/pull/7017)
 - Removed support for HipChat notifications ([EoL announcement](https://www.atlassian.com/partnerships/slack/faq#faq-98b17ca3-247f-423b-9a78-70a91681eff0)); all previously-created HipChat notification templates will be deleted due to this removal.
```
```diff
@@ -83,7 +83,7 @@ Before you can run a deployment, you'll need the following installed in your local environment:
 - [GNU Make](https://www.gnu.org/software/make/)
 - [Git](https://git-scm.com/) Requires Version 1.8.4+
 - Python 3.6+
-- [Node 10.x LTS version](https://nodejs.org/en/download/)
+- [Node 14.x LTS version](https://nodejs.org/en/download/)
+  - This is only required if you're [building your own container images](#official-vs-building-images) with `use_container_for_build=false`
 - [NPM 6.x LTS](https://docs.npmjs.com/)
+  - This is only required if you're [building your own container images](#official-vs-building-images) with `use_container_for_build=false`
```
```diff
@@ -4,8 +4,6 @@ recursive-include awx *.mo
 recursive-include awx/static *
 recursive-include awx/templates *.html
 recursive-include awx/api/templates *.md *.html
-recursive-include awx/ui/templates *.html
-recursive-include awx/ui/static *
 recursive-include awx/ui_next/build *.html
 recursive-include awx/ui_next/build *
 recursive-include awx/playbooks *.yml
```
**Makefile**: 139 lines changed
```diff
@@ -56,11 +56,6 @@ WHEEL_COMMAND ?= bdist_wheel
 SDIST_TAR_FILE ?= $(SDIST_TAR_NAME).tar.gz
 WHEEL_FILE ?= $(WHEEL_NAME)-py2-none-any.whl

-# UI flag files
-UI_DEPS_FLAG_FILE = awx/ui/.deps_built
-UI_RELEASE_DEPS_FLAG_FILE = awx/ui/.release_deps_built
-UI_RELEASE_FLAG_FILE = awx/ui/.release_built
-
 I18N_FLAG_FILE = .i18n_built

 .PHONY: awx-link clean clean-tmp clean-venv requirements requirements_dev \
@@ -70,22 +65,6 @@ I18N_FLAG_FILE = .i18n_built
 	ui-docker-machine ui-docker ui-release ui-devel \
 	ui-test ui-deps ui-test-ci VERSION

-# remove ui build artifacts
-clean-ui: clean-languages
-	rm -rf awx/ui/static/
-	rm -rf awx/ui/node_modules/
-	rm -rf awx/ui/test/unit/reports/
-	rm -rf awx/ui/test/spec/reports/
-	rm -rf awx/ui/test/e2e/reports/
-	rm -rf awx/ui/client/languages/
-	rm -rf awx/ui_next/node_modules/
-	rm -rf node_modules
-	rm -rf awx/ui_next/coverage/
-	rm -rf awx/ui_next/build/locales/_build/
-	rm -f $(UI_DEPS_FLAG_FILE)
-	rm -f $(UI_RELEASE_DEPS_FLAG_FILE)
-	rm -f $(UI_RELEASE_FLAG_FILE)
-
 clean-tmp:
 	rm -rf tmp/
@@ -480,110 +459,23 @@ else
 	@echo No PO files
 endif

-# generate UI .pot
-pot: $(UI_DEPS_FLAG_FILE)
-	$(NPM_BIN) --prefix awx/ui run pot
-
-# generate django .pot .po
-LANG = "en-us"
-messages:
-	@if [ "$(VENV_BASE)" ]; then \
-		. $(VENV_BASE)/awx/bin/activate; \
-	fi; \
-	$(PYTHON) manage.py makemessages -l $(LANG) --keep-pot
-
-# generate l10n .json .mo
-languages: $(I18N_FLAG_FILE)
-
-$(I18N_FLAG_FILE): $(UI_RELEASE_DEPS_FLAG_FILE)
-	$(NPM_BIN) --prefix awx/ui run languages
-	$(PYTHON) tools/scripts/compilemessages.py
-	touch $(I18N_FLAG_FILE)
-
-# End l10n TASKS
-# --------------------------------------
-
-# UI RELEASE TASKS
-# --------------------------------------
-ui-release: $(UI_RELEASE_FLAG_FILE)
-
-$(UI_RELEASE_FLAG_FILE): $(I18N_FLAG_FILE) $(UI_RELEASE_DEPS_FLAG_FILE)
-	$(NPM_BIN) --prefix awx/ui run build-release
-	touch $(UI_RELEASE_FLAG_FILE)
-
-$(UI_RELEASE_DEPS_FLAG_FILE):
-	PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=1 $(NPM_BIN) --unsafe-perm --prefix awx/ui ci --no-save awx/ui
-	touch $(UI_RELEASE_DEPS_FLAG_FILE)
-
-# END UI RELEASE TASKS
-# --------------------------------------
-
-# UI TASKS
-# --------------------------------------
-ui-deps: $(UI_DEPS_FLAG_FILE)
-
-$(UI_DEPS_FLAG_FILE):
-	@if [ -f ${UI_RELEASE_DEPS_FLAG_FILE} ]; then \
-		rm -rf awx/ui/node_modules; \
-		rm -f ${UI_RELEASE_DEPS_FLAG_FILE}; \
-	fi; \
-	$(NPM_BIN) --unsafe-perm --prefix awx/ui ci --no-save awx/ui
-	touch $(UI_DEPS_FLAG_FILE)
-
-ui-docker-machine: $(UI_DEPS_FLAG_FILE)
-	$(NPM_BIN) --prefix awx/ui run ui-docker-machine -- $(MAKEFLAGS)
-
-# Native docker. Builds UI and raises BrowserSync & filesystem polling.
-ui-docker: $(UI_DEPS_FLAG_FILE)
-	$(NPM_BIN) --prefix awx/ui run ui-docker -- $(MAKEFLAGS)
-
-# Builds UI with development UI without raising browser-sync or filesystem polling.
-ui-devel: $(UI_DEPS_FLAG_FILE)
-	$(NPM_BIN) --prefix awx/ui run build-devel -- $(MAKEFLAGS)
-
-ui-test: $(UI_DEPS_FLAG_FILE)
-	$(NPM_BIN) --prefix awx/ui run test
-
-ui-lint: $(UI_DEPS_FLAG_FILE)
-	$(NPM_BIN) run --prefix awx/ui jshint
-	$(NPM_BIN) run --prefix awx/ui lint
-
-# A standard go-to target for API developers to use building the frontend
-ui: clean-ui ui-devel
-
-ui-test-ci: $(UI_DEPS_FLAG_FILE)
-	$(NPM_BIN) --prefix awx/ui run test:ci
-	$(NPM_BIN) --prefix awx/ui run unit
-
-jshint: $(UI_DEPS_FLAG_FILE)
-	$(NPM_BIN) run --prefix awx/ui jshint
-	$(NPM_BIN) run --prefix awx/ui lint
-
-ui-zuul-lint-and-test:
-	CHROMIUM_BIN=$(CHROMIUM_BIN) ./awx/ui/build/zuul_download_chromium.sh
-	CHROMIUM_BIN=$(CHROMIUM_BIN) PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=1 $(NPM_BIN) --unsafe-perm --prefix awx/ui ci --no-save awx/ui
-	CHROMIUM_BIN=$(CHROMIUM_BIN) $(NPM_BIN) run --prefix awx/ui jshint
-	CHROMIUM_BIN=$(CHROMIUM_BIN) $(NPM_BIN) run --prefix awx/ui lint
-	CHROME_BIN=$(CHROMIUM_BIN) $(NPM_BIN) --prefix awx/ui run test:ci
-	CHROME_BIN=$(CHROMIUM_BIN) $(NPM_BIN) --prefix awx/ui run unit
-
 # END UI TASKS
 # --------------------------------------

 # UI NEXT TASKS
 # --------------------------------------

 awx/ui_next/node_modules:
 	$(NPM_BIN) --prefix awx/ui_next install

-ui-release-next:
-	mkdir -p awx/ui_next/build/static
-	touch awx/ui_next/build/static/.placeholder
+clean-ui:
+	rm -rf node_modules
+	rm -rf awx/ui_next/node_modules
+	rm -rf awx/ui_next/build

-ui-devel-next: awx/ui_next/node_modules
+ui-release: ui-devel
+
+ui-devel: awx/ui_next/node_modules
 	$(NPM_BIN) --prefix awx/ui_next run extract-strings
 	$(NPM_BIN) --prefix awx/ui_next run compile-strings
 	$(NPM_BIN) --prefix awx/ui_next run build
 	git checkout awx/ui_next/src/locales
 	mkdir -p awx/public/static/css
 	mkdir -p awx/public/static/js
 	mkdir -p awx/public/static/media
@@ -591,19 +483,12 @@ ui-devel-next: awx/ui_next/node_modules
 	cp -r awx/ui_next/build/static/js/* awx/public/static/js
 	cp -r awx/ui_next/build/static/media/* awx/public/static/media

-clean-ui-next:
-	rm -rf node_modules
-	rm -rf awx/ui_next/node_modules
-	rm -rf awx/ui_next/build
-
-ui-next-zuul-lint-and-test:
+ui-zuul-lint-and-test:
 	$(NPM_BIN) --prefix awx/ui_next install
 	$(NPM_BIN) run --prefix awx/ui_next lint
 	$(NPM_BIN) run --prefix awx/ui_next prettier-check
 	$(NPM_BIN) run --prefix awx/ui_next test

 # END UI NEXT TASKS
 # --------------------------------------

 # Build a pip-installable package into dist/ with a timestamped version number.
 dev_build:
@@ -613,10 +498,10 @@ dev_build:
 release_build:
 	$(PYTHON) setup.py release_build

-dist/$(SDIST_TAR_FILE): ui-release ui-release-next VERSION
+dist/$(SDIST_TAR_FILE): ui-release VERSION
 	$(PYTHON) setup.py $(SDIST_COMMAND)

-dist/$(WHEEL_FILE): ui-release ui-release-next
+dist/$(WHEEL_FILE): ui-release
 	$(PYTHON) setup.py $(WHEEL_COMMAND)

 sdist: dist/$(SDIST_TAR_FILE)
@@ -650,9 +535,11 @@ awx/projects:
 docker-compose-isolated: awx/projects
 	CURRENT_UID=$(shell id -u) TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose -f tools/docker-compose.yml -f tools/docker-isolated-override.yml up

+COMPOSE_UP_OPTS ?=
+
 # Docker Compose Development environment
 docker-compose: docker-auth awx/projects
-	CURRENT_UID=$(shell id -u) OS="$(shell docker info | grep 'Operating System')" TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose -f tools/docker-compose.yml up --no-recreate awx
+	CURRENT_UID=$(shell id -u) OS="$(shell docker info | grep 'Operating System')" TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose -f tools/docker-compose.yml $(COMPOSE_UP_OPTS) up --no-recreate awx

 docker-compose-cluster: docker-auth awx/projects
 	CURRENT_UID=$(shell id -u) TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose -f tools/docker-compose-cluster.yml up
```
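The new `COMPOSE_UP_OPTS` hook simply splices extra flags into the `docker-compose ... up` invocation, so something like `make docker-compose COMPOSE_UP_OPTS=-d` should bring the development environment up detached (assuming standard docker-compose flag handling; the variable defaults to empty, preserving the old behavior).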
```diff
@@ -47,8 +47,6 @@ from awx.main.utils import (
     get_object_or_400,
     decrypt_field,
     get_awx_version,
-    get_licenser,
-    StubLicense
 )
 from awx.main.utils.db import get_all_field_names
 from awx.main.views import ApiErrorView
@@ -189,7 +187,8 @@ class APIView(views.APIView):
     '''
     Log warning for 400 requests.  Add header with elapsed time.
     '''
+    from awx.main.utils import get_licenser
+    from awx.main.utils.licensing import OpenLicense
     #
     # If the URL was rewritten, and we get a 404, we should entirely
     # replace the view in the request context with an ApiErrorView()
@@ -225,7 +224,8 @@ class APIView(views.APIView):
         response = super(APIView, self).finalize_response(request, response, *args, **kwargs)
         time_started = getattr(self, 'time_started', None)
         response['X-API-Product-Version'] = get_awx_version()
-        response['X-API-Product-Name'] = 'AWX' if isinstance(get_licenser(), StubLicense) else 'Red Hat Ansible Tower'
+        response['X-API-Product-Name'] = 'AWX' if isinstance(get_licenser(), OpenLicense) else 'Red Hat Ansible Tower'

         response['X-API-Node'] = settings.CLUSTER_HOST_ID
         if time_started:
             time_elapsed = time.time() - self.time_started
@@ -453,7 +453,7 @@ class BaseSerializer(serializers.ModelSerializer, metaclass=BaseSerializerMetaclass):
         if 'capability_map' not in self.context:
             if hasattr(self, 'polymorphic_base'):
                 model = self.polymorphic_base.Meta.model
-                prefetch_list = self.polymorphic_base._capabilities_prefetch
+                prefetch_list = self.polymorphic_base.capabilities_prefetch
             else:
                 model = self.Meta.model
                 prefetch_list = self.capabilities_prefetch
@@ -640,12 +640,9 @@ class EmptySerializer(serializers.Serializer):


 class UnifiedJobTemplateSerializer(BaseSerializer):
-    # As a base serializer, the capabilities prefetch is not used directly
-    _capabilities_prefetch = [
-        'admin', 'execute',
-        {'copy': ['jobtemplate.project.use', 'jobtemplate.inventory.use',
-                  'organization.workflow_admin']}
-    ]
+    # As a base serializer, the capabilities prefetch is not used directly,
+    # instead they are derived from the Workflow Job Template Serializer and the Job Template Serializer, respectively.
+    capabilities_prefetch = []

     class Meta:
         model = UnifiedJobTemplate
@@ -695,7 +692,7 @@ class UnifiedJobTemplateSerializer(BaseSerializer):
         serializer.polymorphic_base = self
         # capabilities prefetch is only valid for these models
         if isinstance(obj, (JobTemplate, WorkflowJobTemplate)):
-            serializer.capabilities_prefetch = self._capabilities_prefetch
+            serializer.capabilities_prefetch = serializer_class.capabilities_prefetch
         else:
             serializer.capabilities_prefetch = None
         return serializer.to_representation(obj)
@@ -1333,6 +1330,8 @@ class ProjectOptionsSerializer(BaseSerializer):
         scm_type = attrs.get('scm_type', u'') or u''
         if self.instance and not scm_type:
             valid_local_paths.append(self.instance.local_path)
+        if self.instance and scm_type and "local_path" in attrs and self.instance.local_path != attrs['local_path']:
+            errors['local_path'] = _(f'Cannot change local_path for {scm_type}-based projects')
         if scm_type:
             attrs.pop('local_path', None)
         if 'local_path' in attrs and attrs['local_path'] not in valid_local_paths:
@@ -1749,7 +1748,7 @@ class HostSerializer(BaseSerializerWithVariables):
             attrs['variables'] = json.dumps(vars_dict)
         if Group.objects.filter(name=name, inventory=inventory).exists():
             raise serializers.ValidationError(_('A Group with that name already exists.'))

         return super(HostSerializer, self).validate(attrs)

     def to_representation(self, obj):
@@ -3946,12 +3945,12 @@ class ProjectUpdateEventSerializer(JobEventSerializer):
         return UriCleaner.remove_sensitive(obj.stdout)

     def get_event_data(self, obj):
-        # the project update playbook uses the git, hg, or svn modules
+        # the project update playbook uses the git or svn modules
         # to clone repositories, and those modules are prone to printing
         # raw SCM URLs in their stdout (which *could* contain passwords)
         # attempt to detect and filter HTTP basic auth passwords in the stdout
         # of these types of events
-        if obj.event_data.get('task_action') in ('git', 'hg', 'svn'):
+        if obj.event_data.get('task_action') in ('git', 'svn'):
             try:
                 return json.loads(
                     UriCleaner.remove_sensitive(
@@ -4,7 +4,6 @@ The following lists the expected format and details of our rrules:
 * DTSTART is expected to be in UTC
 * INTERVAL is required
 * SECONDLY is not supported
-* TZID is not supported
 * RRULE must precede the rule statements
 * BYDAY is supported but not BYDAY with a numerical prefix
 * BYYEARDAY and BYWEEKNO are not supported
```
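As a rough illustration of the constraints above (a sketch, not AWX's own validation code), a schedule rrule satisfying them might look like the following, with DTSTART in UTC, an explicit INTERVAL, and a plain BYDAY with no numeric prefix:

```python
# Sketch of an rrule string satisfying the documented constraints:
# DTSTART in UTC, INTERVAL present, no SECONDLY/BYYEARDAY/BYWEEKNO,
# and BYDAY without a numeric prefix. AWX applies its own extra checks;
# this only demonstrates that the string is a well-formed rrule.
from dateutil import rrule

candidate = "DTSTART:20201210T120000Z\nRRULE:FREQ=WEEKLY;INTERVAL=1;BYDAY=MO,WE,FR"

parsed = rrule.rrulestr(candidate)
print(parsed[0])  # first occurrence
```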
```diff
@@ -8,7 +8,7 @@ The `period` of the data can be adjusted with:

     ?period=month

-Where `month` can be replaced with `week`, or `day`. `month` is the default.
+Where `month` can be replaced with `week`, `two_weeks`, or `day`. `month` is the default.

 The type of job can be filtered with:
```
```diff
@@ -15,6 +15,7 @@ from awx.api.views import (
     ApiV2PingView,
     ApiV2ConfigView,
     ApiV2SubscriptionView,
+    ApiV2AttachView,
     AuthView,
     UserMeList,
     DashboardView,
@@ -94,6 +95,7 @@ v2_urls = [
     url(r'^ping/$', ApiV2PingView.as_view(), name='api_v2_ping_view'),
     url(r'^config/$', ApiV2ConfigView.as_view(), name='api_v2_config_view'),
     url(r'^config/subscriptions/$', ApiV2SubscriptionView.as_view(), name='api_v2_subscription_view'),
+    url(r'^config/attach/$', ApiV2AttachView.as_view(), name='api_v2_attach_view'),
     url(r'^auth/$', AuthView.as_view()),
     url(r'^me/$', UserMeList.as_view(), name='user_me_list'),
     url(r'^dashboard/$', DashboardView.as_view(), name='dashboard_view'),
@@ -153,6 +153,7 @@ from awx.api.views.root import (  # noqa
     ApiV2PingView,
     ApiV2ConfigView,
     ApiV2SubscriptionView,
+    ApiV2AttachView,
 )
 from awx.api.views.webhooks import (  # noqa
     WebhookKeyView,
@@ -241,8 +242,6 @@ class DashboardView(APIView):
         git_failed_projects = git_projects.filter(last_job_failed=True)
         svn_projects = user_projects.filter(scm_type='svn')
         svn_failed_projects = svn_projects.filter(last_job_failed=True)
-        hg_projects = user_projects.filter(scm_type='hg')
-        hg_failed_projects = hg_projects.filter(last_job_failed=True)
         archive_projects = user_projects.filter(scm_type='archive')
         archive_failed_projects = archive_projects.filter(last_job_failed=True)
         data['scm_types'] = {}
@@ -256,11 +255,6 @@ class DashboardView(APIView):
                                    'failures_url': reverse('api:project_list', request=request) + "?scm_type=svn&last_job_failed=True",
                                    'total': svn_projects.count(),
                                    'failed': svn_failed_projects.count()}
-        data['scm_types']['hg'] = {'url': reverse('api:project_list', request=request) + "?scm_type=hg",
-                                   'label': 'Mercurial',
-                                   'failures_url': reverse('api:project_list', request=request) + "?scm_type=hg&last_job_failed=True",
-                                   'total': hg_projects.count(),
-                                   'failed': hg_failed_projects.count()}
         data['scm_types']['archive'] = {'url': reverse('api:project_list', request=request) + "?scm_type=archive",
                                         'label': 'Remote Archive',
                                         'failures_url': reverse('api:project_list', request=request) + "?scm_type=archive&last_job_failed=True",
@@ -316,6 +310,9 @@ class DashboardJobsGraphView(APIView):
         if period == 'month':
             end_date = start_date - dateutil.relativedelta.relativedelta(months=1)
             interval = 'days'
+        elif period == 'two_weeks':
+            end_date = start_date - dateutil.relativedelta.relativedelta(weeks=2)
+            interval = 'days'
         elif period == 'week':
             end_date = start_date - dateutil.relativedelta.relativedelta(weeks=1)
             interval = 'days'
```
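A minimal sketch of the period-to-window mapping the view now implements; the function name is illustrative, only the relativedelta arithmetic comes from the hunk above:

```python
# Sketch of the period -> date-window mapping; `two_weeks` simply
# subtracts a two-week relativedelta, same pattern as the other cases.
from datetime import datetime, timezone
import dateutil.relativedelta

def window_for(period, start_date=None):
    start_date = start_date or datetime.now(timezone.utc)
    delta = {
        'month': dateutil.relativedelta.relativedelta(months=1),
        'two_weeks': dateutil.relativedelta.relativedelta(weeks=2),
        'week': dateutil.relativedelta.relativedelta(weeks=1),
    }[period]
    return start_date - delta, 'days'

print(window_for('two_weeks'))
```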
```diff
@@ -4253,7 +4250,9 @@ class NotificationTemplateDetail(RetrieveUpdateDestroyAPIView):
         obj = self.get_object()
         if not request.user.can_access(self.model, 'delete', obj):
             return Response(status=status.HTTP_404_NOT_FOUND)
-        if obj.notifications.filter(status='pending').exists():
+
+        hours_old = now() - dateutil.relativedelta.relativedelta(hours=8)
+        if obj.notifications.filter(status='pending', created__gt=hours_old).exists():
             return Response({"error": _("Delete not allowed while there are pending notifications")},
                             status=status.HTTP_405_METHOD_NOT_ALLOWED)
         return super(NotificationTemplateDetail, self).delete(request, *args, **kwargs)
```
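The effect of the new guard, sketched outside of DRF: only notifications that are both pending and newer than eight hours block deletion, so templates with stale, stuck notifications can still be removed.

```python
# Sketch of the deletion guard: a pending notification only blocks the
# delete if it was created within the last 8 hours.
import dateutil.relativedelta
from django.utils.timezone import now

def deletion_blocked(notification_template):
    hours_old = now() - dateutil.relativedelta.relativedelta(hours=8)
    return notification_template.notifications.filter(
        status='pending', created__gt=hours_old
    ).exists()
```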
```diff
@@ -1,9 +1,10 @@
 # Copyright (c) 2018 Ansible, Inc.
 # All Rights Reserved.

+import base64
 import json
 import logging
 import operator
 from collections import OrderedDict

 from django.conf import settings
@@ -29,8 +30,8 @@ from awx.main.utils import (
     get_custom_venv_choices,
     to_python_boolean,
 )
+from awx.main.utils.licensing import validate_entitlement_manifest
 from awx.api.versioning import reverse, drf_reverse
 from awx.conf.license import get_license
 from awx.main.constants import PRIVILEGE_ESCALATION_METHODS
 from awx.main.models import (
     Project,
@@ -178,7 +179,7 @@ class ApiV2PingView(APIView):
 class ApiV2SubscriptionView(APIView):

     permission_classes = (IsAuthenticated,)
-    name = _('Configuration')
+    name = _('Subscriptions')
     swagger_topic = 'System Configuration'

     def check_permissions(self, request):
@@ -189,18 +190,18 @@ class ApiV2SubscriptionView(APIView):
     def post(self, request):
         from awx.main.utils.common import get_licenser
         data = request.data.copy()
-        if data.get('rh_password') == '$encrypted$':
-            data['rh_password'] = settings.REDHAT_PASSWORD
+        if data.get('subscriptions_password') == '$encrypted$':
+            data['subscriptions_password'] = settings.SUBSCRIPTIONS_PASSWORD
         try:
-            user, pw = data.get('rh_username'), data.get('rh_password')
+            user, pw = data.get('subscriptions_username'), data.get('subscriptions_password')
             with set_environ(**settings.AWX_TASK_ENV):
                 validated = get_licenser().validate_rh(user, pw)
             if user:
-                settings.REDHAT_USERNAME = data['rh_username']
+                settings.SUBSCRIPTIONS_USERNAME = data['subscriptions_username']
             if pw:
-                settings.REDHAT_PASSWORD = data['rh_password']
+                settings.SUBSCRIPTIONS_PASSWORD = data['subscriptions_password']
         except Exception as exc:
-            msg = _("Invalid License")
+            msg = _("Invalid Subscription")
             if (
                 isinstance(exc, requests.exceptions.HTTPError) and
                 getattr(getattr(exc, 'response', None), 'status_code', None) == 401
@@ -213,13 +214,63 @@ class ApiV2SubscriptionView(APIView):
             elif isinstance(exc, (ValueError, OSError)) and exc.args:
                 msg = exc.args[0]
             else:
-                logger.exception(smart_text(u"Invalid license submitted."),
+                logger.exception(smart_text(u"Invalid subscription submitted."),
                                  extra=dict(actor=request.user.username))
             return Response({"error": msg}, status=status.HTTP_400_BAD_REQUEST)

         return Response(validated)


+class ApiV2AttachView(APIView):
+
+    permission_classes = (IsAuthenticated,)
+    name = _('Attach Subscription')
+    swagger_topic = 'System Configuration'
+
+    def check_permissions(self, request):
+        super(ApiV2AttachView, self).check_permissions(request)
+        if not request.user.is_superuser and request.method.lower() not in {'options', 'head'}:
+            self.permission_denied(request)  # Raises PermissionDenied exception.
+
+    def post(self, request):
+        data = request.data.copy()
+        pool_id = data.get('pool_id', None)
+        if not pool_id:
+            return Response({"error": _("No subscription pool ID provided.")}, status=status.HTTP_400_BAD_REQUEST)
+        user = getattr(settings, 'SUBSCRIPTIONS_USERNAME', None)
+        pw = getattr(settings, 'SUBSCRIPTIONS_PASSWORD', None)
+        if pool_id and user and pw:
+            from awx.main.utils.common import get_licenser
+            data = request.data.copy()
+            try:
+                with set_environ(**settings.AWX_TASK_ENV):
+                    validated = get_licenser().validate_rh(user, pw)
+            except Exception as exc:
+                msg = _("Invalid Subscription")
+                if (
+                    isinstance(exc, requests.exceptions.HTTPError) and
+                    getattr(getattr(exc, 'response', None), 'status_code', None) == 401
+                ):
+                    msg = _("The provided credentials are invalid (HTTP 401).")
+                elif isinstance(exc, requests.exceptions.ProxyError):
+                    msg = _("Unable to connect to proxy server.")
+                elif isinstance(exc, requests.exceptions.ConnectionError):
+                    msg = _("Could not connect to subscription service.")
+                elif isinstance(exc, (ValueError, OSError)) and exc.args:
+                    msg = exc.args[0]
+                else:
+                    logger.exception(smart_text(u"Invalid subscription submitted."),
+                                     extra=dict(actor=request.user.username))
+                return Response({"error": msg}, status=status.HTTP_400_BAD_REQUEST)
+        for sub in validated:
+            if sub['pool_id'] == pool_id:
+                sub['valid_key'] = True
+                settings.LICENSE = sub
+                return Response(sub)
+
+        return Response({"error": _("Error processing subscription metadata.")}, status=status.HTTP_400_BAD_REQUEST)
+
+
 class ApiV2ConfigView(APIView):

     permission_classes = (IsAuthenticated,)
```
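A hedged usage sketch for the new endpoint (the URL and `pool_id` payload match the view above; host and credentials are placeholders):

```python
# Attach a subscription pool via the new /api/v2/config/attach/ endpoint.
# Host and credentials are placeholders; the caller must be a superuser
# per check_permissions() above.
import requests

resp = requests.post(
    "https://awx.example.com/api/v2/config/attach/",
    json={"pool_id": "1234567890abcdef"},
    auth=("admin", "password"),
    verify=True,
)
resp.raise_for_status()
subscription = resp.json()  # the matched pool, with valid_key set True
print(subscription.get("pool_id"))
```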
```diff
@@ -234,15 +285,11 @@ class ApiV2ConfigView(APIView):
     def get(self, request, format=None):
         '''Return various sitewide configuration settings'''

-        if request.user.is_superuser or request.user.is_system_auditor:
-            license_data = get_license(show_key=True)
-        else:
-            license_data = get_license(show_key=False)
+        from awx.main.utils.common import get_licenser
+        license_data = get_licenser().validate()

         if not license_data.get('valid_key', False):
             license_data = {}
-        if license_data and 'features' in license_data and 'activity_streams' in license_data['features']:
-            # FIXME: Make the final setting value dependent on the feature?
-            license_data['features']['activity_streams'] &= settings.ACTIVITY_STREAM_ENABLED

         pendo_state = settings.PENDO_TRACKING_STATE if settings.PENDO_TRACKING_STATE in ('off', 'anonymous', 'detailed') else 'off'
@@ -281,9 +328,10 @@ class ApiV2ConfigView(APIView):

         return Response(data)

+
     def post(self, request):
         if not isinstance(request.data, dict):
-            return Response({"error": _("Invalid license data")}, status=status.HTTP_400_BAD_REQUEST)
+            return Response({"error": _("Invalid subscription data")}, status=status.HTTP_400_BAD_REQUEST)
         if "eula_accepted" not in request.data:
             return Response({"error": _("Missing 'eula_accepted' property")}, status=status.HTTP_400_BAD_REQUEST)
         try:
```
```diff
@@ -300,25 +348,47 @@ class ApiV2ConfigView(APIView):
             logger.info(smart_text(u"Invalid JSON submitted for license."),
                         extra=dict(actor=request.user.username))
             return Response({"error": _("Invalid JSON")}, status=status.HTTP_400_BAD_REQUEST)
-        try:
-            from awx.main.utils.common import get_licenser
-            license_data = json.loads(data_actual)
-            license_data_validated = get_licenser(**license_data).validate()
-        except Exception:
-            logger.warning(smart_text(u"Invalid license submitted."),
-                           extra=dict(actor=request.user.username))
-            return Response({"error": _("Invalid License")}, status=status.HTTP_400_BAD_REQUEST)

+        from awx.main.utils.common import get_licenser
+        license_data = json.loads(data_actual)
+        if 'license_key' in license_data:
+            return Response({"error": _('Legacy license submitted. A subscription manifest is now required.')}, status=status.HTTP_400_BAD_REQUEST)
+        if 'manifest' in license_data:
+            try:
+                json_actual = json.loads(base64.b64decode(license_data['manifest']))
+                if 'license_key' in json_actual:
+                    return Response(
+                        {"error": _('Legacy license submitted. A subscription manifest is now required.')},
+                        status=status.HTTP_400_BAD_REQUEST
+                    )
+            except Exception:
+                pass
+            try:
+                license_data = validate_entitlement_manifest(license_data['manifest'])
+            except ValueError as e:
+                return Response({"error": str(e)}, status=status.HTTP_400_BAD_REQUEST)
+            except Exception:
+                logger.exception('Invalid manifest submitted. {}')
+                return Response({"error": _('Invalid manifest submitted.')}, status=status.HTTP_400_BAD_REQUEST)
+
+            try:
+                license_data_validated = get_licenser().license_from_manifest(license_data)
+            except Exception:
+                logger.warning(smart_text(u"Invalid subscription submitted."),
+                               extra=dict(actor=request.user.username))
+                return Response({"error": _("Invalid License")}, status=status.HTTP_400_BAD_REQUEST)
+        else:
+            license_data_validated = get_licenser().validate()

         # If the license is valid, write it to the database.
         if license_data_validated['valid_key']:
             settings.LICENSE = license_data
             if not settings_registry.is_setting_read_only('TOWER_URL_BASE'):
                 settings.TOWER_URL_BASE = "{}://{}".format(request.scheme, request.get_host())
             return Response(license_data_validated)

-        logger.warning(smart_text(u"Invalid license submitted."),
+        logger.warning(smart_text(u"Invalid subscription submitted."),
                        extra=dict(actor=request.user.username))
-        return Response({"error": _("Invalid license")}, status=status.HTTP_400_BAD_REQUEST)
+        return Response({"error": _("Invalid subscription")}, status=status.HTTP_400_BAD_REQUEST)

     def delete(self, request):
         try:
```
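The legacy-license detection boils down to base64-decoding the submitted manifest and looking for a `license_key` field; a self-contained sketch of just that check:

```python
# Sketch of the legacy-license check: a base64 "manifest" that decodes to
# JSON containing 'license_key' is an old-style license and is rejected.
# (A real entitlement manifest is typically a zip archive, not JSON.)
import base64
import json

def looks_like_legacy_license(manifest_b64):
    try:
        decoded = json.loads(base64.b64decode(manifest_b64))
    except Exception:
        return False
    return 'license_key' in decoded

payload = base64.b64encode(json.dumps({'license_key': 'abc'}).encode())
assert looks_like_legacy_license(payload)
```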
```diff
@@ -25,10 +25,12 @@ if MODE == 'production':
     try:
         fd = open("/var/lib/awx/.tower_version", "r")
         if fd.read().strip() != tower_version:
-            raise Exception()
-    except Exception:
+            raise ValueError()
+    except FileNotFoundError:
+        pass
+    except ValueError as e:
         logger.error("Missing or incorrect metadata for Tower version. Ensure Tower was installed using the setup playbook.")
-        raise Exception("Missing or incorrect metadata for Tower version. Ensure Tower was installed using the setup playbook.")
+        raise Exception("Missing or incorrect metadata for Tower version. Ensure Tower was installed using the setup playbook.") from e


 os.environ.setdefault("DJANGO_SETTINGS_MODULE", "awx.settings")
```
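The `from e` change preserves the original traceback via exception chaining; a minimal illustration:

```python
# Minimal illustration of `raise ... from e`: chaining keeps the original
# ValueError visible as the __cause__ of the new exception.
try:
    try:
        raise ValueError("version file contents did not match")
    except ValueError as e:
        raise Exception("Missing or incorrect metadata for Tower version.") from e
except Exception as exc:
    assert isinstance(exc.__cause__, ValueError)
    print(repr(exc.__cause__))
```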
```diff
@@ -1,18 +1,14 @@
 # Copyright (c) 2016 Ansible, Inc.
 # All Rights Reserved.


 __all__ = ['get_license']


 def _get_validated_license_data():
-    from awx.main.utils.common import get_licenser
+    from awx.main.utils import get_licenser
     return get_licenser().validate()


-def get_license(show_key=False):
+def get_license():
     """Return a dictionary representing the active license on this Tower instance."""
-    license_data = _get_validated_license_data()
-    if not show_key:
-        license_data.pop('license_key', None)
-    return license_data
+    return _get_validated_license_data()
```
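Callers simply drop the `show_key` argument now; a minimal usage sketch:

```python
# get_license() no longer takes show_key; it returns the validated
# license/subscription dict directly.
from awx.conf.license import get_license

license_info = get_license()  # previously get_license(show_key=False)
print(license_info.get('license_type', 'UNLICENSED'))
```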
**awx/conf/migrations/0008_subscriptions.py** (new file, 26 lines)
```diff
@@ -0,0 +1,26 @@
+# Generated by Django 2.2.11 on 2020-08-04 15:19
+
+import logging
+
+from django.db import migrations
+
+from awx.conf.migrations._subscriptions import clear_old_license, prefill_rh_credentials
+
+logger = logging.getLogger('awx.conf.migrations')
+
+
+def _noop(apps, schema_editor):
+    pass
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('conf', '0007_v380_rename_more_settings'),
+    ]
+
+    operations = [
+        migrations.RunPython(clear_old_license, _noop),
+        migrations.RunPython(prefill_rh_credentials, _noop)
+    ]
```
**awx/conf/migrations/_subscriptions.py** (new file, 34 lines)
```diff
@@ -0,0 +1,34 @@
+# -*- coding: utf-8 -*-
+import logging
+from django.utils.timezone import now
+from awx.main.utils.encryption import decrypt_field, encrypt_field
+
+logger = logging.getLogger('awx.conf.settings')
+
+__all__ = ['clear_old_license', 'prefill_rh_credentials']
+
+
+def clear_old_license(apps, schema_editor):
+    Setting = apps.get_model('conf', 'Setting')
+    Setting.objects.filter(key='LICENSE').delete()
+
+
+def _migrate_setting(apps, old_key, new_key, encrypted=False):
+    Setting = apps.get_model('conf', 'Setting')
+    if not Setting.objects.filter(key=old_key).exists():
+        return
+    new_setting = Setting.objects.create(key=new_key,
+                                         created=now(),
+                                         modified=now()
+                                         )
+    if encrypted:
+        new_setting.value = decrypt_field(Setting.objects.filter(key=old_key).first(), 'value')
+        new_setting.value = encrypt_field(new_setting, 'value')
+    else:
+        new_setting.value = getattr(Setting.objects.filter(key=old_key).first(), 'value')
+    new_setting.save()
+
+
+def prefill_rh_credentials(apps, schema_editor):
+    _migrate_setting(apps, 'REDHAT_USERNAME', 'SUBSCRIPTIONS_USERNAME', encrypted=False)
+    _migrate_setting(apps, 'REDHAT_PASSWORD', 'SUBSCRIPTIONS_PASSWORD', encrypted=True)
```
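The decrypt-then-re-encrypt step for `encrypted=True` is worth noting: AWX's setting encryption appears to be tied to the individual `Setting` row, so the old value cannot simply be copied across; it has to be decrypted against the source row and re-encrypted against the newly created one. This rationale is inferred from the helpers used here, not stated in the migration itself.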
```diff
@@ -78,14 +78,6 @@ class Setting(CreatedModifiedModel):
     def get_cache_id_key(self, key):
         return '{}_ID'.format(key)

-    def display_value(self):
-        if self.key == 'LICENSE' and 'license_key' in self.value:
-            # don't log the license key in activity stream
-            value = self.value.copy()
-            value['license_key'] = '********'
-            return value
-        return self.value
-

 import awx.conf.signals  # noqa
```

```diff
@@ -333,14 +333,14 @@ class BaseAccess(object):
             report_violation(_("License has expired."))

         free_instances = validation_info.get('free_instances', 0)
-        available_instances = validation_info.get('available_instances', 0)
+        instance_count = validation_info.get('instance_count', 0)

         if add_host_name:
             host_exists = Host.objects.filter(name=add_host_name).exists()
             if not host_exists and free_instances == 0:
-                report_violation(_("License count of %s instances has been reached.") % available_instances)
+                report_violation(_("License count of %s instances has been reached.") % instance_count)
             elif not host_exists and free_instances < 0:
-                report_violation(_("License count of %s instances has been exceeded.") % available_instances)
+                report_violation(_("License count of %s instances has been exceeded.") % instance_count)
             elif not add_host_name and free_instances < 0:
                 report_violation(_("Host count exceeds available instances."))
```
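A small sketch of the counting logic behind these messages (pure arithmetic, no AWX imports; function and parameter names are illustrative): `free_instances` gates whether a violation fires, while `instance_count`, the licensed total, is now what gets reported.

```python
# Sketch of the host-count check: free_instances decides whether a
# violation fires; instance_count is what the message reports.
def license_violations(instance_count, used_hosts, adding_new_host):
    free_instances = instance_count - used_hosts
    violations = []
    if adding_new_host and free_instances == 0:
        violations.append(f"License count of {instance_count} instances has been reached.")
    elif adding_new_host and free_instances < 0:
        violations.append(f"License count of {instance_count} instances has been exceeded.")
    elif not adding_new_host and free_instances < 0:
        violations.append("Host count exceeds available instances.")
    return violations

print(license_violations(instance_count=10, used_hosts=10, adding_new_host=True))
```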
```diff
@@ -33,9 +33,9 @@ data _since_ the last report date - i.e., new data in the last 24 hours)
 '''


-@register('config', '1.1', description=_('General platform configuration.'))
+@register('config', '1.2', description=_('General platform configuration.'))
 def config(since, **kwargs):
-    license_info = get_license(show_key=False)
+    license_info = get_license()
     install_type = 'traditional'
     if os.environ.get('container') == 'oci':
         install_type = 'openshift'
@@ -194,7 +194,6 @@ def instance_info(since, include_hostnames=False, **kwargs):
     return info

-
 @register('job_counts', '1.0', description=_('Counts of jobs by status'))
 def job_counts(since, **kwargs):
     counts = {}
     counts['total_jobs'] = models.UnifiedJob.objects.exclude(launch_type='sync').count()
@@ -204,7 +203,6 @@ def job_counts(since, **kwargs):
     return counts

-
 @register('job_instance_counts', '1.0', description=_('Counts of jobs by execution node'))
 def job_instance_counts(since, **kwargs):
     counts = {}
     job_types = models.UnifiedJob.objects.exclude(launch_type='sync').values_list(
@@ -282,14 +280,16 @@ def _copy_table(table, query, path):
     return file.file_list()


-@register('events_table', '1.1', format='csv', description=_('Automation task records'), expensive=True)
+@register('events_table', '1.2', format='csv', description=_('Automation task records'), expensive=True)
 def events_table(since, full_path, until, **kwargs):
     events_query = '''COPY (SELECT main_jobevent.id,
                              main_jobevent.created,
                              main_jobevent.modified,
                              main_jobevent.uuid,
                              main_jobevent.parent_uuid,
                              main_jobevent.event,
+                             main_jobevent.event_data::json->'task_action' AS task_action,
+                             (CASE WHEN event = 'playbook_on_stats' THEN event_data END) as playbook_on_stats,
                              main_jobevent.failed,
                              main_jobevent.changed,
                              main_jobevent.playbook,
```
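The new columns pull values out of the `event_data` JSON blob at export time; the same extraction can be tried standalone (table and column names come from the query above, the connection string is a placeholder):

```python
# Standalone version of the new JSON extractions in the events_table
# query; the DSN is a placeholder.
import psycopg2

SQL = """
SELECT id,
       event,
       event_data::json->'task_action' AS task_action,
       (CASE WHEN event = 'playbook_on_stats' THEN event_data END) AS playbook_on_stats
FROM main_jobevent
LIMIT 5
"""

with psycopg2.connect("dbname=awx user=awx host=localhost") as conn:
    with conn.cursor() as cur:
        cur.execute(SQL)
        for row in cur.fetchall():
            print(row)
```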
```diff
@@ -24,7 +24,7 @@ logger = logging.getLogger('awx.main.analytics')

 def _valid_license():
     try:
-        if get_license(show_key=False).get('license_type', 'UNLICENSED') == 'open':
+        if get_license().get('license_type', 'UNLICENSED') == 'open':
             return False
         access_registry[Job](None).check_license()
     except PermissionDenied:
@@ -68,7 +68,7 @@ def register(key, version, description=None, format='json', expensive=False):

     @register('projects_by_scm_type', 1)
     def projects_by_scm_type():
-        return {'git': 5, 'svn': 1, 'hg': 0}
+        return {'git': 5, 'svn': 1}
     """

     def decorate(f):
@@ -102,7 +102,7 @@ def gather(dest=None, module=None, subset = None, since = None, until = now(), c

     last_run = since or settings.AUTOMATION_ANALYTICS_LAST_GATHER or (now() - timedelta(weeks=4))
     logger.debug("Last analytics run was: {}".format(settings.AUTOMATION_ANALYTICS_LAST_GATHER))

     if _valid_license() is False:
         logger.exception("Invalid License provided, or No License Provided")
         return None
@@ -12,7 +12,7 @@ from prometheus_client import (
 from awx.conf.license import get_license
 from awx.main.utils import (get_awx_version, get_ansible_version)
 from awx.main.analytics.collectors import (
     counts,
     instance_info,
     job_instance_counts,
     job_counts,
@@ -54,7 +54,7 @@ LICENSE_INSTANCE_FREE = Gauge('awx_license_instance_free', 'Number of remaining

 def metrics():
-    license_info = get_license(show_key=False)
+    license_info = get_license()
     SYSTEM_INFO.info({
         'install_uuid': settings.INSTALL_UUID,
         'insights_analytics': str(settings.INSIGHTS_TRACKING_STATE),
@@ -68,7 +68,7 @@ def metrics():
         'external_logger_type': getattr(settings, 'LOG_AGGREGATOR_TYPE', 'None')
     })

-    LICENSE_INSTANCE_TOTAL.set(str(license_info.get('available_instances', 0)))
+    LICENSE_INSTANCE_TOTAL.set(str(license_info.get('instance_count', 0)))
     LICENSE_INSTANCE_FREE.set(str(license_info.get('free_instances', 0)))

     current_counts = counts(None)
```
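For reference, the gauge semantics here are plain `prometheus_client` usage; a self-contained sketch of the renamed field feeding the metric (values are illustrative):

```python
# Self-contained sketch: the awx_license_instance_total gauge is now fed
# from the 'instance_count' field instead of 'available_instances'.
from prometheus_client import Gauge

LICENSE_INSTANCE_TOTAL = Gauge('awx_license_instance_total', 'Total number of licensed instances')

license_info = {'instance_count': 100, 'free_instances': 25}  # illustrative values
LICENSE_INSTANCE_TOTAL.set(license_info.get('instance_count', 0))
```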
```diff
@@ -1,7 +1,5 @@
 # Python
-import json
 import logging
-import os

 # Django
 from django.utils.translation import ugettext_lazy as _
@@ -13,6 +11,7 @@ from rest_framework.fields import FloatField
 # Tower
 from awx.conf import fields, register, register_validate

+
 logger = logging.getLogger('awx.main.conf')

 register(
@@ -92,22 +91,10 @@ register(
 )


-def _load_default_license_from_file():
-    try:
-        license_file = os.environ.get('AWX_LICENSE_FILE', '/etc/tower/license')
-        if os.path.exists(license_file):
-            license_data = json.load(open(license_file))
-            logger.debug('Read license data from "%s".', license_file)
-            return license_data
-    except Exception:
-        logger.warning('Could not read license from "%s".', license_file, exc_info=True)
-    return {}
-
-
 register(
     'LICENSE',
     field_class=fields.DictField,
-    default=_load_default_license_from_file,
+    default=lambda: {},
     label=_('License'),
     help_text=_('The license controls which features and functionality are '
                 'enabled. Use /api/v2/config/ to update or change '
@@ -124,7 +111,7 @@ register(
     encrypted=False,
     read_only=False,
     label=_('Red Hat customer username'),
-    help_text=_('This username is used to retrieve license information and to send Automation Analytics'),  # noqa
+    help_text=_('This username is used to send data to Automation Analytics'),
     category=_('System'),
     category_slug='system',
 )
@@ -137,7 +124,33 @@ register(
     encrypted=True,
     read_only=False,
     label=_('Red Hat customer password'),
-    help_text=_('This password is used to retrieve license information and to send Automation Analytics'),  # noqa
+    help_text=_('This password is used to send data to Automation Analytics'),
     category=_('System'),
     category_slug='system',
 )

+register(
+    'SUBSCRIPTIONS_USERNAME',
+    field_class=fields.CharField,
+    default='',
+    allow_blank=True,
+    encrypted=False,
+    read_only=False,
+    label=_('Red Hat or Satellite username'),
+    help_text=_('This username is used to retrieve subscription and content information'),  # noqa
+    category=_('System'),
+    category_slug='system',
+)
+
+register(
+    'SUBSCRIPTIONS_PASSWORD',
+    field_class=fields.CharField,
+    default='',
+    allow_blank=True,
+    encrypted=True,
+    read_only=False,
+    label=_('Red Hat or Satellite password'),
+    help_text=_('This password is used to retrieve subscription and content information'),  # noqa
+    category=_('System'),
+    category_slug='system',
+)
```
@@ -1,10 +1,7 @@
|
||||
import cProfile
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import pstats
|
||||
import signal
|
||||
import tempfile
|
||||
import time
|
||||
import traceback
|
||||
|
||||
@@ -23,6 +20,7 @@ from awx.main.models import (JobEvent, AdHocCommandEvent, ProjectUpdateEvent,
|
||||
Job)
|
||||
from awx.main.tasks import handle_success_and_failure_notifications
|
||||
from awx.main.models.events import emit_event_detail
|
||||
from awx.main.utils.profiling import AWXProfiler
|
||||
|
||||
from .base import BaseWorker
|
||||
|
||||
@@ -48,6 +46,7 @@ class CallbackBrokerWorker(BaseWorker):
|
||||
self.buff = {}
|
||||
self.pid = os.getpid()
|
||||
self.redis = redis.Redis.from_url(settings.BROKER_URL)
|
||||
self.prof = AWXProfiler("CallbackBrokerWorker")
|
||||
for key in self.redis.keys('awx_callback_receiver_statistics_*'):
|
||||
self.redis.delete(key)
|
||||
|
||||
@@ -87,19 +86,12 @@ class CallbackBrokerWorker(BaseWorker):
         )

     def toggle_profiling(self, *args):
-        if self.prof:
-            self.prof.disable()
-            filename = f'callback-{self.pid}.pstats'
-            filepath = os.path.join(tempfile.gettempdir(), filename)
-            with open(filepath, 'w') as f:
-                pstats.Stats(self.prof, stream=f).sort_stats('cumulative').print_stats()
-            pstats.Stats(self.prof).dump_stats(filepath + '.raw')
-            self.prof = False
-            logger.error(f'profiling is disabled, wrote {filepath}')
-        else:
-            self.prof = cProfile.Profile()
-            self.prof.enable()
-            logger.error('profiling is enabled')
+        if not self.prof.is_started():
+            self.prof.start()
+            logger.error('profiling is enabled')
+        else:
+            filepath = self.prof.stop()
+            logger.error(f'profiling is disabled, wrote {filepath}')

     def work_loop(self, *args, **kw):
         if settings.AWX_CALLBACK_PROFILE:
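The inline cProfile/pstats bookkeeping removed above is exactly what the new AWXProfiler helper encapsulates. A minimal sketch of the start()/stop()/is_started() contract this hunk relies on, reconstructed from the deleted code rather than from the real awx.main.utils.profiling module:

# Illustrative stand-in for AWXProfiler; not the actual implementation.
import cProfile
import os
import pstats
import tempfile

class ProfilerSketch:
    def __init__(self, name):
        self.name = name
        self.prof = None

    def is_started(self):
        return self.prof is not None

    def start(self):
        self.prof = cProfile.Profile()
        self.prof.enable()

    def stop(self):
        # dump sorted stats to a file and return its path, as the old
        # inline code in toggle_profiling used to do
        self.prof.disable()
        filepath = os.path.join(tempfile.gettempdir(),
                                f'{self.name}-{os.getpid()}.pstats')
        with open(filepath, 'w') as f:
            pstats.Stats(self.prof, stream=f).sort_stats('cumulative').print_stats()
        self.prof = None
        return filepath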
@@ -30,3 +30,10 @@ class _AwxTaskError():


 AwxTaskError = _AwxTaskError()
+
+
+class PostRunError(Exception):
+    def __init__(self, msg, status='failed', tb=''):
+        self.status = status
+        self.tb = tb
+        super(PostRunError, self).__init__(msg)
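As the tasks.py hunk further down shows, this exception is caught around post_run_hook() so a hook can downgrade a job's final status and attach an explanation. A hedged usage sketch (the hook body here is invented for illustration):

# Illustrative only: a post_run_hook that flags an apparently successful
# run as failed, the pattern the new except PostRunError branch supports.
def post_run_hook(instance, status):
    artifacts_ok = False  # placeholder for a real validation step
    if status == 'successful' and not artifacts_ok:
        raise PostRunError('Artifacts failed validation', status='failed',
                           tb='optional traceback text')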
@@ -149,7 +149,6 @@ class IsolatedManager(object):
             # don't rsync source control metadata (it can be huge!)
             '- /project/.git',
             '- /project/.svn',
-            '- /project/.hg',
             # don't rsync job events that are in the process of being written
             '- /artifacts/job_events/*-partial.json.tmp',
             # don't rsync the ssh_key FIFO
@@ -18,7 +18,5 @@ class Command(BaseCommand):
         super(Command, self).__init__()
         license = get_licenser().validate()
         if options.get('data'):
-            if license.get('license_key', '') != 'UNLICENSED':
-                license['license_key'] = '********'
             return json.dumps(license)
         return license.get('license_type', 'none')
@@ -8,5 +8,7 @@ class Command(MakeMigrations):
     def execute(self, *args, **options):
         settings = connections['default'].settings_dict.copy()
         settings['ENGINE'] = 'sqlite3'
+        if 'application_name' in settings['OPTIONS']:
+            del settings['OPTIONS']['application_name']
         connections['default'] = DatabaseWrapper(settings)
         return MakeMigrations().execute(*args, **options)
awx/main/management/commands/graph_jobs.py (new file, 117 lines)
@@ -0,0 +1,117 @@
# Python
import asciichartpy as chart
import collections
import time
import sys

# Django
from django.db.models import Count
from django.core.management.base import BaseCommand

# AWX
from awx.main.models import (
    Job,
    Instance
)


DEFAULT_WIDTH = 100
DEFAULT_HEIGHT = 30


def chart_color_lookup(color_str):
    return getattr(chart, color_str)


def clear_screen():
    print(chr(27) + "[2J")


class JobStatus():
    def __init__(self, status, color, width):
        self.status = status
        self.color = color
        self.color_code = chart_color_lookup(color)
        self.x = collections.deque(maxlen=width)
        self.y = collections.deque(maxlen=width)

    def tick(self, x, y):
        self.x.append(x)
        self.y.append(y)


class JobStatusController:
    RESET = chart_color_lookup('reset')

    def __init__(self, width):
        self.plots = [
            JobStatus('pending', 'red', width),
            JobStatus('waiting', 'blue', width),
            JobStatus('running', 'green', width)
        ]
        self.ts_start = int(time.time())

    def tick(self):
        ts = int(time.time()) - self.ts_start
        q = Job.objects.filter(status__in=['pending','waiting','running']).values_list('status').order_by().annotate(Count('status'))
        status_count = dict(pending=0, waiting=0, running=0)
        for status, count in q:
            status_count[status] = count

        for p in self.plots:
            p.tick(ts, status_count[p.status])

    def series(self):
        return [list(p.y) for p in self.plots]

    def generate_status(self):
        line = ""
        lines = []
        for p in self.plots:
            lines.append(f'{p.color_code}{p.status} {p.y[-1]}{self.RESET}')

        line += ", ".join(lines) + '\n'

        width = 5
        time_running = int(time.time()) - self.ts_start
        instances = Instance.objects.all().order_by('hostname')
        line += "Capacity: " + ", ".join([f"{instance.capacity:{width}}" for instance in instances]) + '\n'
        line += "Remaining: " + ", ".join([f"{instance.remaining_capacity:{width}}" for instance in instances]) + '\n'
        line += f"Seconds running: {time_running}" + '\n'

        return line


class Command(BaseCommand):
    help = "Plot pending, waiting, running jobs over time on the terminal"

    def add_arguments(self, parser):
        parser.add_argument('--refresh', dest='refresh', type=float, default=1.0,
                            help='Time between refreshes of the graph and data in seconds (defaults to 1.0)')
        parser.add_argument('--width', dest='width', type=int, default=DEFAULT_WIDTH,
                            help=f'Width of the graph (defaults to {DEFAULT_WIDTH})')
        parser.add_argument('--height', dest='height', type=int, default=DEFAULT_HEIGHT,
                            help=f'Height of the graph (defaults to {DEFAULT_HEIGHT})')

    def handle(self, *args, **options):
        refresh_seconds = options['refresh']
        width = options['width']
        height = options['height']

        jctl = JobStatusController(width)

        conf = {
            'colors': [chart_color_lookup(p.color) for p in jctl.plots],
            'height': height,
        }

        while True:
            jctl.tick()

            draw = chart.plot(jctl.series(), conf)
            status_line = jctl.generate_status()
            clear_screen()
            print(draw)
            sys.stdout.write(status_line)
            time.sleep(refresh_seconds)
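For reference, the new command would be invoked through the project's usual management entry point with the flags defined in add_arguments above; exact invocation details beyond those flags are an assumption, not shown in this diff:

# Run on an AWX node (shown as a subprocess call to stay in one language;
# the underlying command is `awx-manage graph_jobs`):
import subprocess

subprocess.run([
    'awx-manage', 'graph_jobs',
    '--refresh', '0.5',   # redraw twice a second
    '--width', '120',     # samples kept per status (the deque maxlen)
    '--height', '20',     # rows used by asciichartpy
])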
@@ -19,6 +19,9 @@ from django.core.management.base import BaseCommand, CommandError
 from django.db import connection, transaction
 from django.utils.encoding import smart_text

+# DRF error class to distinguish license exceptions
+from rest_framework.exceptions import PermissionDenied
+
 # AWX inventory imports
 from awx.main.models.inventory import (
     Inventory,
@@ -31,11 +34,12 @@ from awx.main.utils.safe_yaml import sanitize_jinja

 # other AWX imports
 from awx.main.models.rbac import batch_role_ancestor_rebuilding
+# TODO: remove proot utils once we move to running inv. updates in containers
 from awx.main.utils import (
-    ignore_inventory_computed_fields,
     check_proot_installed,
     wrap_args_with_proot,
     build_proot_temp_dir,
+    ignore_inventory_computed_fields,
     get_licenser
 )
 from awx.main.signals import disable_activity_stream
@@ -53,11 +57,11 @@ No license.
 See http://www.ansible.com/renew for license information.'''

 LICENSE_MESSAGE = '''\
-Number of licensed instances exceeded, would bring available instances to %(new_count)d, system is licensed for %(available_instances)d.
+Number of licensed instances exceeded, would bring available instances to %(new_count)d, system is licensed for %(instance_count)d.
 See http://www.ansible.com/renew for license extension information.'''

 DEMO_LICENSE_MESSAGE = '''\
-Demo mode free license count exceeded, would bring available instances to %(new_count)d, demo mode allows %(available_instances)d.
+Demo mode free license count exceeded, would bring available instances to %(new_count)d, demo mode allows %(instance_count)d.
 See http://www.ansible.com/renew for licensing information.'''
@@ -75,13 +79,11 @@ class AnsibleInventoryLoader(object):
     /usr/bin/ansible/ansible-inventory -i hosts --list
     '''

-    def __init__(self, source, is_custom=False, venv_path=None, verbosity=0):
+    def __init__(self, source, venv_path=None, verbosity=0):
         self.source = source
-        self.source_dir = functioning_dir(self.source)
-        self.is_custom = is_custom
-        self.tmp_private_dir = None
         self.method = 'ansible-inventory'
         self.verbosity = verbosity
+        # TODO: remove once proot has been removed
+        self.tmp_private_dir = None
         if venv_path:
             self.venv_path = venv_path
         else:
@@ -134,35 +136,31 @@ class AnsibleInventoryLoader(object):
         # inside of /venv/ansible, so we override the specified interpreter
         # https://github.com/ansible/ansible/issues/50714
         bargs = ['python', ansible_inventory_path, '-i', self.source]
-        bargs.extend(['--playbook-dir', self.source_dir])
+        bargs.extend(['--playbook-dir', functioning_dir(self.source)])
         if self.verbosity:
             # INFO: -vvv, DEBUG: -vvvvv, for inventory, any more than 3 makes little difference
             bargs.append('-{}'.format('v' * min(5, self.verbosity * 2 + 1)))
         logger.debug('Using base command: {}'.format(' '.join(bargs)))
         return bargs

+    # TODO: Remove this once we move to running ansible-inventory in containers
+    # and don't need proot for process isolation anymore
     def get_proot_args(self, cmd, env):
         cwd = os.getcwd()
         if not check_proot_installed():
             raise RuntimeError("proot is not installed but is configured for use")

         kwargs = {}
-        if self.is_custom:
-            # use source's tmp dir for proot, task manager will delete folder
-            logger.debug("Using provided directory '{}' for isolation.".format(self.source_dir))
-            kwargs['proot_temp_dir'] = self.source_dir
-            cwd = self.source_dir
-        else:
-            # we cannot safely store tmp data in source dir or trust script contents
-            if env['AWX_PRIVATE_DATA_DIR']:
-                # If this is non-blank, file credentials are being used and we need access
-                private_data_dir = functioning_dir(env['AWX_PRIVATE_DATA_DIR'])
-                logger.debug("Using private credential data in '{}'.".format(private_data_dir))
-                kwargs['private_data_dir'] = private_data_dir
-            self.tmp_private_dir = build_proot_temp_dir()
-            logger.debug("Using fresh temporary directory '{}' for isolation.".format(self.tmp_private_dir))
-            kwargs['proot_temp_dir'] = self.tmp_private_dir
-            kwargs['proot_show_paths'] = [functioning_dir(self.source), settings.AWX_ANSIBLE_COLLECTIONS_PATHS]
+        # we cannot safely store tmp data in source dir or trust script contents
+        if env['AWX_PRIVATE_DATA_DIR']:
+            # If this is non-blank, file credentials are being used and we need access
+            private_data_dir = functioning_dir(env['AWX_PRIVATE_DATA_DIR'])
+            logger.debug("Using private credential data in '{}'.".format(private_data_dir))
+            kwargs['private_data_dir'] = private_data_dir
+        self.tmp_private_dir = build_proot_temp_dir()
+        logger.debug("Using fresh temporary directory '{}' for isolation.".format(self.tmp_private_dir))
+        kwargs['proot_temp_dir'] = self.tmp_private_dir
+        kwargs['proot_show_paths'] = [functioning_dir(self.source), settings.AWX_ANSIBLE_COLLECTIONS_PATHS]
         logger.debug("Running from `{}` working directory.".format(cwd))

         if self.venv_path != settings.ANSIBLE_VENV_PATH:
@@ -170,12 +168,14 @@ class AnsibleInventoryLoader(object):

         return wrap_args_with_proot(cmd, cwd, **kwargs)

     def command_to_json(self, cmd):
         data = {}
         stdout, stderr = '', ''
         env = self.build_env()

-        if ((self.is_custom or 'AWX_PRIVATE_DATA_DIR' in env) and
+        # TODO: remove proot args once inv. updates run in containers
+        if (('AWX_PRIVATE_DATA_DIR' in env) and
                 getattr(settings, 'AWX_PROOT_ENABLED', False)):
             cmd = self.get_proot_args(cmd, env)
@@ -184,11 +184,13 @@ class AnsibleInventoryLoader(object):
         stdout = smart_text(stdout)
         stderr = smart_text(stderr)

+        # TODO: can be removed when proot is removed
         if self.tmp_private_dir:
             shutil.rmtree(self.tmp_private_dir, True)

         if proc.returncode != 0:
             raise RuntimeError('%s failed (rc=%d) with stdout:\n%s\nstderr:\n%s' % (
-                self.method, proc.returncode, stdout, stderr))
+                'ansible-inventory', proc.returncode, stdout, stderr))

         for line in stderr.splitlines():
             logger.error(line)
@@ -231,9 +233,9 @@ class Command(BaseCommand):
                             action='store_true', default=False,
                             help='overwrite (rather than merge) variables')
         parser.add_argument('--keep-vars', dest='keep_vars', action='store_true', default=False,
-                            help='use database variables if set')
+                            help='DEPRECATED legacy option, has no effect')
         parser.add_argument('--custom', dest='custom', action='store_true', default=False,
-                            help='this is a custom inventory script')
+                            help='DEPRECATED indicates a custom inventory script, no longer used')
         parser.add_argument('--source', dest='source', type=str, default=None,
                             metavar='s', help='inventory directory, file, or script to load')
         parser.add_argument('--enabled-var', dest='enabled_var', type=str,
@@ -259,10 +261,10 @@ class Command(BaseCommand):
                             'specifies the unique, immutable instance ID, may be '
                             'specified as "foo.bar" to traverse nested dicts.')

-    def set_logging_level(self):
+    def set_logging_level(self, verbosity):
         log_levels = dict(enumerate([logging.WARNING, logging.INFO,
                                      logging.DEBUG, 0]))
-        logger.setLevel(log_levels.get(self.verbosity, 0))
+        logger.setLevel(log_levels.get(verbosity, 0))

     def _get_instance_id(self, variables, default=''):
         '''
@@ -322,7 +324,8 @@ class Command(BaseCommand):
         else:
             raise NotImplementedError('Value of enabled {} not understood.'.format(enabled))

-    def get_source_absolute_path(self, source):
+    @staticmethod
+    def get_source_absolute_path(source):
         if not os.path.exists(source):
             raise IOError('Source does not exist: %s' % source)
         source = os.path.join(os.getcwd(), os.path.dirname(source),
|
||||
source = os.path.normpath(os.path.abspath(source))
|
||||
return source
|
||||
|
||||
def load_inventory_from_database(self):
|
||||
'''
|
||||
Load inventory and related objects from the database.
|
||||
'''
|
||||
# Load inventory object based on name or ID.
|
||||
if self.inventory_id:
|
||||
q = dict(id=self.inventory_id)
|
||||
else:
|
||||
q = dict(name=self.inventory_name)
|
||||
try:
|
||||
self.inventory = Inventory.objects.get(**q)
|
||||
except Inventory.DoesNotExist:
|
||||
raise CommandError('Inventory with %s = %s cannot be found' % list(q.items())[0])
|
||||
except Inventory.MultipleObjectsReturned:
|
||||
raise CommandError('Inventory with %s = %s returned multiple results' % list(q.items())[0])
|
||||
logger.info('Updating inventory %d: %s' % (self.inventory.pk,
|
||||
self.inventory.name))
|
||||
|
||||
# Load inventory source if specified via environment variable (when
|
||||
# inventory_import is called from an InventoryUpdate task).
|
||||
inventory_source_id = os.getenv('INVENTORY_SOURCE_ID', None)
|
||||
inventory_update_id = os.getenv('INVENTORY_UPDATE_ID', None)
|
||||
if inventory_source_id:
|
||||
try:
|
||||
self.inventory_source = InventorySource.objects.get(pk=inventory_source_id,
|
||||
inventory=self.inventory)
|
||||
except InventorySource.DoesNotExist:
|
||||
raise CommandError('Inventory source with id=%s not found' %
|
||||
inventory_source_id)
|
||||
try:
|
||||
self.inventory_update = InventoryUpdate.objects.get(pk=inventory_update_id)
|
||||
except InventoryUpdate.DoesNotExist:
|
||||
raise CommandError('Inventory update with id=%s not found' %
|
||||
inventory_update_id)
|
||||
# Otherwise, create a new inventory source to capture this invocation
|
||||
# via command line.
|
||||
else:
|
||||
with ignore_inventory_computed_fields():
|
||||
self.inventory_source, created = InventorySource.objects.get_or_create(
|
||||
inventory=self.inventory,
|
||||
source='file',
|
||||
source_path=os.path.abspath(self.source),
|
||||
overwrite=self.overwrite,
|
||||
overwrite_vars=self.overwrite_vars,
|
||||
)
|
||||
self.inventory_update = self.inventory_source.create_inventory_update(
|
||||
_eager_fields=dict(
|
||||
job_args=json.dumps(sys.argv),
|
||||
job_env=dict(os.environ.items()),
|
||||
job_cwd=os.getcwd())
|
||||
)
|
||||
|
||||
# FIXME: Wait or raise error if inventory is being updated by another
|
||||
# source.
|
||||
|
||||
def _batch_add_m2m(self, related_manager, *objs, **kwargs):
|
||||
key = (related_manager.instance.pk, related_manager.through._meta.db_table)
|
||||
flush = bool(kwargs.get('flush', False))
|
||||
@@ -894,21 +842,21 @@ class Command(BaseCommand):
         source_vars = self.all_group.variables
         remote_license_type = source_vars.get('tower_metadata', {}).get('license_type', None)
         if remote_license_type is None:
-            raise CommandError('Unexpected Error: Tower inventory plugin missing needed metadata!')
+            raise PermissionDenied('Unexpected Error: Tower inventory plugin missing needed metadata!')
         if local_license_type != remote_license_type:
-            raise CommandError('Tower server licenses must match: source: {} local: {}'.format(
+            raise PermissionDenied('Tower server licenses must match: source: {} local: {}'.format(
                 remote_license_type, local_license_type
             ))

     def check_license(self):
         license_info = get_licenser().validate()
-        if license_info.get('license_key', 'UNLICENSED') == 'UNLICENSED':
+        local_license_type = license_info.get('license_type', 'UNLICENSED')
+        if local_license_type == 'UNLICENSED':
             logger.error(LICENSE_NON_EXISTANT_MESSAGE)
-            raise CommandError('No license found!')
+            raise PermissionDenied('No license found!')
+        elif local_license_type == 'open':
+            return
         available_instances = license_info.get('available_instances', 0)
         instance_count = license_info.get('instance_count', 0)
         free_instances = license_info.get('free_instances', 0)
         time_remaining = license_info.get('time_remaining', 0)
         hard_error = license_info.get('trial', False) is True or license_info['instance_count'] == 10
@@ -916,24 +864,24 @@ class Command(BaseCommand):
         if time_remaining <= 0:
             if hard_error:
                 logger.error(LICENSE_EXPIRED_MESSAGE)
-                raise CommandError("License has expired!")
+                raise PermissionDenied("License has expired!")
             else:
                 logger.warning(LICENSE_EXPIRED_MESSAGE)
         # special check for tower-type inventory sources
         # but only if running the plugin
         TOWER_SOURCE_FILES = ['tower.yml', 'tower.yaml']
-        if self.inventory_source.source == 'tower' and any(f in self.source for f in TOWER_SOURCE_FILES):
+        if self.inventory_source.source == 'tower' and any(f in self.inventory_source.source_path for f in TOWER_SOURCE_FILES):
             # only if this is the 2nd call to license check, we cannot compare before running plugin
             if hasattr(self, 'all_group'):
                 self.remote_tower_license_compare(local_license_type)
         if free_instances < 0:
             d = {
                 'new_count': new_count,
                 'available_instances': available_instances,
                 'instance_count': instance_count,
             }
             if hard_error:
                 logger.error(LICENSE_MESSAGE % d)
-                raise CommandError('License count exceeded!')
+                raise PermissionDenied('License count exceeded!')
             else:
                 logger.warning(LICENSE_MESSAGE % d)
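The renamed %(instance_count)d placeholder in LICENSE_MESSAGE above only works because the d dict carries a matching key; with Python's %-formatting against a dict, extra keys are ignored but a missing key raises KeyError. A small worked example:

MESSAGE = ('Number of licensed instances exceeded, would bring available '
           'instances to %(new_count)d, system is licensed for %(instance_count)d.')

d = {'new_count': 12, 'available_instances': 10, 'instance_count': 10}
print(MESSAGE % d)  # 'available_instances' is simply unused here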
@@ -948,7 +896,7 @@ class Command(BaseCommand):

         active_count = Host.objects.org_active_count(org.id)
         if active_count > org.max_hosts:
-            raise CommandError('Host limit for organization exceeded!')
+            raise PermissionDenied('Host limit for organization exceeded!')

     def mark_license_failure(self, save=True):
         self.inventory_update.license_error = True
@@ -959,16 +907,103 @@ class Command(BaseCommand):
         self.inventory_update.save(update_fields=['org_host_limit_error'])

     def handle(self, *args, **options):
-        self.verbosity = int(options.get('verbosity', 1))
-        self.set_logging_level()
-        self.inventory_name = options.get('inventory_name', None)
-        self.inventory_id = options.get('inventory_id', None)
-        venv_path = options.get('venv', None)
+        # Load inventory and related objects from database.
+        inventory_name = options.get('inventory_name', None)
+        inventory_id = options.get('inventory_id', None)
+        if inventory_name and inventory_id:
+            raise CommandError('--inventory-name and --inventory-id are mutually exclusive')
+        elif not inventory_name and not inventory_id:
+            raise CommandError('--inventory-name or --inventory-id is required')
+
+        with advisory_lock('inventory_{}_import'.format(inventory_id)):
+            # Obtain rest of the options needed to run update
+            raw_source = options.get('source', None)
+            if not raw_source:
+                raise CommandError('--source is required')
+            verbosity = int(options.get('verbosity', 1))
+            self.set_logging_level(verbosity)
+            venv_path = options.get('venv', None)
+
+            # Load inventory object based on name or ID.
+            if inventory_id:
+                q = dict(id=inventory_id)
+            else:
+                q = dict(name=inventory_name)
+            try:
+                inventory = Inventory.objects.get(**q)
+            except Inventory.DoesNotExist:
+                raise CommandError('Inventory with %s = %s cannot be found' % list(q.items())[0])
+            except Inventory.MultipleObjectsReturned:
+                raise CommandError('Inventory with %s = %s returned multiple results' % list(q.items())[0])
+            logger.info('Updating inventory %d: %s' % (inventory.pk, inventory.name))
+
+            # Create ad-hoc inventory source and inventory update objects
+            with ignore_inventory_computed_fields():
+                source = Command.get_source_absolute_path(raw_source)
+
+                inventory_source, created = InventorySource.objects.get_or_create(
+                    inventory=inventory,
+                    source='file',
+                    source_path=os.path.abspath(source),
+                    overwrite=bool(options.get('overwrite', False)),
+                    overwrite_vars=bool(options.get('overwrite_vars', False)),
+                )
+                inventory_update = inventory_source.create_inventory_update(
+                    _eager_fields=dict(
+                        job_args=json.dumps(sys.argv),
+                        job_env=dict(os.environ.items()),
+                        job_cwd=os.getcwd())
+                )
+
+            data = AnsibleInventoryLoader(
+                source=source, venv_path=venv_path, verbosity=verbosity
+            ).load()
+
+            logger.debug('Finished loading from source: %s', source)
+
+            status, tb, exc = 'error', '', None
+            try:
+                self.perform_update(options, data, inventory_update)
+                status = 'successful'
+            except Exception as e:
+                exc = e
+                if isinstance(e, KeyboardInterrupt):
+                    status = 'canceled'
+                else:
+                    tb = traceback.format_exc()
+
+            with ignore_inventory_computed_fields():
+                inventory_update = InventoryUpdate.objects.get(pk=inventory_update.pk)
+                inventory_update.result_traceback = tb
+                inventory_update.status = status
+                inventory_update.save(update_fields=['status', 'result_traceback'])
+                inventory_source.status = status
+                inventory_source.save(update_fields=['status'])
+
+            if exc:
+                logger.error(str(exc))
+
+            if exc:
+                if isinstance(exc, CommandError):
+                    sys.exit(1)
+                raise exc
+
+    def perform_update(self, options, data, inventory_update):
+        """Shared method for both awx-manage CLI updates and inventory updates
+        from the tasks system.
+
+        This saves the inventory data to the database, calling load_into_database
+        but also wraps that method in a host of options processing
+        """
+        # outside of normal options, these are needed as part of programatic interface
+        self.inventory = inventory_update.inventory
+        self.inventory_source = inventory_update.inventory_source
+        self.inventory_update = inventory_update
+
+        # the update options, could be parser object or dict
         self.overwrite = bool(options.get('overwrite', False))
         self.overwrite_vars = bool(options.get('overwrite_vars', False))
         self.keep_vars = bool(options.get('keep_vars', False))
         self.is_custom = bool(options.get('custom', False))
         self.source = options.get('source', None)
         self.enabled_var = options.get('enabled_var', None)
         self.enabled_value = options.get('enabled_value', None)
         self.group_filter = options.get('group_filter', None) or r'^.+$'
@@ -976,17 +1011,6 @@ class Command(BaseCommand):
         self.exclude_empty_groups = bool(options.get('exclude_empty_groups', False))
         self.instance_id_var = options.get('instance_id_var', None)

-        self.invoked_from_dispatcher = False if os.getenv('INVENTORY_SOURCE_ID', None) is None else True
-
-        # Load inventory and related objects from database.
-        if self.inventory_name and self.inventory_id:
-            raise CommandError('--inventory-name and --inventory-id are mutually exclusive')
-        elif not self.inventory_name and not self.inventory_id:
-            raise CommandError('--inventory-name or --inventory-id is required')
-        if (self.overwrite or self.overwrite_vars) and self.keep_vars:
-            raise CommandError('--overwrite/--overwrite-vars and --keep-vars are mutually exclusive')
-        if not self.source:
-            raise CommandError('--source is required')
         try:
             self.group_filter_re = re.compile(self.group_filter)
         except re.error:
@@ -997,146 +1021,115 @@ class Command(BaseCommand):
             raise CommandError('invalid regular expression for --host-filter')

         begin = time.time()
-        with advisory_lock('inventory_{}_update'.format(self.inventory_id)):
-            self.load_inventory_from_database()
-
-            try:
-                self.check_license()
-            except CommandError as e:
-                self.mark_license_failure(save=True)
-                raise e
-
-            try:
-                # Check the per-org host limits
-                self.check_org_host_limit()
-            except CommandError as e:
-                self.mark_org_limits_failure(save=True)
-                raise e
-
-            status, tb, exc = 'error', '', None
-            try:
-                if settings.SQL_DEBUG:
-                    queries_before = len(connection.queries)
-
-                # Update inventory update for this command line invocation.
-                with ignore_inventory_computed_fields():
-                    iu = self.inventory_update
-                    if iu.status != 'running':
-                        with transaction.atomic():
-                            self.inventory_update.status = 'running'
-                            self.inventory_update.save()
-
-                source = self.get_source_absolute_path(self.source)
-
-                data = AnsibleInventoryLoader(source=source, is_custom=self.is_custom,
-                                              venv_path=venv_path, verbosity=self.verbosity).load()
-
-                logger.debug('Finished loading from source: %s', source)
-                logger.info('Processing JSON output...')
-                inventory = MemInventory(
-                    group_filter_re=self.group_filter_re, host_filter_re=self.host_filter_re)
-                inventory = dict_to_mem_data(data, inventory=inventory)
-
-                del data  # forget dict from import, could be large
-
-                logger.info('Loaded %d groups, %d hosts', len(inventory.all_group.all_groups),
-                            len(inventory.all_group.all_hosts))
-
-                if self.exclude_empty_groups:
-                    inventory.delete_empty_groups()
-
-                self.all_group = inventory.all_group
-
-                if settings.DEBUG:
-                    # depending on inventory source, this output can be
-                    # *exceedingly* verbose - crawling a deeply nested
-                    # inventory/group data structure and printing metadata about
-                    # each host and its memberships
-                    #
-                    # it's easy for this scale of data to overwhelm pexpect,
-                    # (and it's likely only useful for purposes of debugging the
-                    # actual inventory import code), so only print it if we have to:
-                    # https://github.com/ansible/ansible-tower/issues/7414#issuecomment-321615104
-                    self.all_group.debug_tree()
-
-                with batch_role_ancestor_rebuilding():
-                    # If using with transaction.atomic() with try ... catch,
-                    # with transaction.atomic() must be inside the try section of the code as per Django docs
-                    try:
-                        # Ensure that this is managed as an atomic SQL transaction,
-                        # and thus properly rolled back if there is an issue.
-                        with transaction.atomic():
-                            # Merge/overwrite inventory into database.
-                            if settings.SQL_DEBUG:
-                                logger.warning('loading into database...')
-                            with ignore_inventory_computed_fields():
-                                if getattr(settings, 'ACTIVITY_STREAM_ENABLED_FOR_INVENTORY_SYNC', True):
-                                    self.load_into_database()
-                                else:
-                                    with disable_activity_stream():
-                                        self.load_into_database()
-                                if settings.SQL_DEBUG:
-                                    queries_before2 = len(connection.queries)
-                                self.inventory.update_computed_fields()
-                            if settings.SQL_DEBUG:
-                                logger.warning('update computed fields took %d queries',
-                                               len(connection.queries) - queries_before2)
-                            # Check if the license is valid.
-                            # If the license is not valid, a CommandError will be thrown,
-                            # and inventory update will be marked as invalid.
-                            # with transaction.atomic() will roll back the changes.
-                            license_fail = True
-                            self.check_license()
-
-                            # Check the per-org host limits
-                            license_fail = False
-                            self.check_org_host_limit()
-                    except CommandError as e:
-                        if license_fail:
-                            self.mark_license_failure()
-                        else:
-                            self.mark_org_limits_failure()
-                        raise e
-
-                    if settings.SQL_DEBUG:
-                        logger.warning('Inventory import completed for %s in %0.1fs',
-                                       self.inventory_source.name, time.time() - begin)
-                    else:
-                        logger.info('Inventory import completed for %s in %0.1fs',
-                                    self.inventory_source.name, time.time() - begin)
-                status = 'successful'
-
-                # If we're in debug mode, then log the queries and time
-                # used to do the operation.
-                if settings.SQL_DEBUG:
-                    queries_this_import = connection.queries[queries_before:]
-                    sqltime = sum(float(x['time']) for x in queries_this_import)
-                    logger.warning('Inventory import required %d queries '
-                                   'taking %0.3fs', len(queries_this_import),
-                                   sqltime)
-            except Exception as e:
-                if isinstance(e, KeyboardInterrupt):
-                    status = 'canceled'
-                    exc = e
-                elif isinstance(e, CommandError):
-                    exc = e
-                else:
-                    tb = traceback.format_exc()
-                    exc = e
-
-            if not self.invoked_from_dispatcher:
-                with ignore_inventory_computed_fields():
-                    self.inventory_update = InventoryUpdate.objects.get(pk=self.inventory_update.pk)
-                    self.inventory_update.result_traceback = tb
-                    self.inventory_update.status = status
-                    self.inventory_update.save(update_fields=['status', 'result_traceback'])
-                    self.inventory_source.status = status
-                    self.inventory_source.save(update_fields=['status'])
-
-            if exc:
-                logger.error(str(exc))
-
-            if exc:
-                if isinstance(exc, CommandError):
-                    sys.exit(1)
-                raise exc
+
+        # Since perform_update can be invoked either through the awx-manage CLI
+        # or from the task system, we need to create a new lock at this level
+        # (even though inventory_import.Command.handle -- which calls
+        # perform_update -- has its own lock, inventory_ID_import)
+        with advisory_lock('inventory_{}_perform_update'.format(self.inventory.id)):
+
+            try:
+                self.check_license()
+            except PermissionDenied as e:
+                self.mark_license_failure(save=True)
+                raise e
+
+            try:
+                # Check the per-org host limits
+                self.check_org_host_limit()
+            except PermissionDenied as e:
+                self.mark_org_limits_failure(save=True)
+                raise e
+
+            if settings.SQL_DEBUG:
+                queries_before = len(connection.queries)
+
+            # Update inventory update for this command line invocation.
+            with ignore_inventory_computed_fields():
+                # TODO: move this to before perform_update
+                iu = self.inventory_update
+                if iu.status != 'running':
+                    with transaction.atomic():
+                        self.inventory_update.status = 'running'
+                        self.inventory_update.save()
+
+            logger.info('Processing JSON output...')
+            inventory = MemInventory(
+                group_filter_re=self.group_filter_re, host_filter_re=self.host_filter_re)
+            inventory = dict_to_mem_data(data, inventory=inventory)
+
+            logger.info('Loaded %d groups, %d hosts', len(inventory.all_group.all_groups),
+                        len(inventory.all_group.all_hosts))
+
+            if self.exclude_empty_groups:
+                inventory.delete_empty_groups()
+
+            del data  # forget dict from import, could be large
+            self.all_group = inventory.all_group
+
+            if settings.DEBUG:
+                # depending on inventory source, this output can be
+                # *exceedingly* verbose - crawling a deeply nested
+                # inventory/group data structure and printing metadata about
+                # each host and its memberships
+                #
+                # it's easy for this scale of data to overwhelm pexpect,
+                # (and it's likely only useful for purposes of debugging the
+                # actual inventory import code), so only print it if we have to:
+                # https://github.com/ansible/ansible-tower/issues/7414#issuecomment-321615104
+                self.all_group.debug_tree()
+
+            with batch_role_ancestor_rebuilding():
+                # If using with transaction.atomic() with try ... catch,
+                # with transaction.atomic() must be inside the try section of the code as per Django docs
+                try:
+                    # Ensure that this is managed as an atomic SQL transaction,
+                    # and thus properly rolled back if there is an issue.
+                    with transaction.atomic():
+                        # Merge/overwrite inventory into database.
+                        if settings.SQL_DEBUG:
+                            logger.warning('loading into database...')
+                        with ignore_inventory_computed_fields():
+                            if getattr(settings, 'ACTIVITY_STREAM_ENABLED_FOR_INVENTORY_SYNC', True):
+                                self.load_into_database()
+                            else:
+                                with disable_activity_stream():
+                                    self.load_into_database()
+                            if settings.SQL_DEBUG:
+                                queries_before2 = len(connection.queries)
+                            self.inventory.update_computed_fields()
+                        if settings.SQL_DEBUG:
+                            logger.warning('update computed fields took %d queries',
+                                           len(connection.queries) - queries_before2)
+
+                        # Check if the license is valid.
+                        # If the license is not valid, a CommandError will be thrown,
+                        # and inventory update will be marked as invalid.
+                        # with transaction.atomic() will roll back the changes.
+                        license_fail = True
+                        self.check_license()
+
+                        # Check the per-org host limits
+                        license_fail = False
+                        self.check_org_host_limit()
+                except PermissionDenied as e:
+                    if license_fail:
+                        self.mark_license_failure(save=True)
+                    else:
+                        self.mark_org_limits_failure(save=True)
+                    raise e
+
+                if settings.SQL_DEBUG:
+                    logger.warning('Inventory import completed for %s in %0.1fs',
+                                   self.inventory_source.name, time.time() - begin)
+                else:
+                    logger.info('Inventory import completed for %s in %0.1fs',
+                                self.inventory_source.name, time.time() - begin)
+
+            # If we're in debug mode, then log the queries and time
+            # used to do the operation.
+            if settings.SQL_DEBUG:
+                queries_this_import = connection.queries[queries_before:]
+                sqltime = sum(float(x['time']) for x in queries_this_import)
+                logger.warning('Inventory import required %d queries '
+                               'taking %0.3fs', len(queries_this_import),
+                               sqltime)
@@ -19,7 +19,9 @@ class Command(BaseCommand):
         profile_sql.delay(
             threshold=options['threshold'], minutes=options['minutes']
         )
-        print(f"Logging initiated with a threshold of {options['threshold']} second(s) and a duration of"
-              f" {options['minutes']} minute(s), any queries that meet criteria can"
-              f" be found in /var/log/tower/profile/."
-              )
+        if options['threshold'] > 0:
+            print(f"SQL profiling initiated with a threshold of {options['threshold']} second(s) and a"
+                  f" duration of {options['minutes']} minute(s), any queries that meet criteria can"
+                  f" be found in /var/log/tower/profile/.")
+        else:
+            print("SQL profiling disabled.")
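Taken together with the tasks.py hunk further down (where threshold <= 0 now clears the cache key), the toggle appears to work by broadcasting a cache flag that database connections consult. A rough sketch of that pattern; 'awx-profile-sql-threshold' is the real cache key visible in this diff, everything else is illustrative:

from django.core.cache import cache

def profile_sql(threshold=1, minutes=1):
    if threshold <= 0:
        # turn profiling off everywhere
        cache.delete('awx-profile-sql-threshold')
    else:
        # store the threshold; a cursor wrapper would read this and log any
        # query slower than `threshold` seconds for the next `minutes` minutes
        cache.set('awx-profile-sql-threshold', threshold, timeout=minutes * 60)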
@@ -1,13 +1,9 @@
 # Copyright (c) 2015 Ansible, Inc.
 # All Rights Reserved.

-import uuid
 import logging
 import threading
 import time
-import cProfile
-import pstats
-import os
 import urllib.parse

 from django.conf import settings
@@ -22,6 +18,7 @@ from django.urls import reverse, resolve

 from awx.main.utils.named_url_graph import generate_graph, GraphNode
 from awx.conf import fields, register
+from awx.main.utils.profiling import AWXProfiler


 logger = logging.getLogger('awx.main.middleware')
@@ -32,11 +29,14 @@ class TimingMiddleware(threading.local, MiddlewareMixin):

     dest = '/var/log/tower/profile'

+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        self.prof = AWXProfiler("TimingMiddleware")
+
     def process_request(self, request):
         self.start_time = time.time()
         if settings.AWX_REQUEST_PROFILE:
-            self.prof = cProfile.Profile()
-            self.prof.enable()
+            self.prof.start()

     def process_response(self, request, response):
         if not hasattr(self, 'start_time'):  # some tools may not invoke process_request
@@ -44,33 +44,10 @@ class TimingMiddleware(threading.local, MiddlewareMixin):
         total_time = time.time() - self.start_time
         response['X-API-Total-Time'] = '%0.3fs' % total_time
         if settings.AWX_REQUEST_PROFILE:
-            self.prof.disable()
-            cprofile_file = self.save_profile_file(request)
-            response['cprofile_file'] = cprofile_file
+            response['X-API-Profile-File'] = self.prof.stop()
         perf_logger.info('api response times', extra=dict(python_objects=dict(request=request, response=response)))
         return response

-    def save_profile_file(self, request):
-        if not os.path.isdir(self.dest):
-            os.makedirs(self.dest)
-        filename = '%.3fs-%s.pstats' % (pstats.Stats(self.prof).total_tt, uuid.uuid4())
-        filepath = os.path.join(self.dest, filename)
-        with open(filepath, 'w') as f:
-            f.write('%s %s\n' % (request.method, request.get_full_path()))
-            pstats.Stats(self.prof, stream=f).sort_stats('cumulative').print_stats()
-
-        if settings.AWX_REQUEST_PROFILE_WITH_DOT:
-            from gprof2dot import main as generate_dot
-            raw = os.path.join(self.dest, filename) + '.raw'
-            pstats.Stats(self.prof).dump_stats(raw)
-            generate_dot([
-                '-n', '2.5', '-f', 'pstats', '-o',
-                os.path.join(self.dest, filename).replace('.pstats', '.dot'),
-                raw
-            ])
-            os.remove(raw)
-        return filepath
-

 class SessionTimeoutMiddleware(MiddlewareMixin):
     """
@@ -204,4 +181,4 @@ class MigrationRanCheckMiddleware(MiddlewareMixin):
         plan = executor.migration_plan(executor.loader.graph.leaf_nodes())
         if bool(plan) and \
                 getattr(resolve(request.path), 'url_name', '') != 'migrations_notran':
-            return redirect(reverse("ui:migrations_notran"))
+            return redirect(reverse("ui_next:migrations_notran"))
@@ -1,11 +1,7 @@
 # Generated by Django 2.2.11 on 2020-05-01 13:25

 from django.db import migrations, models
-from awx.main.migrations._inventory_source import create_scm_script_substitute
-
-
-def convert_cloudforms_to_scm(apps, schema_editor):
-    create_scm_script_substitute(apps, 'cloudforms')
+from awx.main.migrations._inventory_source import delete_cloudforms_inv_source


 class Migration(migrations.Migration):
@@ -15,7 +11,7 @@ class Migration(migrations.Migration):
     ]

     operations = [
-        migrations.RunPython(convert_cloudforms_to_scm),
+        migrations.RunPython(delete_cloudforms_inv_source),
         migrations.AlterField(
             model_name='inventorysource',
             name='source',
awx/main/migrations/0122_really_remove_cloudforms_inventory.py (new file, 13 lines)
@@ -0,0 +1,13 @@
from django.db import migrations
from awx.main.migrations._inventory_source import delete_cloudforms_inv_source


class Migration(migrations.Migration):

    dependencies = [
        ('main', '0121_delete_toweranalyticsstate'),
    ]

    operations = [
        migrations.RunPython(delete_cloudforms_inv_source),
    ]
awx/main/migrations/0123_drop_hg_support.py (new file, 23 lines)
@@ -0,0 +1,23 @@
from django.db import migrations, models
from awx.main.migrations._hg_removal import delete_hg_scm


class Migration(migrations.Migration):

    dependencies = [
        ('main', '0122_really_remove_cloudforms_inventory'),
    ]

    operations = [
        migrations.RunPython(delete_hg_scm),
        migrations.AlterField(
            model_name='project',
            name='scm_type',
            field=models.CharField(blank=True, choices=[('', 'Manual'), ('git', 'Git'), ('svn', 'Subversion'), ('insights', 'Red Hat Insights'), ('archive', 'Remote Archive')], default='', help_text='Specifies the source control system used to store the project.', max_length=8, verbose_name='SCM Type'),
        ),
        migrations.AlterField(
            model_name='projectupdate',
            name='scm_type',
            field=models.CharField(blank=True, choices=[('', 'Manual'), ('git', 'Git'), ('svn', 'Subversion'), ('insights', 'Red Hat Insights'), ('archive', 'Remote Archive')], default='', help_text='Specifies the source control system used to store the project.', max_length=8, verbose_name='SCM Type'),
        ),
    ]
awx/main/migrations/_hg_removal.py (new file, 19 lines)
@@ -0,0 +1,19 @@
import logging

from awx.main.utils.common import set_current_apps

logger = logging.getLogger('awx.main.migrations')


def delete_hg_scm(apps, schema_editor):
    set_current_apps(apps)
    Project = apps.get_model('main', 'Project')
    ProjectUpdate = apps.get_model('main', 'ProjectUpdate')

    ProjectUpdate.objects.filter(project__scm_type='hg').update(scm_type='')
    update_ct = Project.objects.filter(scm_type='hg').update(scm_type='')

    if update_ct:
        logger.warn('Changed {} mercurial projects to manual, deprecation period ended'.format(
            update_ct
        ))
@@ -5,6 +5,7 @@ from uuid import uuid4
 from django.utils.encoding import smart_text
 from django.utils.timezone import now

+from awx.main.utils.common import set_current_apps
 from awx.main.utils.common import parse_yaml_or_json

 logger = logging.getLogger('awx.main.migrations')
@@ -91,43 +92,14 @@ def back_out_new_instance_id(apps, source, new_id):
         ))


-def create_scm_script_substitute(apps, source):
-    """Only applies for cloudforms in practice, but written generally.
-    Given a source type, this will replace all inventory sources of that type
-    with SCM inventory sources that source the script from Ansible core
-    """
-    # the revision in the Ansible 2.9 stable branch this project will start out as
-    # it can still be updated manually later (but staying within 2.9 branch), if desired
-    ansible_rev = '6f83b9aff42331e15c55a171de0a8b001208c18c'
-    InventorySource = apps.get_model('main', 'InventorySource')
-    ContentType = apps.get_model('contenttypes', 'ContentType')
-    Project = apps.get_model('main', 'Project')
-    if not InventorySource.objects.filter(source=source).exists():
-        logger.debug('No sources of type {} to migrate'.format(source))
-        return
-    proj_name = 'Replacement project for {} type sources - {}'.format(source, uuid4())
-    right_now = now()
-    project = Project.objects.create(
-        name=proj_name,
-        created=right_now,
-        modified=right_now,
-        description='Created by migration',
-        polymorphic_ctype=ContentType.objects.get(model='project'),
-        # project-specific fields
-        scm_type='git',
-        scm_url='https://github.com/ansible/ansible.git',
-        scm_branch='stable-2.9',
-        scm_revision=ansible_rev
-    )
-    ct = 0
-    for inv_src in InventorySource.objects.filter(source=source).iterator():
-        inv_src.source = 'scm'
-        inv_src.source_project = project
-        inv_src.source_path = 'contrib/inventory/{}.py'.format(source)
-        inv_src.scm_last_revision = ansible_rev
-        inv_src.save(update_fields=['source', 'source_project', 'source_path', 'scm_last_revision'])
-        logger.debug('Changed inventory source {} to scm type'.format(inv_src.pk))
-        ct += 1
-    logger.info('Changed total of {} inventory sources from {} type to scm'.format(ct, source))
+def delete_cloudforms_inv_source(apps, schema_editor):
+    set_current_apps(apps)
+    InventorySource = apps.get_model('main', 'InventorySource')
+    InventoryUpdate = apps.get_model('main', 'InventoryUpdate')
+    CredentialType = apps.get_model('main', 'CredentialType')
+    InventoryUpdate.objects.filter(inventory_source__source='cloudforms').delete()
+    InventorySource.objects.filter(source='cloudforms').delete()
+    ct = CredentialType.objects.filter(namespace='cloudforms').first()
+    if ct:
+        ct.credentials.all().delete()
+        ct.delete()
@@ -1,9 +1,13 @@
 import json
 import re
+import logging

 from django.utils.translation import ugettext_lazy as _
+from django.utils.encoding import iri_to_uri


 FrozenInjectors = dict()
+logger = logging.getLogger('awx.main.migrations')


 class PluginFileInjector(object):
@@ -129,6 +133,7 @@ class azure_rm(PluginFileInjector):
             ret['exclude_host_filters'].append("location not in {}".format(repr(python_regions)))
         return ret

+
 class ec2(PluginFileInjector):
     plugin_name = 'aws_ec2'
     namespace = 'amazon'
@@ -586,6 +591,7 @@ class openstack(PluginFileInjector):
         ret['inventory_hostname'] = use_host_name_for_name(source_vars['use_hostnames'])
         return ret

+
 class rhv(PluginFileInjector):
     """ovirt uses the custom credential templating, and that is all
     """
@@ -881,33 +881,6 @@ ManagedCredentialType(
     }
 )

-ManagedCredentialType(
-    namespace='cloudforms',
-    kind='cloud',
-    name=ugettext_noop('Red Hat CloudForms'),
-    managed_by_tower=True,
-    inputs={
-        'fields': [{
-            'id': 'host',
-            'label': ugettext_noop('CloudForms URL'),
-            'type': 'string',
-            'help_text': ugettext_noop('Enter the URL for the virtual machine that '
-                                       'corresponds to your CloudForms instance. '
-                                       'For example, https://cloudforms.example.org')
-        }, {
-            'id': 'username',
-            'label': ugettext_noop('Username'),
-            'type': 'string'
-        }, {
-            'id': 'password',
-            'label': ugettext_noop('Password'),
-            'type': 'string',
-            'secret': True,
-        }],
-        'required': ['host', 'username', 'password'],
-    }
-)
-
 ManagedCredentialType(
     namespace='gce',
     kind='cloud',
@@ -798,6 +798,10 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskMana
         if self.project:
             for name in ('awx', 'tower'):
                 r['{}_project_revision'.format(name)] = self.project.scm_revision
+                r['{}_project_scm_branch'.format(name)] = self.project.scm_branch
+        if self.scm_branch:
+            for name in ('awx', 'tower'):
+                r['{}_job_scm_branch'.format(name)] = self.scm_branch
         if self.job_template:
             for name in ('awx', 'tower'):
                 r['{}_job_template_id'.format(name)] = self.job_template.pk
@@ -52,7 +52,6 @@ class ProjectOptions(models.Model):
     SCM_TYPE_CHOICES = [
         ('', _('Manual')),
         ('git', _('Git')),
-        ('hg', _('Mercurial')),
         ('svn', _('Subversion')),
         ('insights', _('Red Hat Insights')),
         ('archive', _('Remote Archive')),
@@ -674,7 +674,7 @@ class WorkflowJob(UnifiedJob, WorkflowJobOptions, SurveyJobMixin, JobNotificatio
         return self.status == 'running'


-class WorkflowApprovalTemplate(UnifiedJobTemplate):
+class WorkflowApprovalTemplate(UnifiedJobTemplate, RelatedJobsMixin):

     FIELDS_TO_PRESERVE_AT_COPY = ['description', 'timeout',]
@@ -702,6 +702,12 @@ class WorkflowApprovalTemplate(UnifiedJobTemplate):
     def workflow_job_template(self):
         return self.workflowjobtemplatenodes.first().workflow_job_template

+    '''
+    RelatedJobsMixin
+    '''
+    def _get_related_jobs(self):
+        return UnifiedJob.objects.filter(unified_job_template=self)
+

 class WorkflowApproval(UnifiedJob, JobNotificationMixin):
     class Meta:
@@ -57,6 +57,7 @@ class WebhookBackend(AWXBaseEmailBackend, CustomNotificationBase):

     def send_messages(self, messages):
         sent_messages = 0
+        self.headers['Content-Type'] = 'application/json'
         if 'User-Agent' not in self.headers:
             self.headers['User-Agent'] = "Tower {}".format(get_awx_version())
         if self.http_method.lower() not in ['put','post']:
@@ -68,7 +69,7 @@ class WebhookBackend(AWXBaseEmailBackend, CustomNotificationBase):
             auth = (self.username, self.password)
         r = chosen_method("{}".format(m.recipients()[0]),
                           auth=auth,
-                          json=m.body,
+                          data=json.dumps(m.body, ensure_ascii=False).encode('utf-8'),
                           headers=self.headers,
                           verify=(not self.disable_ssl_verification))
         if r.status_code >= 400:
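The switch from requests' json= kwarg to a manually encoded body keeps non-ASCII characters intact instead of letting them be backslash-escaped, which is presumably also why Content-Type is now set explicitly above. A small illustration of the difference (standard-library behavior only; no AWX code assumed):

import json

body = {'msg': 'żółć ünïcode'}

# json= would serialize with the default ensure_ascii=True, escaping non-ASCII:
print(json.dumps(body))   # {"msg": "\u017c\u00f3\u0142\u0107 ..."}

# the new explicit encoding keeps the characters as literal UTF-8 bytes:
print(json.dumps(body, ensure_ascii=False).encode('utf-8'))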
@@ -12,6 +12,24 @@ from awx.main.utils.common import parse_yaml_or_json
 logger = logging.getLogger('awx.main.scheduler')


+def deepmerge(a, b):
+    """
+    Merge dict structures and return the result.
+
+    >>> a = {'first': {'all_rows': {'pass': 'dog', 'number': '1'}}}
+    >>> b = {'first': {'all_rows': {'fail': 'cat', 'number': '5'}}}
+    >>> import pprint; pprint.pprint(deepmerge(a, b))
+    {'first': {'all_rows': {'fail': 'cat', 'number': '5', 'pass': 'dog'}}}
+    """
+    if isinstance(a, dict) and isinstance(b, dict):
+        return dict([(k, deepmerge(a.get(k), b.get(k)))
+                     for k in set(a.keys()).union(b.keys())])
+    elif b is None:
+        return a
+    else:
+        return b
+
+
 class PodManager(object):

     def __init__(self, task=None):
@@ -128,11 +146,13 @@ class PodManager(object):
         pod_spec = {**default_pod_spec, **pod_spec_override}

         if self.task:
-            pod_spec['metadata']['name'] = self.pod_name
-            pod_spec['metadata']['labels'] = {
-                'ansible-awx': settings.INSTALL_UUID,
-                'ansible-awx-job-id': str(self.task.id)
-            }
+            pod_spec['metadata'] = deepmerge(
+                pod_spec.get('metadata', {}),
+                dict(name=self.pod_name,
+                     labels={
+                         'ansible-awx': settings.INSTALL_UUID,
+                         'ansible-awx-job-id': str(self.task.id)
+                     }))
             pod_spec['spec']['containers'][0]['name'] = self.pod_name

         return pod_spec
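The practical effect is that user-supplied metadata in a pod_spec_override now survives instead of being clobbered wholesale by the labels assignment. A quick illustration using the deepmerge helper added above (the override values here are made up):

# Hypothetical user-supplied metadata from a pod_spec_override:
pod_spec_metadata = {'namespace': 'awx-jobs', 'labels': {'team': 'data-eng'}}

merged = deepmerge(
    pod_spec_metadata,
    dict(name='awx-job-42', labels={'ansible-awx-job-id': '42'}))

# merged keeps both label sets:
# {'namespace': 'awx-jobs',
#  'name': 'awx-job-42',
#  'labels': {'team': 'data-eng', 'ansible-awx-job-id': '42'}}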
@@ -14,6 +14,7 @@ from django.db import transaction, connection
 from django.utils.translation import ugettext_lazy as _, gettext_noop
 from django.utils.timezone import now as tz_now
 from django.conf import settings
+from django.db.models import Q

 # AWX
 from awx.main.dispatch.reaper import reap_job
@@ -67,7 +68,7 @@ class TaskManager():
         '''
         Init AFTER we know this instance of the task manager will run because the lock is acquired.
         '''
-        instances = Instance.objects.filter(capacity__gt=0, enabled=True)
+        instances = Instance.objects.filter(~Q(hostname=None), capacity__gt=0, enabled=True)
         self.real_instances = {i.hostname: i for i in instances}

         instances_partial = [SimpleNamespace(obj=instance,
@@ -284,7 +285,7 @@ class TaskManager():
             for group in InstanceGroup.objects.all():
                 if group.is_containerized or group.controller_id:
                     continue
-                match = group.fit_task_to_most_remaining_capacity_instance(task)
+                match = group.fit_task_to_most_remaining_capacity_instance(task, group.instances.all())
                 if match:
                     break
             task.instance_group = rampart_group
@@ -528,7 +529,8 @@ class TaskManager():
                 logger.debug("Starting {} in group {} instance {} (remaining_capacity={})".format(
                     task.log_format, rampart_group.name, execution_instance.hostname, remaining_capacity))

-                execution_instance = self.real_instances[execution_instance.hostname]
+                if execution_instance:
+                    execution_instance = self.real_instances[execution_instance.hostname]
                 self.graph[rampart_group.name]['graph'].add_job(task)
                 self.start_task(task, rampart_group, task.get_jobs_fail_chain(), execution_instance)
                 found_acceptable_queue = True
@@ -23,7 +23,6 @@ import fcntl
 from pathlib import Path
 from uuid import uuid4
 import urllib.parse as urlparse
-import shlex

 # Django
 from django.conf import settings
@@ -64,7 +63,7 @@ from awx.main.models import (
     build_safe_env, enforce_bigint_pk_migration
 )
 from awx.main.constants import ACTIVE_STATES
-from awx.main.exceptions import AwxTaskError
+from awx.main.exceptions import AwxTaskError, PostRunError
 from awx.main.queue import CallbackQueueDispatcher
 from awx.main.isolated import manager as isolated_manager
 from awx.main.dispatch.publish import task
@@ -79,6 +78,7 @@ from awx.main.utils.external_logging import reconfigure_rsyslog
 from awx.main.utils.safe_yaml import safe_dump, sanitize_jinja
 from awx.main.utils.reload import stop_local_services
 from awx.main.utils.pglock import advisory_lock
+from awx.main.utils.handlers import SpecialInventoryHandler
 from awx.main.consumers import emit_channel_notification
 from awx.main import analytics
 from awx.conf import settings_registry
@@ -313,7 +313,7 @@ def delete_project_files(project_path):

 @task(queue='tower_broadcast_all')
 def profile_sql(threshold=1, minutes=1):
-    if threshold == 0:
+    if threshold <= 0:
         cache.delete('awx-profile-sql-threshold')
         logger.error('SQL PROFILING DISABLED')
     else:
@@ -1225,6 +1225,13 @@ class BaseTask(object):
         Ansible runner puts a parent_uuid on each event, no matter what the type.
         AWX only saves the parent_uuid if the event is for a Job.
         '''
+        # cache end_line locally for RunInventoryUpdate tasks
+        # which generate job events from two 'streams':
+        # ansible-inventory and the awx.main.commands.inventory_import
+        # logger
+        if isinstance(self, RunInventoryUpdate):
+            self.end_line = event_data['end_line']
+
         if event_data.get(self.event_data_key, None):
             if self.event_data_key != 'job_id':
                 event_data.pop('parent_uuid', None)
@@ -1253,7 +1260,7 @@ class BaseTask(object):
         # so it *should* have a negligible performance impact
         task = event_data.get('event_data', {}).get('task_action')
         try:
-            if task in ('git', 'hg', 'svn'):
+            if task in ('git', 'svn'):
                 event_data_json = json.dumps(event_data)
                 event_data_json = UriCleaner.remove_sensitive(event_data_json)
                 event_data = json.loads(event_data_json)
@@ -1521,6 +1528,12 @@ class BaseTask(object):

         try:
             self.post_run_hook(self.instance, status)
+        except PostRunError as exc:
+            if status == 'successful':
+                status = exc.status
+                extra_update_fields['job_explanation'] = exc.args[0]
+                if exc.tb:
+                    extra_update_fields['result_traceback'] = exc.tb
         except Exception:
             logger.exception('{} Post run hook errored.'.format(self.instance.log_format))
@@ -2141,7 +2154,7 @@ class RunProjectUpdate(BaseTask):
             elif not scm_branch:
                 raise RuntimeError('Could not determine a revision to run from project.')
         elif not scm_branch:
-            scm_branch = {'hg': 'tip'}.get(project_update.scm_type, 'HEAD')
+            scm_branch = 'HEAD'

         galaxy_creds_are_defined = (
             project_update.project.organization and
@@ -2150,7 +2163,7 @@ class RunProjectUpdate(BaseTask):
         if not galaxy_creds_are_defined and (
                 settings.AWX_ROLES_ENABLED or settings.AWX_COLLECTIONS_ENABLED
         ):
-            logger.debug(
+            logger.warning(
                 'Galaxy role/collection syncing is enabled, but no '
                 f'credentials are configured for {project_update.project.organization}.'
             )
@@ -2160,7 +2173,7 @@ class RunProjectUpdate(BaseTask):
            'local_path': os.path.basename(project_update.project.local_path),
            'project_path': project_update.get_project_path(check_if_exists=False),  # deprecated
            'insights_url': settings.INSIGHTS_URL_BASE,
-            'awx_license_type': get_license(show_key=False).get('license_type', 'UNLICENSED'),
+            'awx_license_type': get_license().get('license_type', 'UNLICENSED'),
            'awx_version': get_awx_version(),
            'scm_url': scm_url,
            'scm_branch': scm_branch,
@@ -2417,9 +2430,10 @@ class RunProjectUpdate(BaseTask):
                shutil.rmtree(stage_path)  # cannot trust content update produced

        if self.job_private_data_dir:
-            # copy project folder before resetting to default branch
-            # because some git-tree-specific resources (like submodules) might matter
-            self.make_local_copy(instance, self.job_private_data_dir)
+            if status == 'successful':
+                # copy project folder before resetting to default branch
+                # because some git-tree-specific resources (like submodules) might matter
+                self.make_local_copy(instance, self.job_private_data_dir)
            if self.original_branch:
                # for git project syncs, non-default branches can be problems
                # restore to branch the repo was on before this run
@@ -2461,6 +2475,14 @@ class RunInventoryUpdate(BaseTask):
    event_model = InventoryUpdateEvent
    event_data_key = 'inventory_update_id'

+    # TODO: remove once inv updates run in containers
+    def should_use_proot(self, inventory_update):
+        '''
+        Return whether this task should use proot.
+        '''
+        return getattr(settings, 'AWX_PROOT_ENABLED', False)
+
+    # TODO: remove once inv updates run in containers
    @property
    def proot_show_paths(self):
        return [settings.AWX_ANSIBLE_COLLECTIONS_PATHS]
@@ -2485,15 +2507,11 @@ class RunInventoryUpdate(BaseTask):
        return injector.build_private_data(inventory_update, private_data_dir)

    def build_env(self, inventory_update, private_data_dir, isolated, private_data_files=None):
-        """Build environment dictionary for inventory import.
+        """Build environment dictionary for ansible-inventory.

-        This used to be the mechanism by which any data that needs to be passed
-        to the inventory update script is set up. In particular, this is how
-        inventory update is aware of its proper credentials.
-
-        Most environment injection is now accomplished by the credential
-        injectors. The primary purpose this still serves is to
-        still point to the inventory update INI or config file.
+        Most environment variables related to credentials or configuration
+        are accomplished by the inventory source injectors (in this method)
+        or custom credential type injectors (in main run method).
        """
        env = super(RunInventoryUpdate, self).build_env(inventory_update,
                                                        private_data_dir,
@@ -2501,8 +2519,11 @@ class RunInventoryUpdate(BaseTask):
                                                        private_data_files=private_data_files)
        if private_data_files is None:
            private_data_files = {}
        self.add_awx_venv(env)
-        # Pass inventory source ID to inventory script.
+        # TODO: remove once containers replace custom venvs
+        self.add_ansible_venv(inventory_update.ansible_virtualenv_path, env, isolated=isolated)
+
+        # Legacy environment variables, were used as signal to awx-manage command
+        # now they are provided in case some scripts may be relying on them
        env['INVENTORY_SOURCE_ID'] = str(inventory_update.inventory_source_id)
        env['INVENTORY_UPDATE_ID'] = str(inventory_update.pk)
        env.update(STANDARD_INVENTORY_UPDATE_ENV)
@@ -2565,47 +2586,25 @@ class RunInventoryUpdate(BaseTask):
        if inventory is None:
            raise RuntimeError('Inventory Source is not associated with an Inventory.')

-        # Piece together the initial command to run via. the shell.
-        args = ['awx-manage', 'inventory_import']
-        args.extend(['--inventory-id', str(inventory.pk)])
+        args = ['ansible-inventory', '--list', '--export']

-        # Add appropriate arguments for overwrite if the inventory_update
-        # object calls for it.
-        if inventory_update.overwrite:
-            args.append('--overwrite')
-        if inventory_update.overwrite_vars:
-            args.append('--overwrite-vars')
+        # Add arguments for the source inventory file/script/thing
+        source_location = self.pseudo_build_inventory(inventory_update, private_data_dir)
+        args.append('-i')
+        args.append(source_location)

-        # Declare the virtualenv the management command should activate
-        # as it calls ansible-inventory
-        args.extend(['--venv', inventory_update.ansible_virtualenv_path])
+        args.append('--output')
+        args.append(os.path.join(private_data_dir, 'artifacts', 'output.json'))

-        src = inventory_update.source
-        if inventory_update.enabled_var:
-            args.extend(['--enabled-var', shlex.quote(inventory_update.enabled_var)])
-            args.extend(['--enabled-value', shlex.quote(inventory_update.enabled_value)])
-        else:
-            if getattr(settings, '%s_ENABLED_VAR' % src.upper(), False):
-                args.extend(['--enabled-var',
-                            getattr(settings, '%s_ENABLED_VAR' % src.upper())])
-            if getattr(settings, '%s_ENABLED_VALUE' % src.upper(), False):
-                args.extend(['--enabled-value',
-                            getattr(settings, '%s_ENABLED_VALUE' % src.upper())])
-        if inventory_update.host_filter:
-            args.extend(['--host-filter', shlex.quote(inventory_update.host_filter)])
-        if getattr(settings, '%s_EXCLUDE_EMPTY_GROUPS' % src.upper()):
-            args.append('--exclude-empty-groups')
-        if getattr(settings, '%s_INSTANCE_ID_VAR' % src.upper(), False):
-            args.extend(['--instance-id-var',
-                        "'{}'".format(getattr(settings, '%s_INSTANCE_ID_VAR' % src.upper())),])
-        # Add arguments for the source inventory script
-        args.append('--source')
-        args.append(self.pseudo_build_inventory(inventory_update, private_data_dir))
-        if src == 'custom':
-            args.append("--custom")
-        args.append('-v%d' % inventory_update.verbosity)
-        if settings.DEBUG:
-            args.append('--traceback')
+        if os.path.isdir(source_location):
+            playbook_dir = source_location
+        else:
+            playbook_dir = os.path.dirname(source_location)
+        args.extend(['--playbook-dir', playbook_dir])
+
+        if inventory_update.verbosity:
+            args.append('-' + 'v' * min(5, inventory_update.verbosity * 2 + 1))

        return args

    def build_inventory(self, inventory_update, private_data_dir):
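A reader's sketch (not part of the diffs): the list built above now assembles an ansible-inventory invocation instead of an awx-manage one. With made-up paths and verbosity 1, the resulting args would look roughly like:

    args = [
        'ansible-inventory', '--list', '--export',
        '-i', '/tmp/awx_42_xyz/ec2.yml',                       # made-up source location
        '--output', '/tmp/awx_42_xyz/artifacts/output.json',
        '--playbook-dir', '/tmp/awx_42_xyz',                   # dirname of the source file
        '-vvv',                                                # verbosity 1 -> min(5, 1*2+1) = 3
    ]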
@@ -2645,11 +2644,9 @@ class RunInventoryUpdate(BaseTask):

    def build_cwd(self, inventory_update, private_data_dir):
        '''
-        There are two cases where the inventory "source" is in a different
+        There is one case where the inventory "source" is in a different
        location from the private data:
-        - deprecated vendored inventory scripts in awx/plugins/inventory
        - SCM, where source needs to live in the project folder
        in these cases, the inventory does not exist in the standard tempdir
        '''
        src = inventory_update.source
        if src == 'scm' and inventory_update.source_project_update:
@@ -2707,6 +2704,75 @@ class RunInventoryUpdate(BaseTask):
            # This follows update, not sync, so make copy here
            RunProjectUpdate.make_local_copy(source_project, private_data_dir)

+    def post_run_hook(self, inventory_update, status):
+        if status != 'successful':
+            return  # nothing to save, step out of the way to allow error reporting
+
+        private_data_dir = inventory_update.job_env['AWX_PRIVATE_DATA_DIR']
+        expected_output = os.path.join(private_data_dir, 'artifacts', 'output.json')
+        with open(expected_output) as f:
+            data = json.load(f)
+
+        # build inventory save options
+        options = dict(
+            overwrite=inventory_update.overwrite,
+            overwrite_vars=inventory_update.overwrite_vars,
+        )
+        src = inventory_update.source
+
+        if inventory_update.enabled_var:
+            options['enabled_var'] = inventory_update.enabled_var
+            options['enabled_value'] = inventory_update.enabled_value
+        else:
+            if getattr(settings, '%s_ENABLED_VAR' % src.upper(), False):
+                options['enabled_var'] = getattr(settings, '%s_ENABLED_VAR' % src.upper())
+            if getattr(settings, '%s_ENABLED_VALUE' % src.upper(), False):
+                options['enabled_value'] = getattr(settings, '%s_ENABLED_VALUE' % src.upper())
+
+        if inventory_update.host_filter:
+            options['host_filter'] = inventory_update.host_filter
+
+        if getattr(settings, '%s_EXCLUDE_EMPTY_GROUPS' % src.upper()):
+            options['exclude_empty_groups'] = True
+        if getattr(settings, '%s_INSTANCE_ID_VAR' % src.upper(), False):
+            options['instance_id_var'] = getattr(settings, '%s_INSTANCE_ID_VAR' % src.upper())
+
+        # Verbosity is applied to saving process, as well as ansible-inventory CLI option
+        if inventory_update.verbosity:
+            options['verbosity'] = inventory_update.verbosity
+
+        handler = SpecialInventoryHandler(
+            self.event_handler, self.cancel_callback,
+            verbosity=inventory_update.verbosity,
+            job_timeout=self.get_instance_timeout(self.instance),
+            start_time=inventory_update.started,
+            counter=self.event_ct, initial_line=self.end_line
+        )
+        inv_logger = logging.getLogger('awx.main.commands.inventory_import')
+        formatter = inv_logger.handlers[0].formatter
+        formatter.job_start = inventory_update.started
+        handler.formatter = formatter
+        inv_logger.handlers[0] = handler
+
+        from awx.main.management.commands.inventory_import import Command as InventoryImportCommand
+        cmd = InventoryImportCommand()
+        try:
+            # save the inventory data to database.
+            # canceling exceptions will be handled in the global post_run_hook
+            cmd.perform_update(options, data, inventory_update)
+        except PermissionDenied as exc:
+            logger.exception('License error saving {} content'.format(inventory_update.log_format))
+            raise PostRunError(str(exc), status='error')
+        except PostRunError:
+            logger.exception('Error saving {} content, rolling back changes'.format(inventory_update.log_format))
+            raise
+        except Exception:
+            logger.exception('Exception saving {} content, rolling back changes.'.format(
+                inventory_update.log_format))
+            raise PostRunError(
+                'Error occured while saving inventory data, see traceback or server logs',
+                status='error', tb=traceback.format_exc())
+

@task(queue=get_local_queuename)
class RunAdHocCommand(BaseTask):
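A reader's sketch (not part of the diffs): the update is now two phases — ansible-inventory writes artifacts/output.json, then this hook loads it and hands it to the import command in-process. The data passed to perform_update is the ordinary inventory JSON, along the lines of (hosts made up):

    data = {
        '_meta': {'hostvars': {'host1.example.com': {'ansible_host': '10.0.0.1'}}},
        'all': {'children': ['ungrouped', 'webservers']},
        'webservers': {'hosts': ['host1.example.com']},
    }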
@@ -1,34 +0,0 @@
import pytest
import random

from awx.main.models import Project
from awx.main.analytics import collectors


@pytest.mark.django_db
def test_empty():
    assert collectors.projects_by_scm_type(None) == {
        'manual': 0,
        'git': 0,
        'svn': 0,
        'hg': 0,
        'insights': 0,
        'archive': 0,
    }


@pytest.mark.django_db
@pytest.mark.parametrize('scm_type', [t[0] for t in Project.SCM_TYPE_CHOICES])
def test_multiple(scm_type):
    expected = {
        'manual': 0,
        'git': 0,
        'svn': 0,
        'hg': 0,
        'insights': 0,
        'archive': 0,
    }
    for i in range(random.randint(0, 10)):
        Project(scm_type=scm_type).save()
        expected[scm_type or 'manual'] += 1
    assert collectors.projects_by_scm_type(None) == expected
@@ -675,33 +675,6 @@ def test_net_create_ok(post, organization, admin):
    assert cred.inputs['authorize'] is True


-#
-# Cloudforms Credentials
-#
-@pytest.mark.django_db
-def test_cloudforms_create_ok(post, organization, admin):
-    params = {
-        'credential_type': 1,
-        'name': 'Best credential ever',
-        'inputs': {
-            'host': 'some_host',
-            'username': 'some_username',
-            'password': 'some_password',
-        }
-    }
-    cloudforms = CredentialType.defaults['cloudforms']()
-    cloudforms.save()
-    params['organization'] = organization.id
-    response = post(reverse('api:credential_list'), params, admin)
-    assert response.status_code == 201
-
-    assert Credential.objects.count() == 1
-    cred = Credential.objects.all()[:1].get()
-    assert cred.inputs['host'] == 'some_host'
-    assert cred.inputs['username'] == 'some_username'
-    assert decrypt_field(cred, 'password') == 'some_password'
-
-
#
# GCE Credentials
#
@@ -99,3 +99,12 @@ def test_changing_overwrite_behavior_okay_if_not_used(post, patch, organization, admin):
        expect=200
    )
    assert Project.objects.get(pk=r1.data['id']).allow_override is False


+@pytest.mark.django_db
+def test_scm_project_local_path_invalid(get, patch, project, admin):
+    url = reverse('api:project_detail', kwargs={'pk': project.id})
+    resp = patch(url, {'local_path': '/foo/bar'}, user=admin, expect=400)
+    assert resp.data['local_path'] == [
+        'Cannot change local_path for git-based projects'
+    ]
@@ -282,10 +282,6 @@ def test_prefetch_ujt_project_capabilities(alice, project, job_template, mocker):
    list_serializer.child.to_representation(project)
    assert 'capability_map' not in list_serializer.child.context

-    # Models for which the prefetch is valid for do
-    list_serializer.child.to_representation(job_template)
-    assert set(list_serializer.child.context['capability_map'][job_template.id].keys()) == set(('copy', 'edit', 'start'))


@pytest.mark.django_db
def test_prefetch_group_capabilities(group, rando):
@@ -349,7 +349,7 @@ def test_months_with_31_days(post, admin_user):
    ('MINUTELY', 1, 60),
    ('MINUTELY', 15, 15 * 60),
    ('HOURLY', 1, 3600),
-    ('HOURLY', 4, 3600 * 4),
+    ('HOURLY', 2, 3600 * 2),
))
def test_really_old_dtstart(post, admin_user, freq, delta, total_seconds):
    url = reverse('api:schedule_rrule')
File diff suppressed because one or more lines are too long
@@ -9,6 +9,9 @@ import os
# Django
from django.core.management.base import CommandError

+# for license errors
+from rest_framework.exceptions import PermissionDenied
+
# AWX
from awx.main.management.commands import inventory_import
from awx.main.models import Inventory, Host, Group, InventorySource
@@ -83,7 +86,7 @@ class MockLoader:
        return self._data


-def mock_logging(self):
+def mock_logging(self, level):
    pass
@@ -322,6 +325,6 @@ def test_tower_version_compare():
            "version": "2.0.1-1068-g09684e2c41"
        }
    }
-    with pytest.raises(CommandError):
-        cmd.remote_tower_license_compare('very_supported')
+    with pytest.raises(PermissionDenied):
+        cmd.remote_tower_license_compare('open')
@@ -1,13 +0,0 @@
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved.

from awx.main.utils.common import StubLicense


def test_stub_license():
    license_actual = StubLicense().validate()
    assert license_actual['license_key'] == 'OPEN'
    assert license_actual['valid_key']
    assert license_actual['compliant']
    assert license_actual['license_type'] == 'open'
@@ -79,7 +79,6 @@ def test_default_cred_types():
        'aws',
        'azure_kv',
        'azure_rm',
-        'cloudforms',
        'conjur',
        'galaxy_api_token',
        'gce',
@@ -214,6 +214,9 @@ def test_inventory_update_injected_content(this_kind, inventory, fake_credential_factory):
        f"'{inventory_filename}' file not found in inventory update runtime files {content.keys()}"

    env.pop('ANSIBLE_COLLECTIONS_PATHS', None)  # collection paths not relevant to this test
+    env.pop('PYTHONPATH')
+    env.pop('VIRTUAL_ENV')
+    env.pop('PROOT_TMP_DIR')
    base_dir = os.path.join(DATA, 'plugins')
    if not os.path.exists(base_dir):
        os.mkdir(base_dir)
@@ -5,7 +5,7 @@ from awx.main.migrations import _inventory_source as invsrc

from django.apps import apps

-from awx.main.models import InventorySource
+from awx.main.models import InventorySource, InventoryUpdate, ManagedCredentialType, CredentialType, Credential


@pytest.mark.parametrize('vars,id_var,result', [
@@ -42,16 +42,40 @@ def test_apply_new_instance_id(inventory_source):


@pytest.mark.django_db
-def test_replacement_scm_sources(inventory):
-    inv_source = InventorySource.objects.create(
-        name='test',
-        inventory=inventory,
-        organization=inventory.organization,
-        source='ec2'
-    )
-    invsrc.create_scm_script_substitute(apps, 'ec2')
-    inv_source.refresh_from_db()
-    assert inv_source.source == 'scm'
-    assert inv_source.source_project
-    project = inv_source.source_project
-    assert 'Replacement project for' in project.name
+def test_cloudforms_inventory_removal(inventory):
+    ManagedCredentialType(
+        name='Red Hat CloudForms',
+        namespace='cloudforms',
+        kind='cloud',
+        managed_by_tower=True,
+        inputs={},
+    )
+    CredentialType.defaults['cloudforms']().save()
+    cloudforms = CredentialType.objects.get(namespace='cloudforms')
+    Credential.objects.create(
+        name='test',
+        credential_type=cloudforms,
+    )
+
+    for source in ('ec2', 'cloudforms'):
+        i = InventorySource.objects.create(
+            name='test',
+            inventory=inventory,
+            organization=inventory.organization,
+            source=source,
+        )
+        InventoryUpdate.objects.create(
+            name='test update',
+            inventory_source=i,
+            source=source,
+        )
+    assert Credential.objects.count() == 1
+    assert InventorySource.objects.count() == 2  # ec2 + cf
+    assert InventoryUpdate.objects.count() == 2  # ec2 + cf
+    invsrc.delete_cloudforms_inv_source(apps, None)
+    assert InventorySource.objects.count() == 1  # ec2
+    assert InventoryUpdate.objects.count() == 1  # ec2
+    assert InventorySource.objects.first().source == 'ec2'
+    assert InventoryUpdate.objects.first().source == 'ec2'
+    assert Credential.objects.count() == 0
+    assert CredentialType.objects.filter(namespace='cloudforms').exists() is False
@@ -1,6 +1,5 @@

-import glob
import json
import os

from django.conf import settings
@@ -30,8 +29,7 @@ def test_python_and_js_licenses():
            # Check variations of '-' and '_' in filenames due to python
            for fname in [name, name.replace('-','_')]:
                if entry.startswith(fname) and entry.endswith('.tar.gz'):
-                    entry = entry[:-7]
-                    (n, v) = entry.rsplit('-',1)
+                    v = entry.split(name + '-')[1].split('.tar.gz')[0]
                    return v
        return None
@@ -66,28 +64,6 @@ def test_python_and_js_licenses():
        ret[name] = { 'name': name, 'version': version}
        return ret

-    def read_ui_requirements(path):
-        def json_deps(jsondata):
-            ret = {}
-            deps = jsondata.get('dependencies',{})
-            for key in deps.keys():
-                key = key.lower()
-                devonly = deps[key].get('dev',False)
-                if not devonly:
-                    if key not in ret.keys():
-                        depname = key.replace('/','-')
-                        ret[depname] = {
-                            'name': depname,
-                            'version': deps[key]['version']
-                        }
-                        ret.update(json_deps(deps[key]))
-            return ret
-
-        with open('%s/package-lock.json' % path) as f:
-            jsondata = json.load(f)
-        return json_deps(jsondata)
-
    def remediate_licenses_and_requirements(licenses, requirements):
        errors = []
        items = list(licenses.keys())
@@ -114,12 +90,9 @@ def test_python_and_js_licenses():

    base_dir = settings.BASE_DIR
    api_licenses = index_licenses('%s/../docs/licenses' % base_dir)
-    ui_licenses = index_licenses('%s/../docs/licenses/ui' % base_dir)
    api_requirements = read_api_requirements('%s/../requirements' % base_dir)
-    ui_requirements = read_ui_requirements('%s/ui' % base_dir)

    errors = []
-    errors += remediate_licenses_and_requirements(ui_licenses, ui_requirements)
    errors += remediate_licenses_and_requirements(api_licenses, api_requirements)
    if errors:
        raise Exception('Included licenses not consistent with requirements:\n%s' %
@@ -33,32 +33,6 @@ class TestInvalidOptions:
        assert 'inventory-id' in str(err.value)
        assert 'exclusive' in str(err.value)

-    def test_invalid_options_id_and_keep_vars(self):
-        # You can't overwrite and keep_vars at the same time, that wouldn't make sense
-        cmd = Command()
-        with pytest.raises(CommandError) as err:
-            cmd.handle(
-                inventory_id=42, overwrite=True, keep_vars=True
-            )
-        assert 'overwrite-vars' in str(err.value)
-        assert 'exclusive' in str(err.value)
-
-    def test_invalid_options_id_but_no_source(self):
-        # Need a source to import
-        cmd = Command()
-        with pytest.raises(CommandError) as err:
-            cmd.handle(
-                inventory_id=42, overwrite=True, keep_vars=True
-            )
-        assert 'overwrite-vars' in str(err.value)
-        assert 'exclusive' in str(err.value)
-        with pytest.raises(CommandError) as err:
-            cmd.handle(
-                inventory_id=42, overwrite_vars=True, keep_vars=True
-            )
-        assert 'overwrite-vars' in str(err.value)
-        assert 'exclusive' in str(err.value)
-
    def test_invalid_options_missing_source(self):
        cmd = Command()
        with pytest.raises(CommandError) as err:
@@ -39,6 +39,8 @@ from awx.main import tasks
from awx.main.utils import encrypt_field, encrypt_value
from awx.main.utils.safe_yaml import SafeLoader

+from awx.main.utils.licensing import Licenser
+

class TestJobExecution(object):
    EXAMPLE_PRIVATE_KEY = '-----BEGIN PRIVATE KEY-----\nxyz==\n-----END PRIVATE KEY-----'
@@ -1830,7 +1832,10 @@ class TestProjectUpdateGalaxyCredentials(TestJobExecution):

        task = RunProjectUpdate()
        env = task.build_env(project_update, private_data_dir)
-        task.build_extra_vars_file(project_update, private_data_dir)
+
+        with mock.patch.object(Licenser, 'validate', lambda *args, **kw: {}):
+            task.build_extra_vars_file(project_update, private_data_dir)

        assert task.__vars__['roles_enabled'] is False
        assert task.__vars__['collections_enabled'] is False
        for k in env:
@@ -1850,7 +1855,10 @@ class TestProjectUpdateGalaxyCredentials(TestJobExecution):
        project_update.project.organization.galaxy_credentials.add(public_galaxy)
        task = RunProjectUpdate()
        env = task.build_env(project_update, private_data_dir)
-        task.build_extra_vars_file(project_update, private_data_dir)
+
+        with mock.patch.object(Licenser, 'validate', lambda *args, **kw: {}):
+            task.build_extra_vars_file(project_update, private_data_dir)

        assert task.__vars__['roles_enabled'] is True
        assert task.__vars__['collections_enabled'] is True
        assert sorted([
@@ -1901,19 +1909,16 @@ class TestProjectUpdateCredentials(TestJobExecution):
    parametrize = {
        'test_username_and_password_auth': [
            dict(scm_type='git'),
-            dict(scm_type='hg'),
            dict(scm_type='svn'),
            dict(scm_type='archive'),
        ],
        'test_ssh_key_auth': [
            dict(scm_type='git'),
-            dict(scm_type='hg'),
            dict(scm_type='svn'),
            dict(scm_type='archive'),
        ],
        'test_awx_task_env': [
            dict(scm_type='git'),
-            dict(scm_type='hg'),
            dict(scm_type='svn'),
            dict(scm_type='archive'),
        ]
@@ -1935,7 +1940,9 @@ class TestProjectUpdateCredentials(TestJobExecution):
        assert settings.PROJECTS_ROOT in process_isolation['process_isolation_show_paths']

        task._write_extra_vars_file = mock.Mock()
-        task.build_extra_vars_file(project_update, private_data_dir)
+
+        with mock.patch.object(Licenser, 'validate', lambda *args, **kw: {}):
+            task.build_extra_vars_file(project_update, private_data_dir)

        call_args, _ = task._write_extra_vars_file.call_args_list[0]
        _, extra_vars = call_args
@@ -2051,8 +2058,8 @@ class TestInventoryUpdateCredentials(TestJobExecution):
            credential, env, {}, [], private_data_dir
        )

-        assert '--custom' in ' '.join(args)
-        script = args[args.index('--source') + 1]
+        assert '-i' in ' '.join(args)
+        script = args[args.index('-i') + 1]
        with open(script, 'r') as f:
            assert f.read() == inventory_update.source_script.script
        assert env['FOO'] == 'BAR'
@@ -2140,10 +2147,6 @@ class TestInventoryUpdateCredentials(TestJobExecution):
            return cred
        inventory_update.get_cloud_credential = get_cred
        inventory_update.get_extra_credentials = mocker.Mock(return_value=[])
-        inventory_update.source_vars = {
-            'include_powerstate': 'yes',
-            'group_by_resource_group': 'no'
-        }

        private_data_files = task.build_private_data_files(inventory_update, private_data_dir)
        env = task.build_env(inventory_update, private_data_dir, False, private_data_files)
@@ -2177,11 +2180,6 @@ class TestInventoryUpdateCredentials(TestJobExecution):
            return cred
        inventory_update.get_cloud_credential = get_cred
        inventory_update.get_extra_credentials = mocker.Mock(return_value=[])
-        inventory_update.source_vars = {
-            'include_powerstate': 'yes',
-            'group_by_resource_group': 'no',
-            'group_by_security_group': 'no'
-        }

        private_data_files = task.build_private_data_files(inventory_update, private_data_dir)
        env = task.build_env(inventory_update, private_data_dir, False, private_data_files)
@@ -2296,21 +2294,14 @@ class TestInventoryUpdateCredentials(TestJobExecution):
        inventory_update.get_cloud_credential = get_cred
        inventory_update.get_extra_credentials = mocker.Mock(return_value=[])

-        inventory_update.source_vars = {
-            'satellite6_group_patterns': '[a,b,c]',
-            'satellite6_group_prefix': 'hey_',
-            'satellite6_want_hostcollections': True,
-            'satellite6_want_ansible_ssh_host': True,
-            'satellite6_rich_params': True,
-            'satellite6_want_facts': False
-        }
-
        private_data_files = task.build_private_data_files(inventory_update, private_data_dir)
        env = task.build_env(inventory_update, private_data_dir, False, private_data_files)
        safe_env = build_safe_env(env)

-        env["FOREMAN_SERVER"] == "https://example.org",
-        env["FOREMAN_USER"] == "bob",
-        env["FOREMAN_PASSWORD"] == "secret",
+        assert env["FOREMAN_SERVER"] == "https://example.org"
+        assert env["FOREMAN_USER"] == "bob"
+        assert env["FOREMAN_PASSWORD"] == "secret"
        assert safe_env["FOREMAN_PASSWORD"] == tasks.HIDDEN_PASSWORD

    @pytest.mark.parametrize('verify', [True, False])
    def test_tower_source(self, verify, inventory_update, private_data_dir, mocker):
@@ -55,8 +55,7 @@ __all__ = [
    'model_instance_diff', 'parse_yaml_or_json', 'RequireDebugTrueOrTest',
    'has_model_field_prefetched', 'set_environ', 'IllegalArgumentError',
    'get_custom_venv_choices', 'get_external_account', 'task_manager_bulk_reschedule',
-    'schedule_task_manager', 'classproperty', 'create_temporary_fifo', 'truncate_stdout',
-    'StubLicense'
+    'schedule_task_manager', 'classproperty', 'create_temporary_fifo', 'truncate_stdout'
]
@@ -190,7 +189,7 @@ def get_awx_version():


def get_awx_http_client_headers():
-    license = get_license(show_key=False).get('license_type', 'UNLICENSED')
+    license = get_license().get('license_type', 'UNLICENSED')
    headers = {
        'Content-Type': 'application/json',
        'User-Agent': '{} {} ({})'.format(
@@ -202,34 +201,15 @@ def get_awx_http_client_headers():
    return headers


-class StubLicense(object):
-
-    features = {
-        'activity_streams': True,
-        'ha': True,
-        'ldap': True,
-        'multiple_organizations': True,
-        'surveys': True,
-        'system_tracking': True,
-        'rebranding': True,
-        'enterprise_auth': True,
-        'workflows': True,
-    }
-
-    def validate(self):
-        return dict(license_key='OPEN',
-                    valid_key=True,
-                    compliant=True,
-                    features=self.features,
-                    license_type='open')
-
-
def get_licenser(*args, **kwargs):
+    from awx.main.utils.licensing import Licenser, OpenLicense
    try:
-        from tower_license import TowerLicense
-        return TowerLicense(*args, **kwargs)
-    except ImportError:
-        return StubLicense(*args, **kwargs)
+        if os.path.exists('/var/lib/awx/.tower_version'):
+            return Licenser(*args, **kwargs)
+        else:
+            return OpenLicense()
    except Exception as e:
        raise ValueError(_('Error importing Tower License: %s') % e)
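A reader's sketch (not part of the diffs): the stubbed license is gone, so an AWX install (no /var/lib/awx/.tower_version marker file) now gets OpenLicense. Under that assumption, callers see roughly:

    from awx.main.utils.common import get_licenser

    info = get_licenser().validate()
    # on AWX, OpenLicense returns: {'license_type': 'open', 'valid_key': True,
    #                               'subscription_name': 'OPEN', 'product_name': 'AWX'}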
@@ -242,9 +222,8 @@ def update_scm_url(scm_type, url, username=True, password=True,
    '''
    # Handle all of the URL formats supported by the SCM systems:
    # git: https://www.kernel.org/pub/software/scm/git/docs/git-clone.html#URLS
-    # hg: http://www.selenic.com/mercurial/hg.1.html#url-paths
    # svn: http://svnbook.red-bean.com/en/1.7/svn-book.html#svn.advanced.reposurls
-    if scm_type not in ('git', 'hg', 'svn', 'insights', 'archive'):
+    if scm_type not in ('git', 'svn', 'insights', 'archive'):
        raise ValueError(_('Unsupported SCM type "%s"') % str(scm_type))
    if not url.strip():
        return ''
@@ -276,8 +255,8 @@ def update_scm_url(scm_type, url, username=True, password=True,
        # SCP style before passed to git module.
        parts = urllib.parse.urlsplit('git+ssh://%s' % modified_url)
    # Handle local paths specified without file scheme (e.g. /path/to/foo).
-    # Only supported by git and hg.
-    elif scm_type in ('git', 'hg'):
+    # Only supported by git.
+    elif scm_type == 'git':
        if not url.startswith('/'):
            parts = urllib.parse.urlsplit('file:///%s' % url)
        else:
@@ -288,7 +267,6 @@ def update_scm_url(scm_type, url, username=True, password=True,
    # Validate that scheme is valid for given scm_type.
    scm_type_schemes = {
        'git': ('ssh', 'git', 'git+ssh', 'http', 'https', 'ftp', 'ftps', 'file'),
-        'hg': ('http', 'https', 'ssh', 'file'),
        'svn': ('http', 'https', 'svn', 'svn+ssh', 'file'),
        'insights': ('http', 'https'),
        'archive': ('http', 'https'),
@@ -320,12 +298,6 @@ def update_scm_url(scm_type, url, username=True, password=True,
    if scm_type == 'git' and parts.scheme.endswith('ssh') and parts.hostname in special_git_hosts and netloc_password:
        #raise ValueError('Password not allowed for SSH access to %s.' % parts.hostname)
        netloc_password = ''
-    special_hg_hosts = ('bitbucket.org', 'altssh.bitbucket.org')
-    if scm_type == 'hg' and parts.scheme == 'ssh' and parts.hostname in special_hg_hosts and netloc_username != 'hg':
-        raise ValueError(_('Username must be "hg" for SSH access to %s.') % parts.hostname)
-    if scm_type == 'hg' and parts.scheme == 'ssh' and netloc_password:
-        #raise ValueError('Password not supported for SSH with Mercurial.')
-        netloc_password = ''

    if netloc_username and parts.scheme != 'file' and scm_type not in ("insights", "archive"):
        netloc = u':'.join([urllib.parse.quote(x,safe='') for x in (netloc_username, netloc_password) if x])
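A reader's sketch (not part of the diffs): with hg gone, git and svn are the only full normalization paths left. The exact normalized output below is an assumption about this function's behavior, not quoted from the commits:

    from awx.main.utils.common import update_scm_url

    update_scm_url('git', 'git@github.com:ansible/awx.git')  # SCP style -> roughly 'git+ssh://git@github.com/ansible/awx.git'
    update_scm_url('hg', 'https://bitbucket.org/some/repo')  # now raises ValueError: Unsupported SCM type "hg"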
@@ -9,6 +9,7 @@ import socket
from datetime import datetime

from dateutil.tz import tzutc
+from django.utils.timezone import now
from django.core.serializers.json import DjangoJSONEncoder
from django.conf import settings
@@ -17,8 +18,15 @@ class TimeFormatter(logging.Formatter):
    '''
    Custom log formatter used for inventory imports
    '''
+    def __init__(self, start_time=None, **kwargs):
+        if start_time is None:
+            self.job_start = now()
+        else:
+            self.job_start = start_time
+        super(TimeFormatter, self).__init__(**kwargs)
+
    def format(self, record):
-        record.relativeSeconds = record.relativeCreated / 1000.0
+        record.relativeSeconds = (now() - self.job_start).total_seconds()
        return logging.Formatter.format(self, record)
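A reader's sketch (not part of the diffs): relativeCreated counts from when the logging module was loaded in the process, so anchoring on an explicit start time makes the stamps meaningful for an update that begins long after process start. Assuming a known start and a made-up message:

    formatter = TimeFormatter(start_time=inventory_update.started,
                              fmt='%(relativeSeconds)5.3f %(message)s')
    # a record emitted 2.5s into the update formats as: "2.500 Loading inventory source..."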
@@ -7,6 +7,10 @@ import os.path

# Django
from django.conf import settings
+from django.utils.timezone import now
+
+# AWX
+from awx.main.exceptions import PostRunError


class RSysLogHandler(logging.handlers.SysLogHandler):
@@ -40,6 +44,58 @@ class RSysLogHandler(logging.handlers.SysLogHandler):
        pass


+class SpecialInventoryHandler(logging.Handler):
+    """Logging handler used for the saving-to-database part of inventory updates
+    ran by the task system
+    this dispatches events directly to be processed by the callback receiver,
+    as opposed to ansible-runner
+    """
+
+    def __init__(self, event_handler, cancel_callback, job_timeout, verbosity,
+                 start_time=None, counter=0, initial_line=0, **kwargs):
+        self.event_handler = event_handler
+        self.cancel_callback = cancel_callback
+        self.job_timeout = job_timeout
+        if start_time is None:
+            self.job_start = now()
+        else:
+            self.job_start = start_time
+        self.last_check = self.job_start
+        self.counter = counter
+        self.skip_level = [logging.WARNING, logging.INFO, logging.DEBUG, 0][verbosity]
+        self._current_line = initial_line
+        super(SpecialInventoryHandler, self).__init__(**kwargs)
+
+    def emit(self, record):
+        # check cancel and timeout status regardless of log level
+        this_time = now()
+        if (this_time - self.last_check).total_seconds() > 0.5:  # cancel callback is expensive
+            self.last_check = this_time
+            if self.cancel_callback():
+                raise PostRunError('Inventory update has been canceled', status='canceled')
+        if self.job_timeout and ((this_time - self.job_start).total_seconds() > self.job_timeout):
+            raise PostRunError('Inventory update has timed out', status='canceled')
+
+        # skip logging for low severity logs
+        if record.levelno < self.skip_level:
+            return
+
+        self.counter += 1
+        msg = self.format(record)
+        n_lines = len(msg.strip().split('\n'))  # don't count line breaks at boundry of text
+        dispatch_data = dict(
+            created=now().isoformat(),
+            event='verbose',
+            counter=self.counter,
+            stdout=msg,
+            start_line=self._current_line,
+            end_line=self._current_line + n_lines
+        )
+        self._current_line += n_lines
+
+        self.event_handler(dispatch_data)
+
+
ColorHandler = logging.StreamHandler

if settings.COLOR_LOGS is True:
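A reader's sketch (not part of the diffs): the task code swaps this handler onto the awx.main.commands.inventory_import logger so import log lines become job events, continuing the line numbering where ansible-inventory stopped. Wiring it up with placeholder callbacks and hypothetical running totals:

    handler = SpecialInventoryHandler(
        event_handler=lambda data: print(data['counter'], data['stdout']),  # stand-in for the callback dispatcher
        cancel_callback=lambda: False,
        job_timeout=600, verbosity=1,
        counter=events_so_far, initial_line=last_end_line,  # hypothetical counters carried over
    )
    logging.getLogger('awx.main.commands.inventory_import').handlers[0] = handler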
awx/main/utils/licensing.py (new file, 439 lines)
@@ -0,0 +1,439 @@
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved.

'''
This is intended to be a lightweight license class for verifying subscriptions, and parsing subscription data
from entitlement certificates.

The Licenser class can do the following:
 - Parse an Entitlement cert to generate license
'''

import base64
import configparser
from datetime import datetime, timezone
import collections
import copy
import io
import json
import logging
import re
import requests
import time
import zipfile

from dateutil.parser import parse as parse_date

from cryptography.exceptions import InvalidSignature
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric import padding
from cryptography import x509

# Django
from django.conf import settings
from django.utils.translation import ugettext_lazy as _

# AWX
from awx.main.models import Host

MAX_INSTANCES = 9999999

logger = logging.getLogger(__name__)


def rhsm_config():
    path = '/etc/rhsm/rhsm.conf'
    config = configparser.ConfigParser()
    config.read(path)
    return config


def validate_entitlement_manifest(data):
    buff = io.BytesIO()
    buff.write(base64.b64decode(data))
    try:
        z = zipfile.ZipFile(buff)
    except zipfile.BadZipFile as e:
        raise ValueError(_("Invalid manifest: a subscription manifest zip file is required.")) from e
    buff = io.BytesIO()

    files = z.namelist()
    if 'consumer_export.zip' not in files or 'signature' not in files:
        raise ValueError(_("Invalid manifest: missing required files."))
    export = z.open('consumer_export.zip').read()
    sig = z.open('signature').read()
    with open('/etc/tower/candlepin-redhat-ca.crt', 'rb') as f:
        cert = x509.load_pem_x509_certificate(f.read(), backend=default_backend())
    key = cert.public_key()
    try:
        key.verify(sig, export, padding=padding.PKCS1v15(), algorithm=hashes.SHA256())
    except InvalidSignature as e:
        raise ValueError(_("Invalid manifest: signature verification failed.")) from e

    buff.write(export)
    z = zipfile.ZipFile(buff)
    subs = []
    for f in z.filelist:
        if f.filename.startswith('export/entitlements') and f.filename.endswith('.json'):
            subs.append(json.loads(z.open(f).read()))
    if subs:
        return subs
    raise ValueError(_("Invalid manifest: manifest contains no subscriptions."))


class OpenLicense(object):
    def validate(self):
        return dict(
            license_type='open',
            valid_key=True,
            subscription_name='OPEN',
            product_name="AWX",
        )


class Licenser(object):
    # warn when there is a month (30 days) left on the subscription
    SUBSCRIPTION_TIMEOUT = 60 * 60 * 24 * 30

    UNLICENSED_DATA = dict(
        subscription_name=None,
        sku=None,
        support_level=None,
        instance_count=0,
        license_date=0,
        license_type="UNLICENSED",
        product_name="Red Hat Ansible Automation Platform",
        valid_key=False
    )

    def __init__(self, **kwargs):
        self._attrs = dict(
            instance_count=0,
            license_date=0,
            license_type='UNLICENSED',
        )
        self.config = rhsm_config()
        if not kwargs:
            license_setting = getattr(settings, 'LICENSE', None)
            if license_setting is not None:
                kwargs = license_setting

        if 'company_name' in kwargs:
            kwargs.pop('company_name')
        self._attrs.update(kwargs)
        if 'valid_key' in self._attrs:
            if not self._attrs['valid_key']:
                self._unset_attrs()
        else:
            self._unset_attrs()

    def _unset_attrs(self):
        self._attrs = self.UNLICENSED_DATA.copy()

    def license_from_manifest(self, manifest):
        def is_appropriate_manifest_sub(sub):
            if sub['pool']['activeSubscription'] is False:
                return False
            now = datetime.now(timezone.utc)
            if parse_date(sub['startDate']) > now:
                return False
            if parse_date(sub['endDate']) < now:
                return False
            products = sub['pool']['providedProducts']
            if any(product.get('productId') == '480' for product in products):
                return True
            return False

        def _can_aggregate(sub, license):
            # We aggregate multiple subs into a larger meta-sub, if they match
            #
            # No current sub in aggregate
            if not license:
                return True
            # Same SKU type (SER vs MCT vs others)?
            if license['sku'][0:3] != sub['pool']['productId'][0:3]:
                return False
            return True

        # Parse output for subscription metadata to build config
        license = dict()
        for sub in manifest:
            if not is_appropriate_manifest_sub(sub):
                logger.warning("Subscription %s (%s) in manifest is not active or for another product" %
                               (sub['pool']['productName'], sub['pool']['productId']))
                continue
            if not _can_aggregate(sub, license):
                logger.warning("Subscription %s (%s) in manifest does not match other manifest subscriptions" %
                               (sub['pool']['productName'], sub['pool']['productId']))
                continue

            license.setdefault('sku', sub['pool']['productId'])
            license.setdefault('subscription_name', sub['pool']['productName'])
            license.setdefault('pool_id', sub['pool']['id'])
            license.setdefault('product_name', sub['pool']['productName'])
            license.setdefault('valid_key', True)
            license.setdefault('license_type', 'enterprise')
            license.setdefault('satellite', False)
            # Use the nearest end date
            endDate = parse_date(sub['endDate'])
            currentEndDateStr = license.get('license_date', '4102462800')  # 2100-01-01
            currentEndDate = datetime.fromtimestamp(int(currentEndDateStr), timezone.utc)
            if endDate < currentEndDate:
                license['license_date'] = endDate.strftime('%s')
            instances = sub['quantity']
            license['instance_count'] = license.get('instance_count', 0) + instances
            license['subscription_name'] = re.sub(r'[\d]* Managed Nodes', '%d Managed Nodes' % license['instance_count'], license['subscription_name'])

        if not license:
            logger.error("No valid subscriptions found in manifest")
        self._attrs.update(license)
        settings.LICENSE = self._attrs
        return self._attrs

    def update(self, **kwargs):
        # Update attributes of the current license.
        if 'instance_count' in kwargs:
            kwargs['instance_count'] = int(kwargs['instance_count'])
        if 'license_date' in kwargs:
            kwargs['license_date'] = int(kwargs['license_date'])
        self._attrs.update(kwargs)

    def validate_rh(self, user, pw):
        try:
            host = 'https://' + str(self.config.get("server", "hostname"))
        except Exception:
            logger.exception('Cannot access rhsm.conf, make sure subscription manager is installed and configured.')
            host = None
        if not host:
            host = getattr(settings, 'REDHAT_CANDLEPIN_HOST', None)

        if not user:
            raise ValueError('subscriptions_username is required')

        if not pw:
            raise ValueError('subscriptions_password is required')

        if host and user and pw:
            if 'subscription.rhsm.redhat.com' in host:
                json = self.get_rhsm_subs(host, user, pw)
            else:
                json = self.get_satellite_subs(host, user, pw)
            return self.generate_license_options_from_entitlements(json)
        return []

    def get_rhsm_subs(self, host, user, pw):
        verify = getattr(settings, 'REDHAT_CANDLEPIN_VERIFY', True)
        json = []
        try:
            subs = requests.get(
                '/'.join([host, 'subscription/users/{}/owners'.format(user)]),
                verify=verify,
                auth=(user, pw)
            )
        except requests.exceptions.ConnectionError as error:
            raise error
        except OSError as error:
            raise OSError('Unable to open certificate bundle {}. Check that Ansible Tower is running on Red Hat Enterprise Linux.'.format(verify)) from error  # noqa
        subs.raise_for_status()

        for sub in subs.json():
            resp = requests.get(
                '/'.join([
                    host,
                    'subscription/owners/{}/pools/?match=*tower*'.format(sub['key'])
                ]),
                verify=verify,
                auth=(user, pw)
            )
            resp.raise_for_status()
            json.extend(resp.json())
        return json

    def get_satellite_subs(self, host, user, pw):
        port = None
        try:
            verify = str(self.config.get("rhsm", "repo_ca_cert"))
            port = str(self.config.get("server", "port"))
        except Exception as e:
            logger.exception('Unable to read rhsm config to get ca_cert location. {}'.format(str(e)))
            verify = getattr(settings, 'REDHAT_CANDLEPIN_VERIFY', True)
        if port:
            host = ':'.join([host, port])
        json = []
        try:
            orgs = requests.get(
                '/'.join([host, 'katello/api/organizations']),
                verify=verify,
                auth=(user, pw)
            )
        except requests.exceptions.ConnectionError as error:
            raise error
        except OSError as error:
            raise OSError('Unable to open certificate bundle {}. Check that Ansible Tower is running on Red Hat Enterprise Linux.'.format(verify)) from error  # noqa
        orgs.raise_for_status()

        for org in orgs.json()['results']:
            resp = requests.get(
                '/'.join([
                    host,
                    '/katello/api/organizations/{}/subscriptions/?search=Red Hat Ansible Automation'.format(org['id'])
                ]),
                verify=verify,
                auth=(user, pw)
            )
            resp.raise_for_status()
            results = resp.json()['results']
            if results != []:
                for sub in results:
                    # Parse output for subscription metadata to build config
                    license = dict()
                    license['productId'] = sub['product_id']
                    license['quantity'] = int(sub['quantity'])
                    license['support_level'] = sub['support_level']
                    license['subscription_name'] = sub['name']
                    license['id'] = sub['upstream_pool_id']
                    license['endDate'] = sub['end_date']
                    license['productName'] = "Red Hat Ansible Automation"
                    license['valid_key'] = True
                    license['license_type'] = 'enterprise'
                    license['satellite'] = True
                    json.append(license)
        return json

    def is_appropriate_sat_sub(self, sub):
        if 'Red Hat Ansible Automation' not in sub['subscription_name']:
            return False
        return True

    def is_appropriate_sub(self, sub):
        if sub['activeSubscription'] is False:
            return False
        # Products that contain Ansible Tower
        products = sub.get('providedProducts', [])
        if any(product.get('productId') == '480' for product in products):
            return True
        return False

    def generate_license_options_from_entitlements(self, json):
        from dateutil.parser import parse
        ValidSub = collections.namedtuple('ValidSub', 'sku name support_level end_date trial quantity pool_id satellite')
        valid_subs = []
        for sub in json:
            satellite = sub.get('satellite')
            if satellite:
                is_valid = self.is_appropriate_sat_sub(sub)
            else:
                is_valid = self.is_appropriate_sub(sub)
            if is_valid:
                try:
                    end_date = parse(sub.get('endDate'))
                except Exception:
                    continue
                now = datetime.utcnow()
                now = now.replace(tzinfo=end_date.tzinfo)
                if end_date < now:
                    # If the sub has a past end date, skip it
                    continue
                try:
                    quantity = int(sub['quantity'])
                    if quantity == -1:
                        # effectively, unlimited
                        quantity = MAX_INSTANCES
                except Exception:
                    continue

                sku = sub['productId']
                trial = sku.startswith('S')  # i.e.,, SER/SVC
                support_level = ''
                pool_id = sub['id']
                if satellite:
                    support_level = sub['support_level']
                else:
                    for attr in sub.get('productAttributes', []):
                        if attr.get('name') == 'support_level':
                            support_level = attr.get('value')

                valid_subs.append(ValidSub(
                    sku, sub['productName'], support_level, end_date, trial, quantity, pool_id, satellite
                ))

        if valid_subs:
            licenses = []
            for sub in valid_subs:
                license = self.__class__(subscription_name='Red Hat Ansible Automation Platform')
                license._attrs['instance_count'] = int(sub.quantity)
                license._attrs['sku'] = sub.sku
                license._attrs['support_level'] = sub.support_level
                license._attrs['license_type'] = 'enterprise'
                if sub.trial:
                    license._attrs['trial'] = True
                    license._attrs['license_type'] = 'trial'
                license._attrs['instance_count'] = min(
                    MAX_INSTANCES, license._attrs['instance_count']
                )
                human_instances = license._attrs['instance_count']
                if human_instances == MAX_INSTANCES:
                    human_instances = 'Unlimited'
                subscription_name = re.sub(
                    r' \([\d]+ Managed Nodes',
                    ' ({} Managed Nodes'.format(human_instances),
                    sub.name
                )
                license._attrs['subscription_name'] = subscription_name
                license._attrs['satellite'] = satellite
                license._attrs['valid_key'] = True
                license.update(
                    license_date=int(sub.end_date.strftime('%s'))
                )
                license.update(
                    pool_id=sub.pool_id
                )
                licenses.append(license._attrs.copy())
            return licenses

        raise ValueError(
            'No valid Red Hat Ansible Automation subscription could be found for this account.'  # noqa
        )

    def validate(self):
        # Return license attributes with additional validation info.
        attrs = copy.deepcopy(self._attrs)
        type = attrs.get('license_type', 'none')

        if (type == 'UNLICENSED' or False):
            attrs.update(dict(valid_key=False, compliant=False))
            return attrs
        attrs['valid_key'] = True

        if Host:
            current_instances = Host.objects.active_count()
        else:
            current_instances = 0
        instance_count = int(attrs.get('instance_count', 0))
        attrs['current_instances'] = current_instances
        free_instances = (instance_count - current_instances)
        attrs['free_instances'] = max(0, free_instances)

        license_date = int(attrs.get('license_date', 0) or 0)
        current_date = int(time.time())
        time_remaining = license_date - current_date
        attrs['time_remaining'] = time_remaining
        if attrs.setdefault('trial', False):
            attrs['grace_period_remaining'] = time_remaining
        else:
            attrs['grace_period_remaining'] = (license_date + 2592000) - current_date
        attrs['compliant'] = bool(time_remaining > 0 and free_instances >= 0)
        attrs['date_warning'] = bool(time_remaining < self.SUBSCRIPTION_TIMEOUT)
        attrs['date_expired'] = bool(time_remaining <= 0)
        return attrs
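A reader's sketch (not part of the diffs): validate() works in whole seconds, and the 2592000 grace constant is 30 days (30 * 24 * 60 * 60). A worked example with made-up numbers:

    import time
    # 500-node subscription expiring in 10 days, 420 hosts in use
    attrs = {'instance_count': 500, 'license_date': int(time.time()) + 10 * 86400}
    # validate() would then report:
    #   free_instances         = 500 - 420 = 80
    #   time_remaining         = ~864000s (10 days) -> compliant
    #   date_warning           = True (under the 30-day SUBSCRIPTION_TIMEOUT)
    #   grace_period_remaining = time_remaining + 2592000 (30 days past expiry)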
awx/main/utils/profiling.py (new file, 151 lines)
@@ -0,0 +1,151 @@
import cProfile
import functools
import pstats
import os
import uuid
import datetime
import json
import sys


class AWXProfileBase:
    def __init__(self, name, dest):
        self.name = name
        self.dest = dest
        self.results = {}

    def generate_results(self):
        raise RuntimeError("define me")

    def output_results(self, fname=None):
        if not os.path.isdir(self.dest):
            os.makedirs(self.dest)

        if fname:
            fpath = os.path.join(self.dest, fname)
            with open(fpath, 'w') as f:
                f.write(json.dumps(self.results, indent=2))


class AWXTiming(AWXProfileBase):
    def __init__(self, name, dest='/var/log/tower/timing'):
        super().__init__(name, dest)

        self.time_start = None
        self.time_end = None

    def start(self):
        self.time_start = datetime.datetime.now()

    def stop(self):
        self.time_end = datetime.datetime.now()

        self.generate_results()
        self.output_results()

    def generate_results(self):
        diff = (self.time_end - self.time_start).total_seconds()
        self.results = {
            'name': self.name,
            'diff': f'{diff}-seconds',
        }

    def output_results(self):
        fname = f"{self.results['diff']}-{self.name}-{uuid.uuid4()}.time"
        super().output_results(fname)


def timing(name, *init_args, **init_kwargs):
    def decorator_profile(func):
        @functools.wraps(func)
        def wrapper_profile(*args, **kwargs):
            timing = AWXTiming(name, *init_args, **init_kwargs)
            timing.start()
            res = func(*args, **kwargs)
            timing.stop()
            return res
        return wrapper_profile
    return decorator_profile


class AWXProfiler(AWXProfileBase):
    def __init__(self, name, dest='/var/log/tower/profile', dot_enabled=True):
        '''
        Try to do as little as possible in init. Instead, do the init
        only when the profiling is started.
        '''
        super().__init__(name, dest)
        self.started = False
        self.dot_enabled = dot_enabled
        self.results = {
            'total_time_seconds': 0,
        }

    def generate_results(self):
        self.results['total_time_seconds'] = pstats.Stats(self.prof).total_tt

    def output_results(self):
        super().output_results()

        filename_base = '%.3fs-%s-%s-%s' % (self.results['total_time_seconds'], self.name, self.pid, uuid.uuid4())
        pstats_filepath = os.path.join(self.dest, f"{filename_base}.pstats")
        extra_data = ""

        if self.dot_enabled:
            try:
                from gprof2dot import main as generate_dot
            except ImportError:
                extra_data = 'Dot graph generation failed due to package "gprof2dot" being unavailable.'
            else:
                raw_filepath = os.path.join(self.dest, f"{filename_base}.raw")
                dot_filepath = os.path.join(self.dest, f"{filename_base}.dot")

                pstats.Stats(self.prof).dump_stats(raw_filepath)
                generate_dot([
                    '-n', '2.5', '-f', 'pstats', '-o',
                    dot_filepath,
                    raw_filepath
                ])
                os.remove(raw_filepath)

        with open(pstats_filepath, 'w') as f:
            print(f"{self.name}, {extra_data}", file=f)
            pstats.Stats(self.prof, stream=f).sort_stats('cumulative').print_stats()
        return pstats_filepath

    def start(self):
        self.prof = cProfile.Profile()
        self.pid = os.getpid()

        self.prof.enable()
        self.started = True

    def is_started(self):
        return self.started

    def stop(self):
        if self.started:
            self.prof.disable()

            self.generate_results()
            res = self.output_results()
            self.started = False
            return res
        else:
            print("AWXProfiler::stop() called without calling start() first", file=sys.stderr)
            return None


def profile(name, *init_args, **init_kwargs):
    def decorator_profile(func):
        @functools.wraps(func)
        def wrapper_profile(*args, **kwargs):
            prof = AWXProfiler(name, *init_args, **init_kwargs)
            prof.start()
            res = func(*args, **kwargs)
            prof.stop()
            return res
        return wrapper_profile
    return decorator_profile
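A reader's sketch (not part of the diffs): both helpers are meant to be used as decorators; the extra arguments are forwarded to the underlying class. Writing to a temporary directory instead of the /var/log/tower defaults:

    from awx.main.utils.profiling import timing, profile

    @timing('schedule', dest='/tmp/awx-timing')                          # one .time JSON per call
    @profile('schedule', dest='/tmp/awx-profile', dot_enabled=False)     # one .pstats report per call
    def schedule():
        ...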
@@ -1,15 +0,0 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

from ansible.plugins.action import ActionBase


class ActionModule(ActionBase):

    def run(self, tmp=None, task_vars=None):
        self._supports_check_mode = False
        result = super(ActionModule, self).run(tmp, task_vars)
        result['changed'] = result['failed'] = False
        result['msg'] = ''
        self._display.deprecated("Mercurial support is deprecated")
        return result
@@ -48,12 +48,6 @@
|
||||
tags:
|
||||
- update_git
|
||||
|
||||
- block:
|
||||
- name: include hg tasks
|
||||
include_tasks: project_update_hg_tasks.yml
|
||||
tags:
|
||||
- update_hg
|
||||
|
||||
- block:
|
||||
- name: update project using svn
|
||||
subversion:
|
||||
@@ -150,7 +144,6 @@
|
||||
msg: "Repository Version {{ scm_version }}"
|
||||
tags:
|
||||
- update_git
|
||||
- update_hg
|
||||
- update_svn
|
||||
- update_insights
|
||||
- update_archive
|
||||
|
||||
@@ -1,20 +0,0 @@
---
- name: Mercurial support is deprecated.
  hg_deprecation:

- name: update project using hg
  hg:
    dest: "{{project_path|quote}}"
    repo: "{{scm_url|quote}}"
    revision: "{{scm_branch|quote}}"
    force: "{{scm_clean}}"
  register: hg_result

- name: Set the hg repository version
  set_fact:
    scm_version: "{{ hg_result['after'] }}"
  when: "'after' in hg_result"

- name: parse hg version string properly
  set_fact:
    scm_version: "{{scm_version|regex_replace('^([A-Za-z0-9]+).*$', '\\1')}}"
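The final `regex_replace` keeps only the leading alphanumeric run of the reported version, dropping suffixes such as the `+` that hg appends for a modified working copy. The same transform in plain Python, with a made-up version string:

```python
import re

# Hypothetical `hg` output for a dirty working copy.
hg_version = '3a4b5c6d7e8f+'
scm_version = re.sub(r'^([A-Za-z0-9]+).*$', r'\1', hg_version)
print(scm_version)  # -> 3a4b5c6d7e8f
```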
@@ -91,7 +91,6 @@ USE_L10N = True
USE_TZ = True

STATICFILES_DIRS = (
    os.path.join(BASE_DIR, 'ui', 'static'),
    os.path.join(BASE_DIR, 'ui_next', 'build', 'static'),
    os.path.join(BASE_DIR, 'static'),
)
@@ -249,8 +248,7 @@ TEMPLATES = [
                'django.template.context_processors.static',
                'django.template.context_processors.tz',
                'django.contrib.messages.context_processors.messages',
                'awx.ui.context_processors.settings',
                'awx.ui.context_processors.version',
                'awx.ui.context_processors.csp',
                'social_django.context_processors.backends',
                'social_django.context_processors.login_redirect',
            ],
@@ -184,3 +184,6 @@ else:
    pass

AWX_CALLBACK_PROFILE = True

if 'sqlite3' not in DATABASES['default']['ENGINE']:  # noqa
    DATABASES['default'].setdefault('OPTIONS', dict()).setdefault('application_name', f'{CLUSTER_HOST_ID}-{os.getpid()}-{" ".join(sys.argv)}'[:63])  # noqa
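The `[:63]` slice is presumably there because PostgreSQL truncates `application_name` to 63 characters; the composed name tags each database connection with the cluster host, process id, and command line. A small sketch of what a connection would report, with a hypothetical `CLUSTER_HOST_ID`:

```python
import os
import sys

CLUSTER_HOST_ID = 'awx-1.example.org'  # hypothetical cluster host id

# Same expression as in the setting above; always at most 63 characters.
application_name = f'{CLUSTER_HOST_ID}-{os.getpid()}-{" ".join(sys.argv)}'[:63]
print(application_name)
```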
@@ -175,13 +175,6 @@ TEST_GIT_PUBLIC_HTTPS = 'https://github.com/ansible/ansible.github.com.git'
TEST_GIT_PRIVATE_HTTPS = 'https://github.com/ansible/product-docs.git'
TEST_GIT_PRIVATE_SSH = 'git@github.com:ansible/product-docs.git'

TEST_HG_USERNAME = ''
TEST_HG_PASSWORD = ''
TEST_HG_KEY_DATA = TEST_SSH_KEY_DATA
TEST_HG_PUBLIC_HTTPS = 'https://bitbucket.org/cchurch/django-hotrunner'
TEST_HG_PRIVATE_HTTPS = ''
TEST_HG_PRIVATE_SSH = ''

TEST_SVN_USERNAME = ''
TEST_SVN_PASSWORD = ''
TEST_SVN_PUBLIC_HTTPS = 'https://github.com/ansible/ansible.github.com'
@@ -38,7 +38,7 @@ if is_testing(sys.argv):
            },
        }
    }


# AMQP configuration.
BROKER_URL = 'amqp://guest:guest@localhost:5672'
@@ -146,13 +146,6 @@ TEST_GIT_PUBLIC_HTTPS = 'https://github.com/ansible/ansible.github.com.git'
TEST_GIT_PRIVATE_HTTPS = 'https://github.com/ansible/product-docs.git'
TEST_GIT_PRIVATE_SSH = 'git@github.com:ansible/product-docs.git'

TEST_HG_USERNAME = ''
TEST_HG_PASSWORD = ''
TEST_HG_KEY_DATA = TEST_SSH_KEY_DATA
TEST_HG_PUBLIC_HTTPS = 'https://bitbucket.org/cchurch/django-hotrunner'
TEST_HG_PRIVATE_HTTPS = ''
TEST_HG_PRIVATE_SSH = ''

TEST_SVN_USERNAME = ''
TEST_SVN_PASSWORD = ''
TEST_SVN_PUBLIC_HTTPS = 'https://github.com/ansible/ansible.github.com'
@@ -102,6 +102,7 @@ except IOError:
    else:
        raise

# The below runs AFTER all of the custom settings are imported.

CELERYBEAT_SCHEDULE.update({  # noqa
    'isolated_heartbeat': {
@@ -110,3 +111,5 @@ CELERYBEAT_SCHEDULE.update({  # noqa
        'options': {'expires': AWX_ISOLATED_PERIODIC_CHECK * 2},  # noqa
    }
})

DATABASES['default'].setdefault('OPTIONS', dict()).setdefault('application_name', f'{CLUSTER_HOST_ID}-{os.getpid()}-{" ".join(sys.argv)}'[:63])  # noqa
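The `isolated_heartbeat` entry is truncated by the hunk boundary above. For reference, a complete Celery beat entry pairs a registered task name with a schedule and options; it has roughly this shape (the task path and interval below are illustrative, not the actual values from the diff):

```python
from datetime import timedelta

AWX_ISOLATED_PERIODIC_CHECK = 600  # illustrative interval, in seconds

CELERYBEAT_SCHEDULE = {
    'isolated_heartbeat': {
        'task': 'awx.main.tasks.awx_isolated_heartbeat',  # assumed task path
        'schedule': timedelta(seconds=AWX_ISOLATED_PERIODIC_CHECK),
        'options': {'expires': AWX_ISOLATED_PERIODIC_CHECK * 2},
    },
}
```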
@@ -25,7 +25,7 @@ class BaseRedirectView(RedirectView):
    def get_redirect_url(self, *args, **kwargs):
        last_path = self.request.COOKIES.get('lastPath', '')
        last_path = urllib.parse.quote(urllib.parse.unquote(last_path).strip('"'))
        url = reverse('ui:index')
        url = reverse('ui_next:index')
        if last_path:
            return '%s#%s' % (url, last_path)
        else:
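The cookie handling above round-trips the percent-encoded `lastPath` value (decode, strip the surrounding quotes, re-encode) so it can be appended safely as a URL fragment. A small illustrative sketch with a made-up cookie value:

```python
import urllib.parse

# Hypothetical cookie value: the UI stores a quoted, percent-encoded path.
raw_cookie = '%22%2Fjobs%2F42%22'

last_path = urllib.parse.quote(urllib.parse.unquote(raw_cookie).strip('"'))
url = '/'  # stand-in for reverse('ui_next:index')
print('%s#%s' % (url, last_path))  # -> /#/jobs/42
```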
@@ -1,19 +0,0 @@
Gruntfile.js
karma.*.js
webpack.*.js
nightwatch.*.js

etc
coverage
grunt-tasks
node_modules
po
static
templates

client/src/**/*.js
client/assets/**/*.js
test/spec/**/*.js

!client/src/app.start.js
!client/src/vendor.js
@@ -1,72 +0,0 @@
const path = require('path');

module.exports = {
    root: true,
    extends: [
        'airbnb-base'
    ],
    plugins: [
        'import',
        'disable'
    ],
    settings: {
        'import/resolver': {
            webpack: {
                config: path.join(__dirname, 'build/webpack.development.js')
            }
        },
        'eslint-plugin-disable': {
            paths: {
                import: ['**/build/*.js']
            }
        }
    },
    env: {
        browser: true,
        node: true
    },
    globals: {
        angular: true,
        d3: true,
        $: true,
        _: true,
        codemirror: true,
        jsyaml: true,
        crypto: true
    },
    rules: {
        'arrow-parens': 'off',
        'comma-dangle': 'off',
        indent: ['error', 4, {
            SwitchCase: 1
        }],
        'max-len': ['error', {
            code: 100,
            ignoreStrings: true,
            ignoreTemplateLiterals: true,
        }],
        'no-continue': 'off',
        'no-debugger': 'off',
        'no-mixed-operators': 'off',
        'no-param-reassign': 'off',
        'no-plusplus': 'off',
        'no-underscore-dangle': 'off',
        'no-use-before-define': 'off',
        'no-multiple-empty-lines': ['error', { max: 1 }],
        'object-curly-newline': 'off',
        'space-before-function-paren': ['error', 'always'],
        'no-trailing-spaces': ['error'],
        'prefer-destructuring': ['error', {
            'VariableDeclarator': {
                'array': false,
                'object': true
            },
            'AssignmentExpression': {
                'array': false,
                'object': true
            }
        }, {
            'enforceForRenamedProperties': false
        }]
    }
};
@@ -1,49 +0,0 @@
{
    "browser": true,
    "node": true,
    "jquery": true,
    "esnext": true,
    "globalstrict": true,
    "curly": true,
    "immed": true,
    "latedef": "nofunc",
    "noarg": true,
    "nonew": true,
    "maxerr": 10000,
    "notypeof": true,
    "globals": {
        "$ENV": true,
        "require": true,
        "global": true,
        "beforeEach": false,
        "inject": false,
        "module": false,
        "angular": false,
        "alert": false,
        "$AnsibleConfig": true,
        "$basePath": true,
        "jsyaml": false,
        "_": false,
        "d3": false,
        "Donut3D": false,
        "nv": false,
        "it": false,
        "xit": false,
        "expect": false,
        "context": false,
        "describe": false,
        "moment": false,
        "spyOn": false,
        "jasmine": false,
        "dagre": false,
        "crypto": false
    },
    "strict": false,
    "quotmark": false,
    "trailing": true,
    "undef": true,
    "unused": true,
    "eqeqeq": true,
    "indent": 4,
    "newcap": false
}
@@ -1 +0,0 @@
progress=false
@@ -1,20 +0,0 @@
module.exports = function(grunt) {
    // Load grunt tasks & configurations automatically from dir grunt/
    require('load-grunt-tasks')(grunt);
    // display task timings
    require('time-grunt')(grunt);

    var options = {
        config: {
            src: './grunt-tasks/*.js'
        },
        pkg: grunt.file.readJSON('package.json')
    };

    var configs = require('load-grunt-configs')(grunt, options);

    // Project configuration.
    grunt.initConfig(configs);
    grunt.loadNpmTasks('grunt-newer');
    grunt.loadNpmTasks('grunt-angular-gettext');
};
awx/ui/README.md
@@ -1,103 +0,0 @@
# AWX UI

## Requirements
- node.js 10.x LTS
- npm >=6.x
- bzip2, gcc-c++, git, make

## Development
The API development server will need to be running. See [CONTRIBUTING.md](../../CONTRIBUTING.md).

```shell
# Build ui for the devel environment - reachable at https://localhost:8043
make ui-devel

# Alternatively, start the ui development server. While running, the ui will be reachable
# at https://localhost:3000 and updated automatically when code changes.
make ui-docker

# When using docker machine, use this command to start the ui development server instead.
DOCKER_MACHINE_NAME=default make ui-docker-machine
```

## Development with an external server
If you normally run awx on an external host/server (in this example, `awx.local`),
you'll need to reconfigure the webpack proxy slightly for `make ui-docker` to
work:

```javascript
/awx/settings/development.py
+
+CSRF_TRUSTED_ORIGINS = ['awx.local:8043']

awx/ui/build/webpack.watch.js
- host: '127.0.0.1',
+ host: '0.0.0.0',
+ disableHostCheck: true,

/awx/ui/package.json
@@ -7,7 +7,7 @@
"config": {
    ...
+   "django_host": "awx.local"
},
```

## Testing
```shell
# run linters
make jshint

# run unit tests
make ui-test-ci

# run e2e tests - see awx/ui/test/e2e for more information
npm --prefix awx/ui run e2e
```
**Note**: Unit tests are run on your host machine and not in the development containers.

## Adding dependencies
```shell
# add an exact development or build dependency
npm install --prefix awx/ui --save-dev --save-exact dev-package@1.2.3

# add an exact production dependency
npm install --prefix awx/ui --save --save-exact prod-package@1.23

# add the updated package.json and package-lock.json files to scm
git add awx/ui/package.json awx/ui/package-lock.json
```

## Removing dependencies
```shell
# remove a development or build dependency
npm uninstall --prefix awx/ui --save-dev dev-package

# remove a production dependency
npm uninstall --prefix awx/ui --save prod-package
```

## Building for Production
```shell
# built files are placed in awx/ui/static
make ui-release
```

## Internationalization
Application strings marked for translation are extracted and used to generate `.pot` files using the following command:
```shell
# extract strings and generate .pot files
make pot
```
To include the translations in the development environment, we compile them prior to building the ui:
```shell
# remove any prior ui builds
make clean-ui

# compile the .pot files to javascript files usable by the application
make languages

# build the ui with translations included
make ui-devel
```
**Note**: Python 3.6 is required to compile the `.pot` files.
@@ -2,3 +2,4 @@
# All Rights Reserved.

default_app_config = 'awx.ui.apps.UIConfig'
@@ -7,3 +7,4 @@ class UIConfig(AppConfig):

    name = 'awx.ui'
    verbose_name = _('UI')
@@ -1,235 +0,0 @@
const path = require('path');

const webpack = require('webpack');
const CleanWebpackPlugin = require('clean-webpack-plugin');
const CopyWebpackPlugin = require('copy-webpack-plugin');
const HtmlWebpackPlugin = require('html-webpack-plugin');
const ExtractTextPlugin = require('extract-text-webpack-plugin');

const CLIENT_PATH = path.resolve(__dirname, '../client');
const LIB_PATH = path.join(CLIENT_PATH, 'lib');
const UI_PATH = path.resolve(__dirname, '..');

const ASSETS_PATH = path.join(CLIENT_PATH, 'assets');
const COMPONENTS_PATH = path.join(LIB_PATH, 'components');
const COVERAGE_PATH = path.join(UI_PATH, 'coverage');
const FEATURES_PATH = path.join(CLIENT_PATH, 'features');
const LANGUAGES_PATH = path.join(CLIENT_PATH, 'languages');
const MODELS_PATH = path.join(LIB_PATH, 'models');
const NODE_MODULES_PATH = path.join(UI_PATH, 'node_modules');
const SERVICES_PATH = path.join(LIB_PATH, 'services');
const SRC_PATH = path.join(CLIENT_PATH, 'src');
const STATIC_PATH = path.join(UI_PATH, 'static');
const TEST_PATH = path.join(UI_PATH, 'test');
const THEME_PATH = path.join(LIB_PATH, 'theme');

const APP_ENTRY = path.join(SRC_PATH, 'app.js');
const VENDOR_ENTRY = path.join(SRC_PATH, 'vendor.js');
const INDEX_ENTRY = path.join(CLIENT_PATH, 'index.template.ejs');
const INDEX_OUTPUT = path.join(UI_PATH, 'templates/ui/index.html');
const INSTALL_RUNNING_ENTRY = path.join(CLIENT_PATH, 'installing.template.ejs');
const INSTALL_RUNNING_OUTPUT = path.join(UI_PATH, 'templates/ui/installing.html');
const THEME_ENTRY = path.join(LIB_PATH, 'theme', 'index.less');
const OUTPUT = 'js/[name].[chunkhash].js';
const CHUNKS = ['vendor', 'app'];

const VENDOR = VENDOR_ENTRY;
const APP = [THEME_ENTRY, APP_ENTRY];

const base = {
    entry: {
        vendor: VENDOR,
        app: APP
    },
    output: {
        path: STATIC_PATH,
        publicPath: '',
        filename: OUTPUT
    },
    stats: {
        children: false,
        modules: false,
        chunks: false,
        excludeAssets: name => {
            const chunkNames = `(${CHUNKS.join('|')})`;
            const outputPattern = new RegExp(`${chunkNames}.[a-f0-9]+.(js|css)(|.map)$`, 'i');

            return !outputPattern.test(name);
        }
    },
    module: {
        rules: [
            {
                test: /\.js$/,
                use: {
                    loader: 'istanbul-instrumenter-loader',
                    options: { esModules: true }
                },
                enforce: 'pre',
                include: [
                    /src\/network-ui\//
                ]
            },
            {
                test: /\.js$/,
                loader: 'babel-loader',
                exclude: /node_modules/,
                options: {
                    presets: [
                        ['env', {
                            targets: {
                                browsers: ['last 2 versions']
                            }
                        }]
                    ]
                }
            },
            {
                test: /\.css$/,
                use: ExtractTextPlugin.extract({
                    use: {
                        loader: 'css-loader',
                        options: {
                            url: false
                        }
                    }
                })
            },
            {
                test: /lib\/theme\/index.less$/,
                use: ExtractTextPlugin.extract({
                    use: ['css-loader', 'less-loader']
                })
            },
            {
                test: /\.html$/,
                use: ['ngtemplate-loader', 'html-loader'],
                include: [
                    /lib\/components\//,
                    /features\//,
                    /src\//
                ]
            },
            {
                test: /\.svg$/,
                use: ['ngtemplate-loader', 'html-loader'],
                include: [
                    /lib\/components\//,
                    /features\//,
                    /src\//
                ]
            },
            {
                test: /\.json$/,
                loader: 'json-loader',
                exclude: /node_modules/
            }
        ]
    },
    plugins: [
        new webpack.ProvidePlugin({
            jsyaml: 'js-yaml',
            CodeMirror: 'codemirror',
            jsonlint: 'codemirror.jsonlint'
        }),
        new ExtractTextPlugin('css/[name].[chunkhash].css'),
        new CleanWebpackPlugin([STATIC_PATH, COVERAGE_PATH], {
            root: UI_PATH,
            verbose: false
        }),
        new CopyWebpackPlugin([
            {
                from: path.join(ASSETS_PATH, 'fontcustom/**/*'),
                to: path.join(STATIC_PATH, 'fonts/'),
                flatten: true
            },
            {
                from: path.join(NODE_MODULES_PATH, 'components-font-awesome/fonts/*'),
                to: path.join(STATIC_PATH, 'fonts/'),
                flatten: true
            },
            {
                from: path.join(ASSETS_PATH, 'custom-theme/images.new/*'),
                to: path.join(STATIC_PATH, 'images/'),
                flatten: true
            },
            {
                from: path.join(LANGUAGES_PATH, '*'),
                to: path.join(STATIC_PATH, 'languages'),
                flatten: true
            },
            {
                from: ASSETS_PATH,
                to: path.join(STATIC_PATH, 'assets')
            },
            {
                from: path.join(NODE_MODULES_PATH, 'angular-scheduler/lib/*.html'),
                to: path.join(STATIC_PATH, 'lib'),
                context: NODE_MODULES_PATH
            },
            {
                from: path.join(NODE_MODULES_PATH, 'angular-tz-extensions/tz/data/*'),
                to: path.join(STATIC_PATH, 'lib/'),
                context: NODE_MODULES_PATH
            },
            {
                from: path.join(SRC_PATH, '**/*.partial.html'),
                to: path.join(STATIC_PATH, 'partials/'),
                context: SRC_PATH
            },
            {
                from: path.join(SRC_PATH, 'partials', '*.html'),
                to: STATIC_PATH,
                context: SRC_PATH
            },
            {
                from: path.join(SRC_PATH, '*config.js'),
                to: STATIC_PATH,
                flatten: true
            }
        ]),
        new HtmlWebpackPlugin({
            alwaysWriteToDisk: true,
            template: INDEX_ENTRY,
            filename: INDEX_OUTPUT,
            inject: false,
            chunks: CHUNKS,
            chunksSortMode: chunk => (chunk.names[0] === 'vendor' ? -1 : 1)
        }),
        new HtmlWebpackPlugin({
            alwaysWriteToDisk: true,
            template: INSTALL_RUNNING_ENTRY,
            filename: INSTALL_RUNNING_OUTPUT,
            inject: false,
            chunks: CHUNKS,
            chunksSortMode: chunk => (chunk.names[0] === 'vendor' ? -1 : 1)
        }),
    ],
    resolve: {
        alias: {
            '~assets': ASSETS_PATH,
            '~components': COMPONENTS_PATH,
            '~features': FEATURES_PATH,
            '~models': MODELS_PATH,
            '~node_modules': NODE_MODULES_PATH,
            '~services': SERVICES_PATH,
            '~src': SRC_PATH,
            '~test': TEST_PATH,
            '~theme': THEME_PATH,
            '~ui': UI_PATH,
            d3$: '~node_modules/d3/d3.min.js',
            'codemirror.jsonlint$': '~node_modules/codemirror/addon/lint/json-lint.js',
            jquery: '~node_modules/jquery/dist/jquery.js',
            'jquery-resize$': '~node_modules/javascript-detect-element-resize/jquery.resize.js',
            select2$: '~node_modules/select2/dist/js/select2.full.min.js',
            'js-yaml$': '~node_modules/js-yaml/dist/js-yaml.min.js',
            'lr-infinite-scroll$': '~node_modules/lr-infinite-scroll/lrInfiniteScroll.js',
            'angular-tz-extensions$': '~node_modules/angular-tz-extensions/lib/angular-tz-extensions.js',
            'ng-toast-provider$': '~node_modules/ng-toast/src/scripts/provider.js',
            'ng-toast-directives$': '~node_modules/ng-toast/src/scripts/directives.js',
            'ng-toast$': '~node_modules/ng-toast/src/scripts/module.js'
        }
    }
};

module.exports = base;
@@ -1,9 +0,0 @@
const merge = require('webpack-merge');

const base = require('./webpack.base');

const development = {
    devtool: 'source-map'
};

module.exports = merge(base, development);
@@ -1,28 +0,0 @@
const path = require('path');

const merge = require('webpack-merge');
const webpack = require('webpack');
const UglifyJSPlugin = require('uglifyjs-webpack-plugin');
const HtmlWebpackPlugin = require('html-webpack-plugin');

const base = require('./webpack.base');

const CLIENT_PATH = path.resolve(__dirname, '../client');
const UI_PATH = path.resolve(__dirname, '..');
const CHUNKS = ['vendor', 'app'];

const production = {
    plugins: [
        new UglifyJSPlugin({
            compress: true,
            mangle: false
        }),
        new webpack.DefinePlugin({
            'process.env': {
                NODE_ENV: JSON.stringify('production')
            }
        })
    ]
};

module.exports = merge(base, production);
@@ -1,20 +0,0 @@
const _ = require('lodash');
const webpack = require('webpack');

const STATIC_URL = '/static/';

const development = require('./webpack.base');

const test = {
    devtool: 'cheap-source-map',
    plugins: [
        new webpack.DefinePlugin({
            $basePath: STATIC_URL
        })
    ]
};

test.plugins = development.plugins.concat(test.plugins);

module.exports = _.merge(development, test);
@@ -1,84 +0,0 @@
const path = require('path');

const _ = require('lodash');
const webpack = require('webpack');
const merge = require('webpack-merge');
const nodeObjectHash = require('node-object-hash');
const HardSourceWebpackPlugin = require('hard-source-webpack-plugin');
const HtmlWebpackHarddiskPlugin = require('html-webpack-harddisk-plugin');

const TARGET_PORT = _.get(process.env, 'npm_package_config_django_port', 8043);
const TARGET_HOST = _.get(process.env, 'npm_package_config_django_host', 'localhost');
const TARGET = `https://${TARGET_HOST}:${TARGET_PORT}`;
const OUTPUT = 'js/[name].js';

const development = require('./webpack.development');

const watch = {
    cache: true,
    devtool: 'cheap-source-map',
    output: {
        filename: OUTPUT
    },
    module: {
        rules: [
            {
                test: /\.js$/,
                enforce: 'pre',
                exclude: /node_modules/,
                loader: 'eslint-loader'
            }
        ]
    },
    plugins: [
        new HtmlWebpackHarddiskPlugin(),
        new HardSourceWebpackPlugin({
            cacheDirectory: 'node_modules/.cache/hard-source/[confighash]',
            recordsPath: 'node_modules/.cache/hard-source/[confighash]/records.json',
            configHash: config => nodeObjectHash({ sort: false }).hash(config),
            environmentHash: {
                root: process.cwd(),
                directories: ['node_modules'],
                files: ['package.json']
            }
        }),
        new webpack.HotModuleReplacementPlugin()
    ],
    devServer: {
        hot: true,
        inline: true,
        contentBase: path.resolve(__dirname, '..', 'static'),
        stats: 'minimal',
        publicPath: '/static/',
        host: '127.0.0.1',
        https: true,
        port: 3000,
        clientLogLevel: 'none',
        proxy: [{
            context: (pathname, req) => !(pathname === '/api/login/' && req.method === 'POST'),
            target: TARGET,
            secure: false,
            ws: false,
            bypass: req => req.originalUrl.includes('hot-update.json')
        },
        {
            context: '/api/login/',
            target: TARGET,
            secure: false,
            ws: false,
            headers: {
                Host: `localhost:${TARGET_PORT}`,
                Origin: TARGET,
                Referer: `${TARGET}/`
            }
        },
        {
            context: '/websocket',
            target: TARGET,
            secure: false,
            ws: true
        }]
    }
};

module.exports = merge(development, watch);
@@ -1,5 +0,0 @@
REVISION=588429
CHROMIUM_URL="https://storage.googleapis.com/chromium-browser-snapshots/Linux_x64/${REVISION}/chrome-linux.zip"

wget ${CHROMIUM_URL} -w 30 -t 6 -O /tmp/chrome-linux.zip
unzip -o -d /tmp /tmp/chrome-linux.zip
@@ -1,202 +0,0 @@

                                 Apache License
                           Version 2.0, January 2004
                        http://www.apache.org/licenses/

   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

   1. Definitions.

      "License" shall mean the terms and conditions for use, reproduction,
      and distribution as defined by Sections 1 through 9 of this document.

      "Licensor" shall mean the copyright owner or entity authorized by
      the copyright owner that is granting the License.

      "Legal Entity" shall mean the union of the acting entity and all
      other entities that control, are controlled by, or are under common
      control with that entity. For the purposes of this definition,
      "control" means (i) the power, direct or indirect, to cause the
      direction or management of such entity, whether by contract or
      otherwise, or (ii) ownership of fifty percent (50%) or more of the
      outstanding shares, or (iii) beneficial ownership of such entity.

      "You" (or "Your") shall mean an individual or Legal Entity
      exercising permissions granted by this License.

      "Source" form shall mean the preferred form for making modifications,
      including but not limited to software source code, documentation
      source, and configuration files.

      "Object" form shall mean any form resulting from mechanical
      transformation or translation of a Source form, including but
      not limited to compiled object code, generated documentation,
      and conversions to other media types.

      "Work" shall mean the work of authorship, whether in Source or
      Object form, made available under the License, as indicated by a
      copyright notice that is included in or attached to the work
      (an example is provided in the Appendix below).

      "Derivative Works" shall mean any work, whether in Source or Object
      form, that is based on (or derived from) the Work and for which the
      editorial revisions, annotations, elaborations, or other modifications
      represent, as a whole, an original work of authorship. For the purposes
      of this License, Derivative Works shall not include works that remain
      separable from, or merely link (or bind by name) to the interfaces of,
      the Work and Derivative Works thereof.

      "Contribution" shall mean any work of authorship, including
      the original version of the Work and any modifications or additions
      to that Work or Derivative Works thereof, that is intentionally
      submitted to Licensor for inclusion in the Work by the copyright owner
      or by an individual or Legal Entity authorized to submit on behalf of
      the copyright owner. For the purposes of this definition, "submitted"
      means any form of electronic, verbal, or written communication sent
      to the Licensor or its representatives, including but not limited to
      communication on electronic mailing lists, source code control systems,
      and issue tracking systems that are managed by, or on behalf of, the
      Licensor for the purpose of discussing and improving the Work, but
      excluding communication that is conspicuously marked or otherwise
      designated in writing by the copyright owner as "Not a Contribution."

      "Contributor" shall mean Licensor and any individual or Legal Entity
      on behalf of whom a Contribution has been received by Licensor and
      subsequently incorporated within the Work.

   2. Grant of Copyright License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      copyright license to reproduce, prepare Derivative Works of,
      publicly display, publicly perform, sublicense, and distribute the
      Work and such Derivative Works in Source or Object form.

   3. Grant of Patent License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      (except as stated in this section) patent license to make, have made,
      use, offer to sell, sell, import, and otherwise transfer the Work,
      where such license applies only to those patent claims licensable
      by such Contributor that are necessarily infringed by their
      Contribution(s) alone or by combination of their Contribution(s)
      with the Work to which such Contribution(s) was submitted. If You
      institute patent litigation against any entity (including a
      cross-claim or counterclaim in a lawsuit) alleging that the Work
      or a Contribution incorporated within the Work constitutes direct
      or contributory patent infringement, then any patent licenses
      granted to You under this License for that Work shall terminate
      as of the date such litigation is filed.

   4. Redistribution. You may reproduce and distribute copies of the
      Work or Derivative Works thereof in any medium, with or without
      modifications, and in Source or Object form, provided that You
      meet the following conditions:

      (a) You must give any other recipients of the Work or
          Derivative Works a copy of this License; and

      (b) You must cause any modified files to carry prominent notices
          stating that You changed the files; and

      (c) You must retain, in the Source form of any Derivative Works
          that You distribute, all copyright, patent, trademark, and
          attribution notices from the Source form of the Work,
          excluding those notices that do not pertain to any part of
          the Derivative Works; and

      (d) If the Work includes a "NOTICE" text file as part of its
          distribution, then any Derivative Works that You distribute must
          include a readable copy of the attribution notices contained
          within such NOTICE file, excluding those notices that do not
          pertain to any part of the Derivative Works, in at least one
          of the following places: within a NOTICE text file distributed
          as part of the Derivative Works; within the Source form or
          documentation, if provided along with the Derivative Works; or,
          within a display generated by the Derivative Works, if and
          wherever such third-party notices normally appear. The contents
          of the NOTICE file are for informational purposes only and
          do not modify the License. You may add Your own attribution
          notices within Derivative Works that You distribute, alongside
          or as an addendum to the NOTICE text from the Work, provided
          that such additional attribution notices cannot be construed
          as modifying the License.

      You may add Your own copyright statement to Your modifications and
      may provide additional or different license terms and conditions
      for use, reproduction, or distribution of Your modifications, or
      for any such Derivative Works as a whole, provided Your use,
      reproduction, and distribution of the Work otherwise complies with
      the conditions stated in this License.

   5. Submission of Contributions. Unless You explicitly state otherwise,
      any Contribution intentionally submitted for inclusion in the Work
      by You to the Licensor shall be under the terms and conditions of
      this License, without any additional terms or conditions.
      Notwithstanding the above, nothing herein shall supersede or modify
      the terms of any separate license agreement you may have executed
      with Licensor regarding such Contributions.

   6. Trademarks. This License does not grant permission to use the trade
      names, trademarks, service marks, or product names of the Licensor,
      except as required for reasonable and customary use in describing the
      origin of the Work and reproducing the content of the NOTICE file.

   7. Disclaimer of Warranty. Unless required by applicable law or
      agreed to in writing, Licensor provides the Work (and each
      Contributor provides its Contributions) on an "AS IS" BASIS,
      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
      implied, including, without limitation, any warranties or conditions
      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
      PARTICULAR PURPOSE. You are solely responsible for determining the
      appropriateness of using or redistributing the Work and assume any
      risks associated with Your exercise of permissions under this License.

   8. Limitation of Liability. In no event and under no legal theory,
      whether in tort (including negligence), contract, or otherwise,
      unless required by applicable law (such as deliberate and grossly
      negligent acts) or agreed to in writing, shall any Contributor be
      liable to You for damages, including any direct, indirect, special,
      incidental, or consequential damages of any character arising as a
      result of this License or out of the use or inability to use the
      Work (including but not limited to damages for loss of goodwill,
      work stoppage, computer failure or malfunction, or any and all
      other commercial damages or losses), even if such Contributor
      has been advised of the possibility of such damages.

   9. Accepting Warranty or Additional Liability. While redistributing
      the Work or Derivative Works thereof, You may choose to offer,
      and charge a fee for, acceptance of support, warranty, indemnity,
      or other liability obligations and/or rights consistent with this
      License. However, in accepting such obligations, You may act only
      on Your own behalf and on Your sole responsibility, not on behalf
      of any other Contributor, and only if You agree to indemnify,
      defend, and hold each Contributor harmless for any liability
      incurred by, or claims asserted against, such Contributor by reason
      of your accepting any such warranty or additional liability.

   END OF TERMS AND CONDITIONS

   APPENDIX: How to apply the Apache License to your work.

      To apply the Apache License to your work, attach the following
      boilerplate notice, with the fields enclosed by brackets "[]"
      replaced with your own identifying information. (Don't include
      the brackets!) The text should be enclosed in the appropriate
      comment syntax for the file format. We also recommend that a
      file or class name and description of purpose be included on the
      same "printed page" as the copyright notice for easier
      identification within third-party archives.

   Copyright [yyyy] [name of copyright owner]

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
[11 binary files in this diff are not shown; one deleted image was 1.7 KiB. Some files were omitted because too many files changed in this diff.]