Mirror of https://github.com/ansible/awx.git (synced 2026-02-05 19:44:43 -03:30)

Compare commits: 283 commits
7
.github/dependabot.yml
vendored
@@ -1,7 +0,0 @@
---
version: 2
updates:
  - package-ecosystem: "pip"
    directory: "/requirements"
    schedule:
      interval: "monthly"
13
CHANGELOG.md
@@ -2,6 +2,19 @@
|
||||
|
||||
This is a list of high-level changes for each release of AWX. A full list of commits can be found at `https://github.com/ansible/awx/releases/tag/<version>`.
|
||||
|
||||
# 19.2.0 (June 1, 2021)
|
||||
- Fixed race condition that would sometimes cause jobs to error out at the very end of an otherwise successful run (https://github.com/ansible/receptor/pull/328)
|
||||
- Fixes bug where users were unable to click on text next to checkboxes in modals (https://github.com/ansible/awx/pull/10279)
|
||||
- Have the project update playbook warn if role/collection syncing is disabled. (https://github.com/ansible/awx/pull/10068)
|
||||
- Move irc references to point to irc.libera.chat (https://github.com/ansible/awx/pull/10295)
|
||||
- Fixes bug where activity stream changes were displaying as [object object] (https://github.com/ansible/awx/pull/10267)
|
||||
- Update awxkit to enable export of Galaxy credentials associated to organizations (https://github.com/ansible/awx/pull/10271)
|
||||
- Bump receptor and receptorctl versions to 1.0.0a2 (https://github.com/ansible/awx/pull/10261)
|
||||
- Add the ability to disable local authentication (https://github.com/ansible/awx/pull/10102)
|
||||
- Show error if no Execution Environment is found on project sync/job run (https://github.com/ansible/awx/pull/10183)
|
||||
- Allow for editing and deleting managed_by_tower EEs from API/UI (https://github.com/ansible/awx/pull/10173)
|
||||
|
||||
|
||||
# 19.1.0 (May 1, 2021)
|
||||
|
||||
- Custom inventory scripts have been removed from the API https://github.com/ansible/awx/pull/9822
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
Hi there! We're excited to have you as a contributor.
|
||||
|
||||
Have questions about this document or anything not covered here? Come chat with us at `#ansible-awx` on webchat.freenode.net, or submit your question to the [mailing list](https://groups.google.com/forum/#!forum/awx-project).
|
||||
Have questions about this document or anything not covered here? Come chat with us at `#ansible-awx` on irc.libera.chat, or submit your question to the [mailing list](https://groups.google.com/forum/#!forum/awx-project).
|
||||
|
||||
## Table of contents
|
||||
|
||||
@@ -28,7 +28,7 @@ Have questions about this document or anything not covered here? Come chat with
|
||||
- You must use `git commit --signoff` for any commit to be merged, and agree that usage of --signoff constitutes agreement with the terms of [DCO 1.1](./DCO_1_1.md).
|
||||
- Take care to make sure no merge commits are in the submission, and use `git rebase` vs `git merge` for this reason.
|
||||
- If collaborating with someone else on the same branch, consider using `--force-with-lease` instead of `--force`. This will prevent you from accidentally overwriting commits pushed by someone else. For more information, see https://git-scm.com/docs/git-push#git-push---force-with-leaseltrefnamegt
|
||||
- If submitting a large code change, it's a good idea to join the `#ansible-awx` channel on webchat.freenode.net, and talk about what you would like to do or add first. This not only helps everyone know what's going on, it also helps save time and effort, if the community decides some changes are needed.
|
||||
- If submitting a large code change, it's a good idea to join the `#ansible-awx` channel on irc.libera.chat, and talk about what you would like to do or add first. This not only helps everyone know what's going on, it also helps save time and effort, if the community decides some changes are needed.
|
||||
- We ask all of our community members and contributors to adhere to the [Ansible code of conduct](http://docs.ansible.com/ansible/latest/community/code_of_conduct.html). If you have questions, or need assistance, please reach out to our community team at [codeofconduct@ansible.com](mailto:codeofconduct@ansible.com)
|
||||
|
||||
## Setting up your development environment
|
||||
@@ -114,7 +114,7 @@ Fixing bugs, adding translations, and updating the documentation are always appr
|
||||
|
||||
**NOTE**
|
||||
|
||||
> If you work in a part of the codebase that is going through active development, your changes may be rejected, or you may be asked to `rebase`. A good idea before starting work is to have a discussion with us in the `#ansible-awx` channel on webchat.freenode.net, or on the [mailing list](https://groups.google.com/forum/#!forum/awx-project).
|
||||
> If you work in a part of the codebase that is going through active development, your changes may be rejected, or you may be asked to `rebase`. A good idea before starting work is to have a discussion with us in the `#ansible-awx` channel on irc.libera.chat, or on the [mailing list](https://groups.google.com/forum/#!forum/awx-project).
|
||||
|
||||
**NOTE**
|
||||
|
||||
@@ -136,7 +136,7 @@ Here are a few things you can do to help the visibility of your change, and incr
|
||||
* Make the smallest change possible
|
||||
* Write good commit messages. See [How to write a Git commit message](https://chris.beams.io/posts/git-commit/).
|
||||
|
||||
It's generally a good idea to discuss features with us first by engaging us in the `#ansible-awx` channel on webchat.freenode.net, or on the [mailing list](https://groups.google.com/forum/#!forum/awx-project).
|
||||
It's generally a good idea to discuss features with us first by engaging us in the `#ansible-awx` channel on irc.libera.chat, or on the [mailing list](https://groups.google.com/forum/#!forum/awx-project).
|
||||
|
||||
We like to keep our commit history clean, and will require resubmission of pull requests that contain merge commits. Use `git pull --rebase`, rather than
|
||||
`git pull`, and `git rebase`, rather than `git merge`.
|
||||
|
||||
108
INSTALL.md
@@ -3,12 +3,6 @@ Table of Contents
|
||||
|
||||
* [Installing AWX](#installing-awx)
|
||||
* [The AWX Operator](#the-awx-operator)
|
||||
* [Quickstart with minikube](#quickstart-with-minikube)
|
||||
* [Starting minikube](#starting-minikube)
|
||||
* [Deploying the AWX Operator](#deploying-the-awx-operator)
|
||||
* [Verifying the Operator Deployment](#verifying-the-operator-deployment)
|
||||
* [Deploy AWX](#deploy-awx)
|
||||
* [Accessing AWX](#accessing-awx)
|
||||
* [Installing the AWX CLI](#installing-the-awx-cli)
|
||||
* [Building the CLI Documentation](#building-the-cli-documentation)
|
||||
|
||||
@@ -22,110 +16,10 @@ If you're attempting to migrate an older Docker-based AWX installation, see: [Mi
|
||||
|
||||
## The AWX Operator
|
||||
|
||||
Starting in version 18.0, the [AWX Operator](https://github.com/ansible/awx-operator) is the preferred way to install AWX.
|
||||
Starting in version 18.0, the [AWX Operator](https://github.com/ansible/awx-operator) is the preferred way to install AWX. Please refer to the [AWX Operator](https://github.com/ansible/awx-operator) documentation.
|
||||
|
||||
AWX can also alternatively be installed and [run in Docker](./tools/docker-compose/README.md), but this install path is only recommended for development/test-oriented deployments, and has no official published release.
|
||||
|
||||
### Quickstart with minikube
|
||||
|
||||
If you don't have an existing OpenShift or Kubernetes cluster, minikube is a fast and easy way to get up and running.
|
||||
|
||||
To install minikube, follow the steps in their [documentation](https://minikube.sigs.k8s.io/docs/start/).
|
||||
|
||||
:warning: NOTE |
--- |
If you're about to install minikube or have already installed it, please be sure you're using [Minikube v1.18.1](https://github.com/kubernetes/minikube/releases/tag/v1.18.1). There's a [bug](https://github.com/ansible/awx-operator/issues/205) right now that will not allow you to run it using Minikube v1.19.x. |
#### Starting minikube
|
||||
|
||||
Once you have installed minikube, run the following command to start it. You may wish to customize these options.
|
||||
|
||||
```
|
||||
$ minikube start --cpus=4 --memory=8g --addons=ingress
|
||||
```
|
||||
|
||||
#### Deploying the AWX Operator
|
||||
|
||||
For a comprehensive overview of features, see [README.md](https://github.com/ansible/awx-operator/blob/devel/README.md) in the awx-operator repo. The following steps are the bare minimum to get AWX up and running.
|
||||
|
||||
Start by going to https://github.com/ansible/awx-operator/releases and making note of the latest release. Replace `<tag>` in the URL below with the version you are deploying:
|
||||
|
||||
```
|
||||
$ minikube kubectl -- apply -f https://raw.githubusercontent.com/ansible/awx-operator/<tag>/deploy/awx-operator.yaml
|
||||
```
|
||||
|
||||
##### Verifying the Operator Deployment
|
||||
|
||||
After a few seconds, the operator should be up and running. Verify it by running the following command:
|
||||
|
||||
```
|
||||
$ minikube kubectl get pods
|
||||
NAME READY STATUS RESTARTS AGE
|
||||
awx-operator-7c78bfbfd-xb6th 1/1 Running 0 11s
|
||||
```
|
||||
|
||||
#### Deploy AWX
|
||||
|
||||
Once the Operator is running, you can now deploy AWX by creating a simple YAML file:
|
||||
|
||||
```
|
||||
$ cat myawx.yml
|
||||
---
|
||||
apiVersion: awx.ansible.com/v1beta1
|
||||
kind: AWX
|
||||
metadata:
|
||||
name: awx
|
||||
spec:
|
||||
tower_ingress_type: Ingress
|
||||
```
|
||||
|
||||
> If a custom AWX image is needed, see [these docs](./docs/build_awx_image.md) on how to build and use it.
|
||||
|
||||
And then creating the AWX object in the Kubernetes API:
|
||||
|
||||
```
|
||||
$ minikube kubectl apply -- -f myawx.yml
|
||||
awx.awx.ansible.com/awx created
|
||||
```
|
||||
|
||||
After creating the AWX object in the Kubernetes API, the operator will begin running its reconciliation loop.
|
||||
|
||||
To see what's going on, you can tail the logs of the operator pod (note that your pod name will be different):
|
||||
|
||||
```
|
||||
$ minikube kubectl logs -- -f awx-operator-7c78bfbfd-xb6th
|
||||
```
|
||||
|
||||
After a few seconds, you will see the database and application pods show up. On a fresh system, it may take a few minutes for the container images to download.
|
||||
|
||||
```
|
||||
$ minikube kubectl get pods
|
||||
NAME READY STATUS RESTARTS AGE
|
||||
awx-5ffbfd489c-bvtvf 3/3 Running 0 2m54s
|
||||
awx-operator-7c78bfbfd-xb6th 1/1 Running 0 6m42s
|
||||
awx-postgres-0 1/1 Running 0 2m58s
|
||||
```
|
||||
|
||||
##### Accessing AWX
|
||||
|
||||
To access the AWX UI, you'll need to grab the service url from minikube:
|
||||
|
||||
```
|
||||
$ minikube service awx-service --url
|
||||
http://192.168.59.2:31868
|
||||
```
|
||||
|
||||
On fresh installs, you will see the "AWX is currently upgrading." page until database migrations finish.
|
||||
|
||||
Once you are redirected to the login screen, you can now log in by obtaining the generated admin password (note: do not copy the trailing `%`):
|
||||
|
||||
```
|
||||
$ minikube kubectl -- get secret awx-admin-password -o jsonpath='{.data.password}' | base64 --decode
|
||||
b6ChwVmqEiAsil2KSpH4xGaZPeZvWnWj%
|
||||
```
|
||||
|
||||
Now you can log in at the URL above with the username "admin" and the password above. Happy Automating!
|
||||
|
||||
|
||||
# Installing the AWX CLI
|
||||
|
||||
`awx` is the official command-line client for AWX. It:
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
Use the GitHub [issue tracker](https://github.com/ansible/awx/issues) for filing bugs. In order to save time, and help us respond to issues quickly, make sure to fill out as much of the issue template
|
||||
as possible. Version information, and an accurate reproducing scenario are critical to helping us identify the problem.
|
||||
|
||||
Please don't use the issue tracker as a way to ask how to do something. Instead, use the [mailing list](https://groups.google.com/forum/#!forum/awx-project) , and the `#ansible-awx` channel on irc.freenode.net to get help.
|
||||
Please don't use the issue tracker as a way to ask how to do something. Instead, use the [mailing list](https://groups.google.com/forum/#!forum/awx-project) , and the `#ansible-awx` channel on irc.libera.chat to get help.
|
||||
|
||||
Before opening a new issue, please use the issue search feature to see if what you're experiencing has already been reported. If you have any extra detail to provide, please comment. Otherwise, rather than posting a "me too" comment, please consider giving it a ["thumbs up"](https://github.com/blog/2119-add-reactions-to-pull-requests-issues-and-comment) to give us an indication of the severity of the problem.
|
||||
|
||||
|
||||
11
Makefile
@@ -13,7 +13,6 @@ MANAGEMENT_COMMAND ?= awx-manage
|
||||
IMAGE_REPOSITORY_AUTH ?=
|
||||
IMAGE_REPOSITORY_BASE ?= https://gcr.io
|
||||
VERSION := $(shell cat VERSION)
|
||||
PYCURL_SSL_LIBRARY ?= openssl
|
||||
|
||||
# NOTE: This defaults the container image version to the branch that's active
|
||||
COMPOSE_TAG ?= $(GIT_BRANCH)
|
||||
@@ -28,7 +27,7 @@ DEVEL_IMAGE_NAME ?= $(DEV_DOCKER_TAG_BASE)/awx_devel:$(COMPOSE_TAG)
|
||||
|
||||
# Python packages to install only from source (not from binary wheels)
|
||||
# Comma separated list
|
||||
SRC_ONLY_PKGS ?= cffi,pycparser,psycopg2,twilio,pycurl
|
||||
SRC_ONLY_PKGS ?= cffi,pycparser,psycopg2,twilio
|
||||
# These should be upgraded in the AWX and Ansible venv before attempting
|
||||
# to install the actual requirements
|
||||
VENV_BOOTSTRAP ?= pip==19.3.1 setuptools==41.6.0 wheel==0.36.2
|
||||
@@ -272,7 +271,9 @@ black: reports
|
||||
@(set -o pipefail && $@ $(BLACK_ARGS) awx awxkit awx_collection | tee reports/$@.report)
|
||||
|
||||
.git/hooks/pre-commit:
|
||||
@echo "[ -z \$$AWX_IGNORE_BLACK ] && (black --check \`git diff --cached --name-only --diff-filter=AM | grep -E '\.py$\'\` || (echo 'To fix this, run \`make black\` to auto-format your code prior to commit, or set AWX_IGNORE_BLACK=1' && exit 1))" > .git/hooks/pre-commit
|
||||
@echo "if [ -x pre-commit.sh ]; then" > .git/hooks/pre-commit
|
||||
@echo " ./pre-commit.sh;" >> .git/hooks/pre-commit
|
||||
@echo "fi" >> .git/hooks/pre-commit
|
||||
@chmod +x .git/hooks/pre-commit
|
||||
|
||||
genschema: reports
|
||||
@@ -387,7 +388,7 @@ clean-ui:
|
||||
rm -rf $(UI_BUILD_FLAG_FILE)
|
||||
|
||||
awx/ui_next/node_modules:
|
||||
$(NPM_BIN) --prefix awx/ui_next --loglevel warn install
|
||||
NODE_OPTIONS=--max-old-space-size=4096 $(NPM_BIN) --prefix awx/ui_next --loglevel warn ci
|
||||
|
||||
$(UI_BUILD_FLAG_FILE):
|
||||
$(NPM_BIN) --prefix awx/ui_next --loglevel warn run compile-strings
|
||||
@@ -469,7 +470,7 @@ docker-compose-sources: .git/hooks/pre-commit
|
||||
-e cluster_node_count=$(CLUSTER_NODE_COUNT)
|
||||
|
||||
docker-compose: docker-auth awx/projects docker-compose-sources
|
||||
docker-compose -f tools/docker-compose/_sources/docker-compose.yml up $(COMPOSE_UP_OPTS)
|
||||
docker-compose -f tools/docker-compose/_sources/docker-compose.yml $(COMPOSE_UP_OPTS) up
|
||||
|
||||
docker-compose-credential-plugins: docker-auth awx/projects docker-compose-sources
|
||||
echo -e "\033[0;31mTo generate a CyberArk Conjur API key: docker exec -it tools_conjur_1 conjurctl account create quick-start\033[0m"
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
[](https://ansible.softwarefactory-project.io/zuul/status) [](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html) [](https://github.com/ansible/awx/blob/devel/LICENSE.md) [](https://groups.google.com/g/awx-project)
|
||||
[](https://webchat.freenode.net/#ansible-awx)
|
||||
[](irc.libera.chat - #ansible-awx)
|
||||
|
||||
<img src="https://raw.githubusercontent.com/ansible/awx-logos/master/awx/ui/client/assets/logo-login.svg?sanitize=true" width=200 alt="AWX" />
|
||||
|
||||
|
||||
@@ -1,8 +1,12 @@
|
||||
# Django
|
||||
from django.conf import settings
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
# Django REST Framework
|
||||
from rest_framework import serializers
|
||||
|
||||
# AWX
|
||||
from awx.conf import fields, register
|
||||
from awx.conf import fields, register, register_validate
|
||||
from awx.api.fields import OAuth2ProviderField
|
||||
from oauth2_provider.settings import oauth2_settings
|
||||
|
||||
@@ -27,6 +31,17 @@ register(
|
||||
category=_('Authentication'),
|
||||
category_slug='authentication',
|
||||
)
|
||||
register(
|
||||
'DISABLE_LOCAL_AUTH',
|
||||
field_class=fields.BooleanField,
|
||||
label=_('Disable the built-in authentication system'),
|
||||
help_text=_(
|
||||
"Controls whether users are prevented from using the built-in authentication system. "
|
||||
"You probably want to do this if you are using an LDAP or SAML integration."
|
||||
),
|
||||
category=_('Authentication'),
|
||||
category_slug='authentication',
|
||||
)
|
||||
register(
|
||||
'AUTH_BASIC_ENABLED',
|
||||
field_class=fields.BooleanField,
|
||||
@@ -81,3 +96,23 @@ register(
|
||||
category=_('Authentication'),
|
||||
category_slug='authentication',
|
||||
)
|
||||
|
||||
|
||||
def authentication_validate(serializer, attrs):
|
||||
remote_auth_settings = [
|
||||
'AUTH_LDAP_SERVER_URI',
|
||||
'SOCIAL_AUTH_GOOGLE_OAUTH2_KEY',
|
||||
'SOCIAL_AUTH_GITHUB_KEY',
|
||||
'SOCIAL_AUTH_GITHUB_ORG_KEY',
|
||||
'SOCIAL_AUTH_GITHUB_TEAM_KEY',
|
||||
'SOCIAL_AUTH_SAML_ENABLED_IDPS',
|
||||
'RADIUS_SERVER',
|
||||
'TACACSPLUS_HOST',
|
||||
]
|
||||
if attrs.get('DISABLE_LOCAL_AUTH', False):
|
||||
if not any(getattr(settings, s, None) for s in remote_auth_settings):
|
||||
raise serializers.ValidationError(_("There are no remote authentication systems configured."))
|
||||
return attrs
|
||||
|
||||
|
||||
register_validate('authentication', authentication_validate)
|
||||
|
||||
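Because the new setting is registered under `category_slug='authentication'` and guarded by `authentication_validate`, it can be exercised end-to-end through the settings API. The following is only a hedged sketch: the host, credentials, and use of `requests` are illustrative, not part of this change.

```
import requests

# Placeholder host and credentials; adjust for a real deployment.
resp = requests.patch(
    "https://awx.example.com/api/v2/settings/authentication/",
    auth=("admin", "password"),
    json={"DISABLE_LOCAL_AUTH": True},
)

# With no LDAP/SAML/RADIUS/etc. settings configured, authentication_validate
# rejects the change, so this returns HTTP 400 with
# "There are no remote authentication systems configured."
print(resp.status_code, resp.json())
```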
@@ -150,7 +150,7 @@ SUMMARIZABLE_FK_FIELDS = {
|
||||
'group': DEFAULT_SUMMARY_FIELDS,
|
||||
'default_environment': DEFAULT_SUMMARY_FIELDS + ('image',),
|
||||
'execution_environment': DEFAULT_SUMMARY_FIELDS + ('image',),
|
||||
'project': DEFAULT_SUMMARY_FIELDS + ('status', 'scm_type'),
|
||||
'project': DEFAULT_SUMMARY_FIELDS + ('status', 'scm_type', 'allow_override'),
|
||||
'source_project': DEFAULT_SUMMARY_FIELDS + ('status', 'scm_type'),
|
||||
'project_update': DEFAULT_SUMMARY_FIELDS + ('status', 'failed'),
|
||||
'credential': DEFAULT_SUMMARY_FIELDS + ('kind', 'cloud', 'kubernetes', 'credential_type_id'),
|
||||
@@ -2207,6 +2207,7 @@ class InventoryUpdateSerializer(UnifiedJobSerializer, InventorySourceOptionsSeri
|
||||
'org_host_limit_error',
|
||||
'source_project_update',
|
||||
'custom_virtualenv',
|
||||
'instance_group',
|
||||
'-controller_node',
|
||||
)
|
||||
|
||||
@@ -4374,7 +4375,7 @@ class NotificationTemplateSerializer(BaseSerializer):
|
||||
return res
|
||||
|
||||
def _recent_notifications(self, obj):
|
||||
return [{'id': x.id, 'status': x.status, 'created': x.created} for x in obj.notifications.all().order_by('-created')[:5]]
|
||||
return [{'id': x.id, 'status': x.status, 'created': x.created, 'error': x.error} for x in obj.notifications.all().order_by('-created')[:5]]
|
||||
|
||||
def get_summary_fields(self, obj):
|
||||
d = super(NotificationTemplateSerializer, self).get_summary_fields(obj)
|
||||
|
||||
@@ -685,7 +685,6 @@ class TeamAccessList(ResourceAccessList):
|
||||
|
||||
class ExecutionEnvironmentList(ListCreateAPIView):
|
||||
|
||||
always_allow_superuser = False
|
||||
model = models.ExecutionEnvironment
|
||||
serializer_class = serializers.ExecutionEnvironmentSerializer
|
||||
swagger_topic = "Execution Environments"
|
||||
@@ -693,7 +692,6 @@ class ExecutionEnvironmentList(ListCreateAPIView):
|
||||
|
||||
class ExecutionEnvironmentDetail(RetrieveUpdateDestroyAPIView):
|
||||
|
||||
always_allow_superuser = False
|
||||
model = models.ExecutionEnvironment
|
||||
serializer_class = serializers.ExecutionEnvironmentSerializer
|
||||
swagger_topic = "Execution Environments"
|
||||
|
||||
@@ -24,7 +24,7 @@ from awx.api.generics import APIView
|
||||
from awx.conf.registry import settings_registry
|
||||
from awx.main.analytics import all_collectors
|
||||
from awx.main.ha import is_ha_environment
|
||||
from awx.main.utils import get_awx_version, get_custom_venv_choices, to_python_boolean
|
||||
from awx.main.utils import get_awx_version, get_custom_venv_choices
|
||||
from awx.main.utils.licensing import validate_entitlement_manifest
|
||||
from awx.api.versioning import reverse, drf_reverse
|
||||
from awx.main.constants import PRIVILEGE_ESCALATION_METHODS
|
||||
@@ -313,16 +313,6 @@ class ApiV2ConfigView(APIView):
|
||||
def post(self, request):
|
||||
if not isinstance(request.data, dict):
|
||||
return Response({"error": _("Invalid subscription data")}, status=status.HTTP_400_BAD_REQUEST)
|
||||
if "eula_accepted" not in request.data:
|
||||
return Response({"error": _("Missing 'eula_accepted' property")}, status=status.HTTP_400_BAD_REQUEST)
|
||||
try:
|
||||
eula_accepted = to_python_boolean(request.data["eula_accepted"])
|
||||
except ValueError:
|
||||
return Response({"error": _("'eula_accepted' value is invalid")}, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
if not eula_accepted:
|
||||
return Response({"error": _("'eula_accepted' must be True")}, status=status.HTTP_400_BAD_REQUEST)
|
||||
request.data.pop("eula_accepted")
|
||||
try:
|
||||
data_actual = json.dumps(request.data)
|
||||
except Exception:
|
||||
|
||||
@@ -3,9 +3,9 @@ import logging
|
||||
|
||||
# Django
|
||||
from django.conf import settings
|
||||
from django.core.cache import cache
|
||||
from django.core.signals import setting_changed
|
||||
from django.db.models.signals import post_save, pre_delete, post_delete
|
||||
from django.core.cache import cache
|
||||
from django.dispatch import receiver
|
||||
|
||||
# AWX
|
||||
@@ -25,7 +25,7 @@ def handle_setting_change(key, for_delete=False):
|
||||
# Note: Doesn't handle multiple levels of dependencies!
|
||||
setting_keys.append(dependent_key)
|
||||
# NOTE: This block is probably duplicated.
|
||||
cache_keys = set([Setting.get_cache_key(k) for k in setting_keys])
|
||||
cache_keys = {Setting.get_cache_key(k) for k in setting_keys}
|
||||
cache.delete_many(cache_keys)
|
||||
|
||||
# Send setting_changed signal with new value for each setting.
|
||||
@@ -58,3 +58,18 @@ def on_post_delete_setting(sender, **kwargs):
|
||||
key = getattr(instance, '_saved_key_', None)
|
||||
if key:
|
||||
handle_setting_change(key, True)
|
||||
|
||||
|
||||
@receiver(setting_changed)
|
||||
def disable_local_auth(**kwargs):
|
||||
if (kwargs['setting'], kwargs['value']) == ('DISABLE_LOCAL_AUTH', True):
|
||||
from django.contrib.auth.models import User
|
||||
from oauth2_provider.models import RefreshToken
|
||||
from awx.main.models.oauth import OAuth2AccessToken
|
||||
from awx.main.management.commands.revoke_oauth2_tokens import revoke_tokens
|
||||
|
||||
logger.warning("Triggering token invalidation for local users.")
|
||||
|
||||
qs = User.objects.filter(profile__ldap_dn='', enterprise_auth__isnull=True, social_auth__isnull=True)
|
||||
revoke_tokens(RefreshToken.objects.filter(revoked=None, user__in=qs))
|
||||
revoke_tokens(OAuth2AccessToken.objects.filter(user__in=qs))
|
||||
|
||||
@@ -1356,11 +1356,8 @@ class ExecutionEnvironmentAccess(BaseAccess):
|
||||
return Organization.accessible_objects(self.user, 'execution_environment_admin_role').exists()
|
||||
return self.check_related('organization', Organization, data, mandatory=True, role_field='execution_environment_admin_role')
|
||||
|
||||
@check_superuser
|
||||
def can_change(self, obj, data):
|
||||
if obj.managed_by_tower:
|
||||
raise PermissionDenied
|
||||
if self.user.is_superuser:
|
||||
return True
|
||||
if obj and obj.organization_id is None:
|
||||
raise PermissionDenied
|
||||
if self.user not in obj.organization.execution_environment_admin_role:
|
||||
|
||||
@@ -177,7 +177,7 @@ def gather(dest=None, module=None, subset=None, since=None, until=None, collecti
|
||||
|
||||
if collection_type != 'dry-run':
|
||||
if not settings.INSIGHTS_TRACKING_STATE:
|
||||
logger.log(log_level, "Automation Analytics not enabled. Use --dry-run to gather locally without sending.")
|
||||
logger.log(log_level, "Insights for Ansible Automation Platform not enabled. Use --dry-run to gather locally without sending.")
|
||||
return None
|
||||
|
||||
if not (settings.AUTOMATION_ANALYTICS_URL and settings.REDHAT_USERNAME and settings.REDHAT_PASSWORD):
|
||||
@@ -330,10 +330,10 @@ def ship(path):
|
||||
Ship gathered metrics to the Insights API
|
||||
"""
|
||||
if not path:
|
||||
logger.error('Automation Analytics TAR not found')
|
||||
logger.error('Insights for Ansible Automation Platform TAR not found')
|
||||
return False
|
||||
if not os.path.exists(path):
|
||||
logger.error('Automation Analytics TAR {} not found'.format(path))
|
||||
logger.error('Insights for Ansible Automation Platform TAR {} not found'.format(path))
|
||||
return False
|
||||
if "Error:" in str(path):
|
||||
return False
|
||||
|
||||
14
awx/main/backends.py
Normal file
@@ -0,0 +1,14 @@
import logging

from django.conf import settings
from django.contrib.auth.backends import ModelBackend

logger = logging.getLogger('awx.main.backends')


class AWXModelBackend(ModelBackend):
    def authenticate(self, request, **kwargs):
        if settings.DISABLE_LOCAL_AUTH:
            logger.warning(f"User '{kwargs['username']}' attempted login through the disabled local authentication system.")
            return
        return super().authenticate(request, **kwargs)
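How this backend gets activated is not shown in the diff; presumably it stands in for Django's stock `ModelBackend` in the `AUTHENTICATION_BACKENDS` setting. A hedged sketch of that wiring (ordering and the other entries are assumptions):

```
# settings sketch, not from this diff
AUTHENTICATION_BACKENDS = [
    # Replaces 'django.contrib.auth.backends.ModelBackend' so local password
    # logins can be switched off via DISABLE_LOCAL_AUTH.
    'awx.main.backends.AWXModelBackend',
    # ...any configured LDAP/SAML/social auth backends remain listed here...
]
```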
@@ -36,7 +36,7 @@ register(
|
||||
'ORG_ADMINS_CAN_SEE_ALL_USERS',
|
||||
field_class=fields.BooleanField,
|
||||
label=_('All Users Visible to Organization Admins'),
|
||||
help_text=_('Controls whether any Organization Admin can view all users and teams, ' 'even those not associated with their Organization.'),
|
||||
help_text=_('Controls whether any Organization Admin can view all users and teams, even those not associated with their Organization.'),
|
||||
category=_('System'),
|
||||
category_slug='system',
|
||||
)
|
||||
@@ -59,7 +59,7 @@ register(
|
||||
schemes=('http', 'https'),
|
||||
allow_plain_hostname=True, # Allow hostname only without TLD.
|
||||
label=_('Base URL of the service'),
|
||||
help_text=_('This setting is used by services like notifications to render ' 'a valid url to the service.'),
|
||||
help_text=_('This setting is used by services like notifications to render a valid url to the service.'),
|
||||
category=_('System'),
|
||||
category_slug='system',
|
||||
)
|
||||
@@ -94,13 +94,12 @@ register(
|
||||
category_slug='system',
|
||||
)
|
||||
|
||||
|
||||
register(
|
||||
'LICENSE',
|
||||
field_class=fields.DictField,
|
||||
default=lambda: {},
|
||||
label=_('License'),
|
||||
help_text=_('The license controls which features and functionality are ' 'enabled. Use /api/v2/config/ to update or change ' 'the license.'),
|
||||
help_text=_('The license controls which features and functionality are enabled. Use /api/v2/config/ to update or change the license.'),
|
||||
category=_('System'),
|
||||
category_slug='system',
|
||||
)
|
||||
@@ -113,7 +112,7 @@ register(
|
||||
encrypted=False,
|
||||
read_only=False,
|
||||
label=_('Red Hat customer username'),
|
||||
help_text=_('This username is used to send data to Automation Analytics'),
|
||||
help_text=_('This username is used to send data to Insights for Ansible Automation Platform'),
|
||||
category=_('System'),
|
||||
category_slug='system',
|
||||
)
|
||||
@@ -126,7 +125,7 @@ register(
|
||||
encrypted=True,
|
||||
read_only=False,
|
||||
label=_('Red Hat customer password'),
|
||||
help_text=_('This password is used to send data to Automation Analytics'),
|
||||
help_text=_('This password is used to send data to Insights for Ansible Automation Platform'),
|
||||
category=_('System'),
|
||||
category_slug='system',
|
||||
)
|
||||
@@ -163,8 +162,8 @@ register(
|
||||
default='https://example.com',
|
||||
schemes=('http', 'https'),
|
||||
allow_plain_hostname=True, # Allow hostname only without TLD.
|
||||
label=_('Automation Analytics upload URL'),
|
||||
help_text=_('This setting is used to to configure data collection for the Automation Analytics dashboard'),
|
||||
label=_('Insights for Ansible Automation Platform upload URL'),
|
||||
help_text=_('This setting is used to to configure the upload URL for data collection for Red Hat Insights.'),
|
||||
category=_('System'),
|
||||
category_slug='system',
|
||||
)
|
||||
@@ -194,7 +193,7 @@ register(
|
||||
'CUSTOM_VENV_PATHS',
|
||||
field_class=fields.StringListPathField,
|
||||
label=_('Custom virtual environment paths'),
|
||||
help_text=_('Paths where Tower will look for custom virtual environments ' '(in addition to /var/lib/awx/venv/). Enter one path per line.'),
|
||||
help_text=_('Paths where Tower will look for custom virtual environments (in addition to /var/lib/awx/venv/). Enter one path per line.'),
|
||||
category=_('System'),
|
||||
category_slug='system',
|
||||
default=[],
|
||||
@@ -265,7 +264,7 @@ register(
|
||||
'INSIGHTS_TRACKING_STATE',
|
||||
field_class=fields.BooleanField,
|
||||
default=False,
|
||||
label=_('Gather data for Automation Analytics'),
|
||||
label=_('Gather data for Insights for Ansible Automation Platform'),
|
||||
help_text=_('Enables the service to gather data on automation and send it to Red Hat Insights.'),
|
||||
category=_('System'),
|
||||
category_slug='system',
|
||||
@@ -318,7 +317,7 @@ register(
|
||||
field_class=fields.BooleanField,
|
||||
default=False,
|
||||
label=_('Ignore Ansible Galaxy SSL Certificate Verification'),
|
||||
help_text=_('If set to true, certificate validation will not be done when ' 'installing content from any Galaxy server.'),
|
||||
help_text=_('If set to true, certificate validation will not be done when installing content from any Galaxy server.'),
|
||||
category=_('Jobs'),
|
||||
category_slug='jobs',
|
||||
)
|
||||
@@ -433,7 +432,7 @@ register(
|
||||
allow_null=False,
|
||||
default=200,
|
||||
label=_('Maximum number of forks per job'),
|
||||
help_text=_('Saving a Job Template with more than this number of forks will result in an error. ' 'When set to 0, no limit is applied.'),
|
||||
help_text=_('Saving a Job Template with more than this number of forks will result in an error. When set to 0, no limit is applied.'),
|
||||
category=_('Jobs'),
|
||||
category_slug='jobs',
|
||||
)
|
||||
@@ -454,7 +453,7 @@ register(
|
||||
allow_null=True,
|
||||
default=None,
|
||||
label=_('Logging Aggregator Port'),
|
||||
help_text=_('Port on Logging Aggregator to send logs to (if required and not' ' provided in Logging Aggregator).'),
|
||||
help_text=_('Port on Logging Aggregator to send logs to (if required and not provided in Logging Aggregator).'),
|
||||
category=_('Logging'),
|
||||
category_slug='logging',
|
||||
required=False,
|
||||
@@ -561,7 +560,7 @@ register(
|
||||
field_class=fields.IntegerField,
|
||||
default=5,
|
||||
label=_('TCP Connection Timeout'),
|
||||
help_text=_('Number of seconds for a TCP connection to external log ' 'aggregator to timeout. Applies to HTTPS and TCP log ' 'aggregator protocols.'),
|
||||
help_text=_('Number of seconds for a TCP connection to external log aggregator to timeout. Applies to HTTPS and TCP log aggregator protocols.'),
|
||||
category=_('Logging'),
|
||||
category_slug='logging',
|
||||
unit=_('seconds'),
|
||||
@@ -627,7 +626,7 @@ register(
|
||||
field_class=fields.BooleanField,
|
||||
default=False,
|
||||
label=_('Enable rsyslogd debugging'),
|
||||
help_text=_('Enabled high verbosity debugging for rsyslogd. ' 'Useful for debugging connection issues for external log aggregation.'),
|
||||
help_text=_('Enabled high verbosity debugging for rsyslogd. Useful for debugging connection issues for external log aggregation.'),
|
||||
category=_('Logging'),
|
||||
category_slug='logging',
|
||||
)
|
||||
@@ -636,7 +635,7 @@ register(
|
||||
register(
|
||||
'AUTOMATION_ANALYTICS_LAST_GATHER',
|
||||
field_class=fields.DateTimeField,
|
||||
label=_('Last gather date for Automation Analytics.'),
|
||||
label=_('Last gather date for Insights for Ansible Automation Platform.'),
|
||||
allow_null=True,
|
||||
category=_('System'),
|
||||
category_slug='system',
|
||||
@@ -644,7 +643,7 @@ register(
|
||||
register(
|
||||
'AUTOMATION_ANALYTICS_LAST_ENTRIES',
|
||||
field_class=fields.CharField,
|
||||
label=_('Last gathered entries for expensive Automation Analytics collectors.'),
|
||||
label=_('Last gathered entries for expensive collectors for Insights for Ansible Automation Platform.'),
|
||||
default='',
|
||||
allow_blank=True,
|
||||
category=_('System'),
|
||||
@@ -655,7 +654,7 @@ register(
|
||||
register(
|
||||
'AUTOMATION_ANALYTICS_GATHER_INTERVAL',
|
||||
field_class=fields.IntegerField,
|
||||
label=_('Automation Analytics Gather Interval'),
|
||||
label=_('Insights for Ansible Automation Platform Gather Interval'),
|
||||
help_text=_('Interval (in seconds) between data gathering.'),
|
||||
default=14400, # every 4 hours
|
||||
min_value=1800, # every 30 minutes
|
||||
|
||||
@@ -1,13 +1,5 @@
|
||||
from django.db import connections
|
||||
from django.db.backends.sqlite3.base import DatabaseWrapper
|
||||
from django.core.management.commands.makemigrations import Command as MakeMigrations
|
||||
|
||||
|
||||
class Command(MakeMigrations):
|
||||
def execute(self, *args, **options):
|
||||
settings = connections['default'].settings_dict.copy()
|
||||
settings['ENGINE'] = 'sqlite3'
|
||||
if 'application_name' in settings['OPTIONS']:
|
||||
del settings['OPTIONS']['application_name']
|
||||
connections['default'] = DatabaseWrapper(settings)
|
||||
return MakeMigrations().execute(*args, **options)
|
||||
pass
|
||||
|
||||
@@ -69,7 +69,7 @@ class Command(BaseCommand):
|
||||
changed = True
|
||||
|
||||
for ee in reversed(settings.DEFAULT_EXECUTION_ENVIRONMENTS):
|
||||
_, created = ExecutionEnvironment.objects.get_or_create(name=ee['name'], defaults={'image': ee['image'], 'managed_by_tower': True})
|
||||
_, created = ExecutionEnvironment.objects.update_or_create(name=ee['name'], defaults={'image': ee['image'], 'managed_by_tower': True})
|
||||
|
||||
if created:
|
||||
changed = True
|
||||
|
||||
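For context, the switch from `get_or_create` to `update_or_create` means the default execution environments are refreshed, not just created once. A brief sketch of the difference, where `spec` stands in for one entry of `settings.DEFAULT_EXECUTION_ENVIRONMENTS`:

```
# get_or_create: creates the row if 'name' is missing, otherwise leaves the
# existing image untouched.
# update_or_create: additionally rewrites 'image' and 'managed_by_tower' on an
# existing row, so editing DEFAULT_EXECUTION_ENVIRONMENTS in settings updates
# the registered default EEs the next time this command runs.
ee, created = ExecutionEnvironment.objects.update_or_create(
    name=spec['name'],
    defaults={'image': spec['image'], 'managed_by_tower': True},
)
```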
@@ -31,6 +31,7 @@ class Command(BaseCommand):
|
||||
for session in sessions:
|
||||
user_id = session.get_decoded().get('_auth_user_id')
|
||||
if (user is None) or (user_id and user.id == int(user_id)):
|
||||
# The Session model instance doesn't have .flush(), we need a SessionStore instance.
|
||||
session = import_module(settings.SESSION_ENGINE).SessionStore(session.session_key)
|
||||
# Log out the session, but without the need for a request object.
|
||||
session.flush()
|
||||
|
||||
54
awx/main/management/commands/host_metric.py
Normal file
@@ -0,0 +1,54 @@
from django.core.management.base import BaseCommand
import datetime
from django.core.serializers.json import DjangoJSONEncoder
from awx.main.models.inventory import HostMetric
import json


class Command(BaseCommand):

    help = 'This is for offline licensing usage'

    def add_arguments(self, parser):
        parser.add_argument('--since', type=datetime.datetime.fromisoformat, help='Start Date in ISO format YYYY-MM-DD')
        parser.add_argument('--until', type=datetime.datetime.fromisoformat, help='End Date in ISO format YYYY-MM-DD')
        parser.add_argument('--json', action='store_true', help='Select output as JSON')

    def handle(self, *args, **options):
        since = options.get('since')
        until = options.get('until')

        if since is None and until is None:
            print("No Arguments received")
            return None

        if since is not None and since.tzinfo is None:
            since = since.replace(tzinfo=datetime.timezone.utc)

        if until is not None and until.tzinfo is None:
            until = until.replace(tzinfo=datetime.timezone.utc)

        filter_kwargs = {}
        if since is not None:
            filter_kwargs['last_automation__gte'] = since
        if until is not None:
            filter_kwargs['last_automation__lte'] = until

        result = HostMetric.objects.filter(**filter_kwargs)

        # if --json flag is set, output the result in json format
        if options['json']:
            list_of_queryset = list(result.values('hostname', 'first_automation', 'last_automation'))
            json_result = json.dumps(list_of_queryset, cls=DjangoJSONEncoder)
            print(json_result)

        # --json flag is not set, output in plain text
        else:
            print(f"Total Number of hosts automated: {len(result)}")
            for item in result:
                print(
                    "Hostname : {hostname} | first_automation : {first_automation} | last_automation : {last_automation}".format(
                        hostname=item.hostname, first_automation=item.first_automation, last_automation=item.last_automation
                    )
                )
        return
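A hedged usage sketch for the new command (normally invoked through the `awx-manage` wrapper referenced by `MANAGEMENT_COMMAND ?= awx-manage` in the Makefile); the dates are placeholders:

```
from django.core.management import call_command

# Equivalent to: awx-manage host_metric --since 2021-01-01 --json
call_command('host_metric', '--since=2021-01-01', '--json')

# Plain-text report for a bounded window:
call_command('host_metric', '--since=2021-01-01', '--until=2021-06-01')
```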
@@ -10,7 +10,6 @@ import subprocess
|
||||
import sys
|
||||
import time
|
||||
import traceback
|
||||
import shutil
|
||||
|
||||
# Django
|
||||
from django.conf import settings
|
||||
@@ -75,17 +74,6 @@ class AnsibleInventoryLoader(object):
|
||||
else:
|
||||
self.venv_path = settings.ANSIBLE_VENV_PATH
|
||||
|
||||
def get_path_to_ansible_inventory(self):
|
||||
venv_exe = os.path.join(self.venv_path, 'bin', 'ansible-inventory')
|
||||
if os.path.exists(venv_exe):
|
||||
return venv_exe
|
||||
elif os.path.exists(os.path.join(self.venv_path, 'bin', 'ansible')):
|
||||
# if bin/ansible exists but bin/ansible-inventory doesn't, it's
|
||||
# probably a really old version of ansible that doesn't support
|
||||
# ansible-inventory
|
||||
raise RuntimeError("{} does not exist (please upgrade to ansible >= 2.4)".format(venv_exe))
|
||||
return shutil.which('ansible-inventory')
|
||||
|
||||
def get_base_args(self):
|
||||
bargs = ['podman', 'run', '--user=root', '--quiet']
|
||||
bargs.extend(['-v', '{0}:{0}:Z'.format(self.source)])
|
||||
|
||||
@@ -7,6 +7,7 @@ import time
|
||||
import urllib.parse
|
||||
|
||||
from django.conf import settings
|
||||
from django.contrib.auth import logout
|
||||
from django.contrib.auth.models import User
|
||||
from django.db.migrations.executor import MigrationExecutor
|
||||
from django.db import connection
|
||||
@@ -71,6 +72,21 @@ class SessionTimeoutMiddleware(MiddlewareMixin):
|
||||
return response
|
||||
|
||||
|
||||
class DisableLocalAuthMiddleware(MiddlewareMixin):
|
||||
"""
|
||||
Respects the presence of the DISABLE_LOCAL_AUTH setting and forces
|
||||
local-only users to logout when they make a request.
|
||||
"""
|
||||
|
||||
def process_request(self, request):
|
||||
if settings.DISABLE_LOCAL_AUTH:
|
||||
user = request.user
|
||||
if not user.pk:
|
||||
return
|
||||
if not (user.profile.ldap_dn or user.social_auth.exists() or user.enterprise_auth.exists()):
|
||||
logout(request)
|
||||
|
||||
|
||||
def _customize_graph():
|
||||
from awx.main.models import Instance, Schedule, UnifiedJobTemplate
|
||||
|
||||
|
||||
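As with the authentication backend above, the diff does not show how this middleware is enabled; presumably it is added to `MIDDLEWARE` in settings. A hedged sketch only:

```
# settings sketch, not from this diff
MIDDLEWARE = [
    # ...existing Django/AWX middleware...
    'awx.main.middleware.DisableLocalAuthMiddleware',  # logs out local-only sessions
]
```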
@@ -16,7 +16,7 @@ class Migration(migrations.Migration):
|
||||
field=models.CharField(
|
||||
choices=[
|
||||
('always', 'Always pull container before running.'),
|
||||
('missing', 'No pull option has been selected.'),
|
||||
('missing', 'Only pull the image if not present before running.'),
|
||||
('never', 'Never pull container before running.'),
|
||||
],
|
||||
blank=True,
|
||||
|
||||
19
awx/main/migrations/0141_remove_isolated_instances.py
Normal file
@@ -0,0 +1,19 @@
|
||||
# Generated by Django 2.2.16 on 2021-05-11 19:38
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
def forwards(apps, schema_editor):
|
||||
Instance = apps.get_model('main', 'Instance')
|
||||
Instance.objects.filter(version__startswith='ansible-runner-').delete()
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('main', '0140_rename'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(forwards),
|
||||
]
|
||||
@@ -0,0 +1,22 @@
|
||||
# Generated by Django 2.2.16 on 2021-05-12 20:08
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('main', '0141_remove_isolated_instances'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='executionenvironment',
|
||||
name='image',
|
||||
field=models.CharField(
|
||||
help_text='The full image location, including the container registry, image name, and version tag.',
|
||||
max_length=1024,
|
||||
verbose_name='image location',
|
||||
),
|
||||
),
|
||||
]
|
||||
21
awx/main/migrations/0143_hostmetric.py
Normal file
@@ -0,0 +1,21 @@
|
||||
# Generated by Django 2.2.16 on 2021-05-18 18:08
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('main', '0142_update_ee_image_field_description'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='HostMetric',
|
||||
fields=[
|
||||
('hostname', models.CharField(max_length=512, primary_key=True, serialize=False)),
|
||||
('first_automation', models.DateTimeField(auto_now_add=True, db_index=True, help_text='When the host was first automated against')),
|
||||
('last_automation', models.DateTimeField(db_index=True, help_text='When the host was last automated against')),
|
||||
],
|
||||
),
|
||||
]
|
||||
@@ -12,7 +12,16 @@ from awx.main.models.unified_jobs import UnifiedJob, UnifiedJobTemplate, StdoutM
|
||||
from awx.main.models.organization import Organization, Profile, Team, UserSessionMembership # noqa
|
||||
from awx.main.models.credential import Credential, CredentialType, CredentialInputSource, ManagedCredentialType, build_safe_env # noqa
|
||||
from awx.main.models.projects import Project, ProjectUpdate # noqa
|
||||
from awx.main.models.inventory import Group, Host, Inventory, InventorySource, InventoryUpdate, SmartInventoryMembership # noqa
|
||||
from awx.main.models.inventory import ( # noqa
|
||||
CustomInventoryScript,
|
||||
Group,
|
||||
Host,
|
||||
HostMetric,
|
||||
Inventory,
|
||||
InventorySource,
|
||||
InventoryUpdate,
|
||||
SmartInventoryMembership,
|
||||
)
|
||||
from awx.main.models.jobs import ( # noqa
|
||||
Job,
|
||||
JobHostSummary,
|
||||
|
||||
@@ -31,6 +31,7 @@ from awx.main.fields import (
|
||||
)
|
||||
from awx.main.utils import decrypt_field, classproperty
|
||||
from awx.main.utils.safe_yaml import safe_dump
|
||||
from awx.main.utils.execution_environments import to_container_path
|
||||
from awx.main.validators import validate_ssh_private_key
|
||||
from awx.main.models.base import CommonModelNameNotUnique, PasswordFieldsModel, PrimordialModel
|
||||
from awx.main.models.mixins import ResourceMixin
|
||||
@@ -493,12 +494,11 @@ class CredentialType(CommonModelNameNotUnique):
|
||||
|
||||
for file_label, file_tmpl in file_tmpls.items():
|
||||
data = sandbox_env.from_string(file_tmpl).render(**namespace)
|
||||
_, path = tempfile.mkstemp(dir=private_data_dir)
|
||||
_, path = tempfile.mkstemp(dir=os.path.join(private_data_dir, 'env'))
|
||||
with open(path, 'w') as f:
|
||||
f.write(data)
|
||||
os.chmod(path, stat.S_IRUSR | stat.S_IWUSR)
|
||||
# FIXME: develop some better means of referencing paths inside containers
|
||||
container_path = os.path.join('/runner', os.path.basename(path))
|
||||
container_path = to_container_path(path, private_data_dir)
|
||||
|
||||
# determine if filename indicates single file or many
|
||||
if file_label.find('.') == -1:
|
||||
@@ -526,7 +526,7 @@ class CredentialType(CommonModelNameNotUnique):
|
||||
extra_vars[var_name] = sandbox_env.from_string(tmpl).render(**namespace)
|
||||
|
||||
def build_extra_vars_file(vars, private_dir):
|
||||
handle, path = tempfile.mkstemp(dir=private_dir)
|
||||
handle, path = tempfile.mkstemp(dir=os.path.join(private_dir, 'env'))
|
||||
f = os.fdopen(handle, 'w')
|
||||
f.write(safe_dump(vars))
|
||||
f.close()
|
||||
@@ -535,8 +535,7 @@ class CredentialType(CommonModelNameNotUnique):
|
||||
|
||||
if extra_vars:
|
||||
path = build_extra_vars_file(extra_vars, private_data_dir)
|
||||
# FIXME: develop some better means of referencing paths inside containers
|
||||
container_path = os.path.join('/runner', os.path.basename(path))
|
||||
container_path = to_container_path(path, private_data_dir)
|
||||
args.extend(['-e', '@%s' % container_path])
|
||||
|
||||
|
||||
|
||||
@@ -6,6 +6,8 @@ import tempfile
|
||||
|
||||
from django.conf import settings
|
||||
|
||||
from awx.main.utils.execution_environments import to_container_path
|
||||
|
||||
|
||||
def aws(cred, env, private_data_dir):
|
||||
env['AWS_ACCESS_KEY_ID'] = cred.get_input('username', default='')
|
||||
@@ -25,13 +27,14 @@ def gce(cred, env, private_data_dir):
|
||||
env['GCE_PROJECT'] = project
|
||||
json_cred['token_uri'] = 'https://oauth2.googleapis.com/token'
|
||||
|
||||
handle, path = tempfile.mkstemp(dir=private_data_dir)
|
||||
handle, path = tempfile.mkstemp(dir=os.path.join(private_data_dir, 'env'))
|
||||
f = os.fdopen(handle, 'w')
|
||||
json.dump(json_cred, f, indent=2)
|
||||
f.close()
|
||||
os.chmod(path, stat.S_IRUSR | stat.S_IWUSR)
|
||||
env['GCE_CREDENTIALS_FILE_PATH'] = os.path.join('/runner', os.path.basename(path))
|
||||
env['GCP_SERVICE_ACCOUNT_FILE'] = os.path.join('/runner', os.path.basename(path))
|
||||
container_path = to_container_path(path, private_data_dir)
|
||||
env['GCE_CREDENTIALS_FILE_PATH'] = container_path
|
||||
env['GCP_SERVICE_ACCOUNT_FILE'] = container_path
|
||||
|
||||
# Handle env variables for new module types.
|
||||
# This includes gcp_compute inventory plugin and
|
||||
@@ -96,14 +99,13 @@ def _openstack_data(cred):
|
||||
|
||||
|
||||
def openstack(cred, env, private_data_dir):
|
||||
handle, path = tempfile.mkstemp(dir=private_data_dir)
|
||||
handle, path = tempfile.mkstemp(dir=os.path.join(private_data_dir, 'env'))
|
||||
f = os.fdopen(handle, 'w')
|
||||
openstack_data = _openstack_data(cred)
|
||||
yaml.safe_dump(openstack_data, f, default_flow_style=False, allow_unicode=True)
|
||||
f.close()
|
||||
os.chmod(path, stat.S_IRUSR | stat.S_IWUSR)
|
||||
# TODO: constant for container base path
|
||||
env['OS_CLIENT_CONFIG_FILE'] = os.path.join('/runner', os.path.basename(path))
|
||||
env['OS_CLIENT_CONFIG_FILE'] = to_container_path(path, private_data_dir)
|
||||
|
||||
|
||||
def kubernetes_bearer_token(cred, env, private_data_dir):
|
||||
@@ -111,10 +113,10 @@ def kubernetes_bearer_token(cred, env, private_data_dir):
|
||||
env['K8S_AUTH_API_KEY'] = cred.get_input('bearer_token', default='')
|
||||
if cred.get_input('verify_ssl') and 'ssl_ca_cert' in cred.inputs:
|
||||
env['K8S_AUTH_VERIFY_SSL'] = 'True'
|
||||
handle, path = tempfile.mkstemp(dir=private_data_dir)
|
||||
handle, path = tempfile.mkstemp(dir=os.path.join(private_data_dir, 'env'))
|
||||
with os.fdopen(handle, 'w') as f:
|
||||
os.chmod(path, stat.S_IRUSR | stat.S_IWUSR)
|
||||
f.write(cred.get_input('ssl_ca_cert'))
|
||||
env['K8S_AUTH_SSL_CA_CERT'] = os.path.join('/runner', os.path.basename(path))
|
||||
env['K8S_AUTH_SSL_CA_CERT'] = to_container_path(path, private_data_dir)
|
||||
else:
|
||||
env['K8S_AUTH_VERIFY_SSL'] = 'False'
|
||||
|
||||
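Several hunks in this area swap hard-coded `/runner/...` joins for `to_container_path`. The helper itself (in `awx.main.utils.execution_environments`) is not part of this diff, so the following is only an assumed sketch of its behavior, based on ansible-runner mounting the private data directory at `/runner` inside the execution environment:

```
import os


def to_container_path_sketch(path, private_data_dir, container_root='/runner'):
    """Hypothetical stand-in for awx.main.utils.execution_environments.to_container_path."""
    relative = os.path.relpath(path, private_data_dir)
    if relative.startswith('..'):
        raise RuntimeError(f'{path} is not inside the private data dir {private_data_dir}')
    # e.g. /tmp/awx_123_xyz/env/tmpabcd -> /runner/env/tmpabcd
    return os.path.join(container_root, relative)
```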
@@ -510,12 +510,15 @@ class JobEvent(BasePlaybookEvent):
|
||||
job = self.job
|
||||
|
||||
from awx.main.models import Host, JobHostSummary # circular import
|
||||
from awx.main.models import Host, JobHostSummary, HostMetric
|
||||
|
||||
all_hosts = Host.objects.filter(pk__in=self.host_map.values()).only('id')
|
||||
all_hosts = Host.objects.filter(pk__in=self.host_map.values()).only('id', 'name')
|
||||
existing_host_ids = set(h.id for h in all_hosts)
|
||||
|
||||
summaries = dict()
|
||||
updated_hosts_list = list()
|
||||
for host in hostnames:
|
||||
updated_hosts_list.append(host)
|
||||
host_id = self.host_map.get(host, None)
|
||||
if host_id not in existing_host_ids:
|
||||
host_id = None
|
||||
@@ -546,6 +549,13 @@ class JobEvent(BasePlaybookEvent):
|
||||
|
||||
Host.objects.bulk_update(list(updated_hosts), ['last_job_id', 'last_job_host_summary_id'], batch_size=100)
|
||||
|
||||
# bulk-create
|
||||
current_time = now()
|
||||
HostMetric.objects.bulk_create(
|
||||
[HostMetric(hostname=hostname, last_automation=current_time) for hostname in updated_hosts_list], ignore_conflicts=True, batch_size=100
|
||||
)
|
||||
HostMetric.objects.filter(hostname__in=updated_hosts_list).update(last_automation=current_time)
|
||||
|
||||
@property
|
||||
def job_verbosity(self):
|
||||
return self.job.verbosity
|
||||
|
||||
@@ -14,7 +14,7 @@ class ExecutionEnvironment(CommonModel):
|
||||
|
||||
PULL_CHOICES = [
|
||||
('always', _("Always pull container before running.")),
|
||||
('missing', _("No pull option has been selected.")),
|
||||
('missing', _("Only pull the image if not present before running.")),
|
||||
('never', _("Never pull container before running.")),
|
||||
]
|
||||
|
||||
@@ -30,7 +30,7 @@ class ExecutionEnvironment(CommonModel):
|
||||
image = models.CharField(
|
||||
max_length=1024,
|
||||
verbose_name=_('image location'),
|
||||
help_text=_("The registry location where the container is stored."),
|
||||
help_text=_("The full image location, including the container registry, image name, and version tag."),
|
||||
)
|
||||
managed_by_tower = models.BooleanField(default=False, editable=False)
|
||||
credential = models.ForeignKey(
|
||||
|
||||
@@ -50,6 +50,7 @@ from awx.main.models.notifications import (
from awx.main.models.credential.injectors import _openstack_data
from awx.main.utils import _inventory_updates
from awx.main.utils.safe_yaml import sanitize_jinja
from awx.main.utils.execution_environments import to_container_path


__all__ = ['Inventory', 'Host', 'Group', 'InventorySource', 'InventoryUpdate', 'SmartInventoryMembership']
@@ -803,6 +804,12 @@ class Group(CommonModelNameNotUnique, RelatedJobsMixin):
return UnifiedJob.objects.non_polymorphic().filter(Q(job__inventory=self.inventory) | Q(inventoryupdate__inventory_source__groups=self))


class HostMetric(models.Model):
    hostname = models.CharField(primary_key=True, max_length=512)
    first_automation = models.DateTimeField(auto_now_add=True, null=False, db_index=True, help_text=_('When the host was first automated against'))
    last_automation = models.DateTimeField(db_index=True, help_text=_('When the host was last automated against'))


class InventorySourceOptions(BaseModel):
    """
    Common fields for InventorySource and InventoryUpdate.
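Since hostname is the primary key and both timestamps are indexed, simple reporting queries on this model stay cheap. A hedged sketch of the kind of query it enables; the 30-day window is an arbitrary illustration, not an AWX feature:

```python
from datetime import timedelta

from django.utils.timezone import now

from awx.main.models import HostMetric

# Hosts automated at least once in the last 30 days (window chosen for illustration).
recently_automated = HostMetric.objects.filter(last_automation__gte=now() - timedelta(days=30))
print(recently_automated.count())
```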
@@ -1505,7 +1512,7 @@ class openstack(PluginFileInjector):
env = super(openstack, self).get_plugin_env(inventory_update, private_data_dir, private_data_files)
credential = inventory_update.get_cloud_credential()
cred_data = private_data_files['credentials']
env['OS_CLIENT_CONFIG_FILE'] = os.path.join('/runner', os.path.basename(cred_data[credential]))
env['OS_CLIENT_CONFIG_FILE'] = to_container_path(cred_data[credential], private_data_dir)
return env

@@ -32,6 +32,7 @@ from awx.main.models.jobs import Job
from awx.main.models.mixins import ResourceMixin, TaskManagerProjectUpdateMixin, CustomVirtualEnvMixin, RelatedJobsMixin
from awx.main.utils import update_scm_url, polymorphic
from awx.main.utils.ansible import skip_directory, could_be_inventory, could_be_playbook
from awx.main.utils.execution_environments import get_default_execution_environment
from awx.main.fields import ImplicitRoleField
from awx.main.models.rbac import (
    ROLE_SINGLETON_SYSTEM_ADMINISTRATOR,
@@ -188,7 +189,7 @@ class ProjectOptions(models.Model):
Jobs using the project can use the default_environment, but the project updates
are not flexible enough to allow customizing the image they use.
"""
return self.get_default_execution_environment()
return get_default_execution_environment()

def get_project_path(self, check_if_exists=True):
local_path = os.path.basename(self.local_path)

@@ -1,10 +1,13 @@
import collections
import json
import logging
from base64 import b64encode
from urllib import parse as urlparse

from django.conf import settings
from kubernetes import client, config
from django.utils.functional import cached_property
from django.utils.translation import ugettext_lazy as _

from awx.main.utils.common import parse_yaml_or_json
from awx.main.utils.execution_environments import get_default_pod_spec
@@ -51,6 +54,96 @@ class PodManager(object):

        return pods

    def create_secret(self, job):
        registry_cred = job.execution_environment.credential
        host = registry_cred.get_input('host')
        # urlparse requires '//' to be provided if scheme is not specified
        original_parsed = urlparse.urlsplit(host)
        if (not original_parsed.scheme and not host.startswith('//')) or original_parsed.hostname is None:
            host = 'https://%s' % (host)
        parsed = urlparse.urlsplit(host)
        host = parsed.hostname
        if parsed.port:
            host = "{0}:{1}".format(host, parsed.port)

        username = registry_cred.get_input("username")
        password = registry_cred.get_input("password")

        # Construct container auth dict and base64 encode it
        token = b64encode("{}:{}".format(username, password).encode('UTF-8')).decode()
        auth_dict = json.dumps({"auths": {host: {"auth": token}}}, indent=4)
        auth_data = b64encode(str(auth_dict).encode('UTF-8')).decode()

        # Construct Secret object
        secret = client.V1Secret()
        secret_name = "automation-{0}-image-pull-secret-{1}".format(settings.INSTALL_UUID[:5], job.execution_environment.credential.id)
        secret.metadata = client.V1ObjectMeta(name="{}".format(secret_name))
        secret.type = "kubernetes.io/dockerconfigjson"
        secret.kind = "Secret"
        secret.data = {".dockerconfigjson": auth_data}

        # Check if secret already exists
        replace_secret = False
        try:
            existing_secret = self.kube_api.read_namespaced_secret(namespace=self.namespace, name=secret_name)
            if existing_secret.data != secret.data:
                replace_secret = True
            secret_exists = True
        except client.rest.ApiException as e:
            if e.status == 404:
                secret_exists = False
            else:
                error_msg = _('Invalid openshift or k8s cluster credential')
                if e.status == 403:
                    error_msg = _(
                        'Failed to create secret for container group {} because the needed service account roles are needed. Add get, create and delete roles for secret resources for your cluster credential.'.format(
                            job.instance_group.name
                        )
                    )
                full_error_msg = '{0}: {1}'.format(error_msg, str(e))
                logger.exception(full_error_msg)
                raise PermissionError(full_error_msg)

        if replace_secret:
            try:
                # Try to replace existing secret
                self.kube_api.delete_namespaced_secret(name=secret.metadata.name, namespace=self.namespace)
                self.kube_api.create_namespaced_secret(namespace=self.namespace, body=secret)
            except client.rest.ApiException as e:
                error_msg = _('Invalid openshift or k8s cluster credential')
                if e.status == 403:
                    error_msg = _(
                        'Failed to delete secret for container group {} because the needed service account roles are needed. Add create and delete roles for secret resources for your cluster credential.'.format(
                            job.instance_group.name
                        )
                    )
                full_error_msg = '{0}: {1}'.format(error_msg, str(e))
                logger.exception(full_error_msg)
                # let job continue for the case where secret was created manually and cluster cred doesn't have permission to create a secret
            except Exception as e:
                error_msg = 'Failed to create imagePullSecret for container group {}'.format(job.instance_group.name)
                logger.exception('{0}: {1}'.format(error_msg, str(e)))
                raise RuntimeError(error_msg)
        elif secret_exists and not replace_secret:
            pass
        else:
            # Create an image pull secret in namespace
            try:
                self.kube_api.create_namespaced_secret(namespace=self.namespace, body=secret)
            except client.rest.ApiException as e:
                if e.status == 403:
                    error_msg = _(
                        'Failed to create imagePullSecret: {}. Check that openshift or k8s credential has permission to create a secret.'.format(e.status)
                    )
                logger.exception(error_msg)
                # let job continue for the case where secret was created manually and cluster cred doesn't have permission to create a secret
            except Exception:
                error_msg = 'Failed to create imagePullSecret for container group {}'.format(job.instance_group.name)
                logger.exception(error_msg)
                job.cancel(job_explanation=error_msg)

        return secret.metadata.name

    @property
    def namespace(self):
        return self.pod_definition['metadata']['namespace']
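For reference, the payload that ends up in the Secret's `.dockerconfigjson` field can be checked by reversing the encoding. A small standalone sketch with placeholder registry credentials (the hostname, user, and password here are illustrative):

```python
import json
from base64 import b64decode, b64encode

# Illustrative inputs; real values come from the container registry credential.
host, username, password = 'registry.example.com', 'bob', 'secret'

token = b64encode('{}:{}'.format(username, password).encode('UTF-8')).decode()
auth_dict = json.dumps({"auths": {host: {"auth": token}}}, indent=4)
auth_data = b64encode(auth_dict.encode('UTF-8')).decode()

# Decoding recovers the familiar docker config structure.
print(json.loads(b64decode(auth_data)))
# {'auths': {'registry.example.com': {'auth': 'Ym9iOnNlY3JldA=='}}}
```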
@@ -81,7 +174,7 @@ class PodManager(object):

    @property
    def pod_name(self):
        return f"awx-job-{self.task.id}"
        return f"automation-job-{self.task.id}"

    @property
    def pod_definition(self):

@@ -28,6 +28,7 @@ import threading
|
||||
import concurrent.futures
|
||||
from base64 import b64encode
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
# Django
|
||||
from django.conf import settings
|
||||
@@ -95,8 +96,9 @@ from awx.main.utils import (
|
||||
get_awx_version,
|
||||
deepmerge,
|
||||
parse_yaml_or_json,
|
||||
cleanup_new_process,
|
||||
)
|
||||
from awx.main.utils.execution_environments import get_default_execution_environment, get_default_pod_spec
|
||||
from awx.main.utils.execution_environments import get_default_execution_environment, get_default_pod_spec, CONTAINER_ROOT, to_container_path
|
||||
from awx.main.utils.ansible import read_ansible_config
|
||||
from awx.main.utils.external_logging import reconfigure_rsyslog
|
||||
from awx.main.utils.safe_yaml import safe_dump, sanitize_jinja
|
||||
@@ -840,7 +842,7 @@ class BaseTask(object):
|
||||
username = cred.get_input('username')
|
||||
password = cred.get_input('password')
|
||||
token = "{}:{}".format(username, password)
|
||||
auth_data = {'auths': {host: {'auth': b64encode(token.encode('ascii')).decode()}}}
|
||||
auth_data = {'auths': {host: {'auth': b64encode(token.encode('UTF-8')).decode('UTF-8')}}}
|
||||
authfile.write(json.dumps(auth_data, indent=4))
|
||||
params["container_options"].append(f'--authfile={authfile.name}')
|
||||
else:
|
||||
@@ -853,7 +855,9 @@ class BaseTask(object):
|
||||
if settings.AWX_ISOLATION_SHOW_PATHS:
|
||||
params['container_volume_mounts'] = []
|
||||
for this_path in settings.AWX_ISOLATION_SHOW_PATHS:
|
||||
params['container_volume_mounts'].append(f'{this_path}:{this_path}:Z')
|
||||
# Using z allows the dir to mounted by multiple containers
|
||||
# Uppercase Z restricts access (in weird ways) to 1 container at a time
|
||||
params['container_volume_mounts'].append(f'{this_path}:{this_path}:z')
|
||||
return params
|
||||
|
||||
def build_private_data(self, instance, private_data_dir):
|
||||
@@ -873,11 +877,12 @@ class BaseTask(object):
|
||||
|
||||
path = tempfile.mkdtemp(prefix='awx_%s_' % instance.pk, dir=pdd_wrapper_path)
|
||||
os.chmod(path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
|
||||
runner_project_folder = os.path.join(path, 'project')
|
||||
if not os.path.exists(runner_project_folder):
|
||||
# Ansible Runner requires that this directory exists.
|
||||
# Specifically, when using process isolation
|
||||
os.mkdir(runner_project_folder)
|
||||
# Ansible runner requires that project exists,
|
||||
# and we will write files in the other folders without pre-creating the folder
|
||||
for subfolder in ('project', 'inventory', 'env'):
|
||||
runner_subfolder = os.path.join(path, subfolder)
|
||||
if not os.path.exists(runner_subfolder):
|
||||
os.mkdir(runner_subfolder)
|
||||
return path
|
||||
|
||||
def build_private_data_files(self, instance, private_data_dir):
|
||||
@@ -921,7 +926,7 @@ class BaseTask(object):
|
||||
# Instead, ssh private key file is explicitly passed via an
|
||||
# env variable.
|
||||
else:
|
||||
handle, path = tempfile.mkstemp(dir=private_data_dir)
|
||||
handle, path = tempfile.mkstemp(dir=os.path.join(private_data_dir, 'env'))
|
||||
f = os.fdopen(handle, 'w')
|
||||
f.write(data)
|
||||
f.close()
|
||||
@@ -994,6 +999,9 @@ class BaseTask(object):
|
||||
|
||||
env['AWX_PRIVATE_DATA_DIR'] = private_data_dir
|
||||
|
||||
if self.instance.execution_environment is None:
|
||||
raise RuntimeError('The project could not sync because there is no Execution Environment.')
|
||||
|
||||
ee_cred = self.instance.execution_environment.credential
|
||||
if ee_cred:
|
||||
verify_ssl = ee_cred.get_input('verify_ssl')
|
||||
@@ -1031,7 +1039,6 @@ class BaseTask(object):
|
||||
self.host_map = {hostname: hv.pop('remote_tower_id', '') for hostname, hv in script_data.get('_meta', {}).get('hostvars', {}).items()}
|
||||
json_data = json.dumps(script_data)
|
||||
path = os.path.join(private_data_dir, 'inventory')
|
||||
os.makedirs(path, mode=0o700)
|
||||
fn = os.path.join(path, 'hosts')
|
||||
with open(fn, 'w') as f:
|
||||
os.chmod(fn, stat.S_IRUSR | stat.S_IXUSR | stat.S_IWUSR)
|
||||
@@ -1057,9 +1064,6 @@ class BaseTask(object):
|
||||
os.chmod(path, stat.S_IRUSR)
|
||||
return path
|
||||
|
||||
def build_cwd(self, instance, private_data_dir):
|
||||
raise NotImplementedError
|
||||
|
||||
def build_credentials_list(self, instance):
|
||||
return []
|
||||
|
||||
@@ -1367,9 +1371,12 @@ class BaseTask(object):
|
||||
|
||||
self.instance.log_lifecycle("running_playbook")
|
||||
if isinstance(self.instance, SystemJob):
|
||||
cwd = self.build_cwd(self.instance, private_data_dir)
|
||||
res = ansible_runner.interface.run(
|
||||
project_dir=cwd, event_handler=self.event_handler, finished_callback=self.finished_callback, status_handler=self.status_handler, **params
|
||||
project_dir=settings.BASE_DIR,
|
||||
event_handler=self.event_handler,
|
||||
finished_callback=self.finished_callback,
|
||||
status_handler=self.status_handler,
|
||||
**params,
|
||||
)
|
||||
else:
|
||||
receptor_job = AWXReceptorJob(self, params)
|
||||
@@ -1528,8 +1535,8 @@ class RunJob(BaseTask):
|
||||
# Set environment variables for cloud credentials.
|
||||
cred_files = private_data_files.get('credentials', {})
|
||||
for cloud_cred in job.cloud_credentials:
|
||||
if cloud_cred and cloud_cred.credential_type.namespace == 'openstack':
|
||||
env['OS_CLIENT_CONFIG_FILE'] = os.path.join('/runner', os.path.basename(cred_files.get(cloud_cred, '')))
|
||||
if cloud_cred and cloud_cred.credential_type.namespace == 'openstack' and cred_files.get(cloud_cred, ''):
|
||||
env['OS_CLIENT_CONFIG_FILE'] = to_container_path(cred_files.get(cloud_cred, ''), private_data_dir)
|
||||
|
||||
for network_cred in job.network_credentials:
|
||||
env['ANSIBLE_NET_USERNAME'] = network_cred.get_input('username', default='')
|
||||
@@ -1561,8 +1568,7 @@ class RunJob(BaseTask):
|
||||
for path in config_values[config_setting].split(':'):
|
||||
if path not in paths:
|
||||
paths = [config_values[config_setting]] + paths
|
||||
# FIXME: again, figure out more elegant way for inside container
|
||||
paths = [os.path.join('/runner', folder)] + paths
|
||||
paths = [os.path.join(CONTAINER_ROOT, folder)] + paths
|
||||
env[env_key] = os.pathsep.join(paths)
|
||||
|
||||
return env
|
||||
@@ -1636,9 +1642,6 @@ class RunJob(BaseTask):
|
||||
|
||||
return args
|
||||
|
||||
def build_cwd(self, job, private_data_dir):
|
||||
return os.path.join(private_data_dir, 'project')
|
||||
|
||||
def build_playbook_path_relative_to_cwd(self, job, private_data_dir):
|
||||
return job.playbook
|
||||
|
||||
@@ -1709,6 +1712,10 @@ class RunJob(BaseTask):
|
||||
error = _('Job could not start because it does not have a valid project.')
|
||||
self.update_model(job.pk, status='failed', job_explanation=error)
|
||||
raise RuntimeError(error)
|
||||
elif job.execution_environment is None:
|
||||
error = _('Job could not start because no Execution Environment could be found.')
|
||||
self.update_model(job.pk, status='error', job_explanation=error)
|
||||
raise RuntimeError(error)
|
||||
elif job.project.status in ('error', 'failed'):
|
||||
msg = _('The project revision for this job template is unknown due to a failed update.')
|
||||
job = self.update_model(job.pk, status='failed', job_explanation=msg)
|
||||
@@ -1759,6 +1766,7 @@ class RunJob(BaseTask):
|
||||
)
|
||||
if branch_override:
|
||||
sync_metafields['scm_branch'] = job.scm_branch
|
||||
sync_metafields['scm_clean'] = True # to accomidate force pushes
|
||||
if 'update_' not in sync_metafields['job_tags']:
|
||||
sync_metafields['scm_revision'] = job_revision
|
||||
local_project_sync = job.project.create_project_update(_eager_fields=sync_metafields)
|
||||
@@ -2012,9 +2020,6 @@ class RunProjectUpdate(BaseTask):
|
||||
|
||||
self._write_extra_vars_file(private_data_dir, extra_vars)
|
||||
|
||||
def build_cwd(self, project_update, private_data_dir):
|
||||
return os.path.join(private_data_dir, 'project')
|
||||
|
||||
def build_playbook_path_relative_to_cwd(self, project_update, private_data_dir):
|
||||
return os.path.join('project_update.yml')
|
||||
|
||||
@@ -2384,8 +2389,7 @@ class RunInventoryUpdate(BaseTask):
|
||||
for path in config_values[config_setting].split(':'):
|
||||
if path not in paths:
|
||||
paths = [config_values[config_setting]] + paths
|
||||
# FIXME: containers
|
||||
paths = [os.path.join('/runner', folder)] + paths
|
||||
paths = [os.path.join(CONTAINER_ROOT, folder)] + paths
|
||||
env[env_key] = os.pathsep.join(paths)
|
||||
|
||||
return env
|
||||
@@ -2414,14 +2418,14 @@ class RunInventoryUpdate(BaseTask):
|
||||
|
||||
# Add arguments for the source inventory file/script/thing
|
||||
rel_path = self.pseudo_build_inventory(inventory_update, private_data_dir)
|
||||
container_location = os.path.join('/runner', rel_path) # TODO: make container paths elegant
|
||||
container_location = os.path.join(CONTAINER_ROOT, rel_path)
|
||||
source_location = os.path.join(private_data_dir, rel_path)
|
||||
|
||||
args.append('-i')
|
||||
args.append(container_location)
|
||||
|
||||
args.append('--output')
|
||||
args.append(os.path.join('/runner', 'artifacts', str(inventory_update.id), 'output.json'))
|
||||
args.append(os.path.join(CONTAINER_ROOT, 'artifacts', str(inventory_update.id), 'output.json'))
|
||||
|
||||
if os.path.isdir(source_location):
|
||||
playbook_dir = container_location
|
||||
@@ -2453,29 +2457,17 @@ class RunInventoryUpdate(BaseTask):
|
||||
if injector is not None:
|
||||
content = injector.inventory_contents(inventory_update, private_data_dir)
|
||||
# must be a statically named file
|
||||
inventory_path = os.path.join(private_data_dir, injector.filename)
|
||||
inventory_path = os.path.join(private_data_dir, 'inventory', injector.filename)
|
||||
with open(inventory_path, 'w') as f:
|
||||
f.write(content)
|
||||
os.chmod(inventory_path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
|
||||
|
||||
rel_path = injector.filename
|
||||
rel_path = os.path.join('inventory', injector.filename)
|
||||
elif src == 'scm':
|
||||
rel_path = os.path.join('project', inventory_update.source_path)
|
||||
|
||||
return rel_path
|
||||
|
||||
def build_cwd(self, inventory_update, private_data_dir):
|
||||
"""
|
||||
There is one case where the inventory "source" is in a different
|
||||
location from the private data:
|
||||
- SCM, where source needs to live in the project folder
|
||||
"""
|
||||
src = inventory_update.source
|
||||
container_dir = '/runner' # TODO: make container paths elegant
|
||||
if src == 'scm' and inventory_update.source_project_update:
|
||||
return os.path.join(container_dir, 'project')
|
||||
return container_dir
|
||||
|
||||
def build_playbook_path_relative_to_cwd(self, inventory_update, private_data_dir):
|
||||
return None
|
||||
|
||||
@@ -2751,9 +2743,6 @@ class RunAdHocCommand(BaseTask):
|
||||
module_args = sanitize_jinja(module_args)
|
||||
return module_args
|
||||
|
||||
def build_cwd(self, ad_hoc_command, private_data_dir):
|
||||
return private_data_dir
|
||||
|
||||
def build_playbook_path_relative_to_cwd(self, job, private_data_dir):
|
||||
return None
|
||||
|
||||
@@ -2818,9 +2807,6 @@ class RunSystemJob(BaseTask):
|
||||
env.update(base_env)
|
||||
return env
|
||||
|
||||
def build_cwd(self, instance, private_data_dir):
|
||||
return settings.BASE_DIR
|
||||
|
||||
def build_playbook_path_relative_to_cwd(self, job, private_data_dir):
|
||||
return None
|
||||
|
||||
@@ -2886,6 +2872,16 @@ def deep_copy_model_obj(model_module, model_name, obj_pk, new_obj_pk, user_pk, u
    update_inventory_computed_fields.delay(new_obj.id)


class TransmitterThread(threading.Thread):
    def run(self):
        self.exc = None

        try:
            super().run()
        except Exception:
            self.exc = sys.exc_info()


class AWXReceptorJob:
    def __init__(self, task=None, runner_params=None):
        self.task = task
@@ -2913,7 +2909,8 @@ class AWXReceptorJob:
        # reading.
        sockin, sockout = socket.socketpair()

        threading.Thread(target=self.transmit, args=[sockin]).start()
        transmitter_thread = TransmitterThread(target=self.transmit, args=[sockin])
        transmitter_thread.start()

        # submit our work, passing
        # in the right side of our socketpair for reading.
@@ -2923,6 +2920,11 @@ class AWXReceptorJob:
        sockin.close()
        sockout.close()

        if transmitter_thread.exc:
            raise transmitter_thread.exc[1].with_traceback(transmitter_thread.exc[2])

        transmitter_thread.join()

        resultsock, resultfile = receptor_ctl.get_work_results(self.unit_id, return_socket=True, return_sockfile=True)

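The pattern above, a Thread subclass that stashes sys.exc_info() so the parent thread can re-raise after the worker finishes, works independently of Receptor. A minimal self-contained sketch with a stand-in worker function; the names here are illustrative, not AWX APIs:

```python
import sys
import threading


class CapturingThread(threading.Thread):
    """Record any exception raised by the target so the caller can re-raise it."""

    def run(self):
        self.exc = None
        try:
            super().run()
        except Exception:
            self.exc = sys.exc_info()


def flaky_worker():
    raise ValueError("boom")  # stand-in for the real transmit work


t = CapturingThread(target=flaky_worker)
t.start()
t.join()
if t.exc:
    # Re-raise in the parent thread with the original traceback attached.
    raise t.exc[1].with_traceback(t.exc[2])
```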
# Both "processor" and "cancel_watcher" are spawned in separate threads.
|
||||
# We wait for the first one to return. If cancel_watcher returns first,
|
||||
@@ -2953,7 +2955,6 @@ class AWXReceptorJob:
|
||||
logger.warn(f"Could not launch pod for {log_name}. Exceeded quota.")
|
||||
self.task.update_model(self.task.instance.pk, status='pending')
|
||||
return
|
||||
|
||||
# If ansible-runner ran, but an error occured at runtime, the traceback information
|
||||
# is saved via the status_handler passed in to the processor.
|
||||
if state_name == 'Succeeded':
|
||||
@@ -2965,15 +2966,18 @@

    # Spawned in a thread so Receptor can start reading before we finish writing, we
    # write our payload to the left side of our socketpair.
    @cleanup_new_process
    def transmit(self, _socket):
        if not settings.IS_K8S and self.work_type == 'local':
            self.runner_params['only_transmit_kwargs'] = True

        ansible_runner.interface.run(streamer='transmit', _output=_socket.makefile('wb'), **self.runner_params)

        # Socket must be shutdown here, or the reader will hang forever.
        _socket.shutdown(socket.SHUT_WR)
        try:
            ansible_runner.interface.run(streamer='transmit', _output=_socket.makefile('wb'), **self.runner_params)
        finally:
            # Socket must be shutdown here, or the reader will hang forever.
            _socket.shutdown(socket.SHUT_WR)

    @cleanup_new_process
    def processor(self, resultfile):
        return ansible_runner.interface.run(
            streamer='process',

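The comment about the reader hanging is easy to demonstrate in isolation: with a socketpair, the reading side only sees EOF once the writer shuts down its end, which is why the shutdown now sits in a finally block. A small sketch of that behavior with no ansible-runner involved:

```python
import socket
import threading

sockin, sockout = socket.socketpair()


def writer():
    try:
        sockin.sendall(b"payload bytes")
    finally:
        # Without this, the reader below would block in recv() forever.
        sockin.shutdown(socket.SHUT_WR)


t = threading.Thread(target=writer)
t.start()

chunks = []
while True:
    data = sockout.recv(4096)
    if not data:  # EOF only arrives because the writer shut down its side
        break
    chunks.append(data)
t.join()
print(b"".join(chunks))  # b'payload bytes'
```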
@@ -2992,6 +2996,7 @@ class AWXReceptorJob:
|
||||
|
||||
receptor_params = {
|
||||
"secret_kube_pod": spec_yaml,
|
||||
"pod_pending_timeout": getattr(settings, 'AWX_CONTAINER_GROUP_POD_PENDING_TIMEOUT', "5m"),
|
||||
}
|
||||
|
||||
if self.credential:
|
||||
@@ -3015,6 +3020,7 @@ class AWXReceptorJob:
|
||||
|
||||
return work_type
|
||||
|
||||
@cleanup_new_process
|
||||
def cancel_watcher(self, processor_future):
|
||||
while True:
|
||||
if processor_future.done():
|
||||
@@ -3047,6 +3053,24 @@ class AWXReceptorJob:
|
||||
pod_spec['spec']['containers'][0]['image'] = ee.image
|
||||
pod_spec['spec']['containers'][0]['args'] = ['ansible-runner', 'worker', '--private-data-dir=/runner']
|
||||
|
||||
# Enforce EE Pull Policy
|
||||
pull_options = {"always": "Always", "missing": "IfNotPresent", "never": "Never"}
|
||||
if self.task and self.task.instance.execution_environment:
|
||||
if self.task.instance.execution_environment.pull:
|
||||
pod_spec['spec']['containers'][0]['imagePullPolicy'] = pull_options[self.task.instance.execution_environment.pull]
|
||||
|
||||
if self.task and self.task.instance.is_container_group_task:
|
||||
# If EE credential is passed, create an imagePullSecret
|
||||
if self.task.instance.execution_environment and self.task.instance.execution_environment.credential:
|
||||
# Create pull secret in k8s cluster based on ee cred
|
||||
from awx.main.scheduler.kubernetes import PodManager # prevent circular import
|
||||
|
||||
pm = PodManager(self.task.instance)
|
||||
secret_name = pm.create_secret(job=self.task.instance)
|
||||
|
||||
# Inject secret name into podspec
|
||||
pod_spec['spec']['imagePullSecrets'] = [{"name": secret_name}]
|
||||
|
||||
if self.task:
|
||||
pod_spec['metadata'] = deepmerge(
|
||||
pod_spec.get('metadata', {}),
|
||||
@@ -3057,7 +3081,7 @@ class AWXReceptorJob:
|
||||
|
||||
@property
|
||||
def pod_name(self):
|
||||
return f"awx-job-{self.task.instance.id}"
|
||||
return f"automation-job-{self.task.instance.id}"
|
||||
|
||||
@property
|
||||
def credential(self):
|
||||
|
||||
awx/main/tests/functional/models/test_host_metric.py (new file, 22 lines)
@@ -0,0 +1,22 @@
import pytest
from django.utils.timezone import now
from awx.main.models import HostMetric


@pytest.mark.django_db
def test_host_metrics_generation():
    hostnames = [f'Host {i}' for i in range(100)]
    current_time = now()
    HostMetric.objects.bulk_create([HostMetric(hostname=h, last_automation=current_time) for h in hostnames])

    # 3 assertions have to be made
    # 1) if all the objects were created or not
    assert HostMetric.objects.count() == len(hostnames)

    # 2) Match the hostnames stored in DB with the one passed in bulk_create
    assert sorted([s.hostname for s in HostMetric.objects.all()]) == sorted(hostnames)

    # 3) Make sure that first_automation attribute is today's date
    date_today = now().strftime('%Y-%m-%d')
    result = HostMetric.objects.filter(first_automation__startswith=date_today).count()
    assert result == len(hostnames)
@@ -9,6 +9,7 @@ from awx.main.tasks import RunInventoryUpdate
|
||||
from awx.main.models import InventorySource, Credential, CredentialType, UnifiedJob, ExecutionEnvironment
|
||||
from awx.main.constants import CLOUD_PROVIDERS, STANDARD_INVENTORY_UPDATE_ENV
|
||||
from awx.main.tests import data
|
||||
from awx.main.utils.execution_environments import to_container_path
|
||||
|
||||
from django.conf import settings
|
||||
|
||||
@@ -99,13 +100,19 @@ def read_content(private_data_dir, raw_env, inventory_update):
|
||||
dir_contents = {}
|
||||
referenced_paths = set()
|
||||
file_aliases = {}
|
||||
filename_list = sorted(os.listdir(private_data_dir), key=lambda fn: inverse_env.get(os.path.join(private_data_dir, fn), [fn])[0])
|
||||
filename_list = os.listdir(private_data_dir)
|
||||
for subdir in ('env', 'inventory'):
|
||||
if subdir in filename_list:
|
||||
filename_list.remove(subdir)
|
||||
for filename in os.listdir(os.path.join(private_data_dir, subdir)):
|
||||
filename_list.append(os.path.join(subdir, filename))
|
||||
filename_list = sorted(filename_list, key=lambda fn: inverse_env.get(os.path.join(private_data_dir, fn), [fn])[0])
|
||||
for filename in filename_list:
|
||||
if filename in ('args', 'project'):
|
||||
continue # Ansible runner
|
||||
abs_file_path = os.path.join(private_data_dir, filename)
|
||||
file_aliases[abs_file_path] = filename
|
||||
runner_path = os.path.join('/runner', os.path.basename(abs_file_path))
|
||||
runner_path = to_container_path(abs_file_path, private_data_dir)
|
||||
if runner_path in inverse_env:
|
||||
referenced_paths.add(abs_file_path)
|
||||
alias = 'file_reference'
|
||||
|
||||
@@ -123,6 +123,29 @@ def test_disallow_delete_when_notifications_pending(delete, user, notification_t
|
||||
assert response.status_code == 405
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_notification_template_list_includes_notification_errors(get, user, notification_template):
|
||||
Notification.objects.create(notification_template=notification_template, status='failed', error='failed to send')
|
||||
Notification.objects.create(notification_template=notification_template, status='pending')
|
||||
Notification.objects.create(notification_template=notification_template, status='successful')
|
||||
url = reverse('api:notification_template_list')
|
||||
u = user('superuser', True)
|
||||
response = get(url, user=u)
|
||||
|
||||
assert response.status_code == 200
|
||||
notifications = response.data['results'][0]['summary_fields']['recent_notifications']
|
||||
assert len(notifications) == 3
|
||||
statuses = [n['status'] for n in notifications]
|
||||
assert set(statuses) == set(['failed', 'pending', 'successful'])
|
||||
for n in notifications:
|
||||
if n['status'] == 'successful':
|
||||
assert n['error'] == ''
|
||||
elif n['status'] == 'pending':
|
||||
assert n['error'] == ''
|
||||
elif n['status'] == 'failed':
|
||||
assert n['error'] == 'failed to send'
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_custom_environment_injection(post, user, organization):
|
||||
u = user('admin-poster', True)
|
||||
|
||||
@@ -37,6 +37,7 @@ from awx.main.models.credential import ManagedCredentialType
|
||||
from awx.main import tasks
|
||||
from awx.main.utils import encrypt_field, encrypt_value
|
||||
from awx.main.utils.safe_yaml import SafeLoader
|
||||
from awx.main.utils.execution_environments import CONTAINER_ROOT, to_host_path
|
||||
|
||||
from awx.main.utils.licensing import Licenser
|
||||
|
||||
@@ -48,6 +49,10 @@ class TestJobExecution(object):
|
||||
@pytest.fixture
|
||||
def private_data_dir():
|
||||
private_data = tempfile.mkdtemp(prefix='awx_')
|
||||
for subfolder in ('inventory', 'env'):
|
||||
runner_subfolder = os.path.join(private_data, subfolder)
|
||||
if not os.path.exists(runner_subfolder):
|
||||
os.mkdir(runner_subfolder)
|
||||
yield private_data
|
||||
shutil.rmtree(private_data, True)
|
||||
|
||||
@@ -337,8 +342,8 @@ def pytest_generate_tests(metafunc):
|
||||
def parse_extra_vars(args, private_data_dir):
|
||||
extra_vars = {}
|
||||
for chunk in args:
|
||||
if chunk.startswith('@/runner/'):
|
||||
local_path = os.path.join(private_data_dir, os.path.basename(chunk.strip('@')))
|
||||
if chunk.startswith(f'@{CONTAINER_ROOT}'):
|
||||
local_path = chunk[len('@') :].replace(CONTAINER_ROOT, private_data_dir) # container path to host path
|
||||
with open(local_path, 'r') as f:
|
||||
extra_vars.update(yaml.load(f, Loader=SafeLoader))
|
||||
return extra_vars
|
||||
@@ -888,7 +893,7 @@ class TestJobCredentials(TestJobExecution):
|
||||
|
||||
if verify:
|
||||
assert env['K8S_AUTH_VERIFY_SSL'] == 'True'
|
||||
local_path = os.path.join(private_data_dir, os.path.basename(env['K8S_AUTH_SSL_CA_CERT']))
|
||||
local_path = to_host_path(env['K8S_AUTH_SSL_CA_CERT'], private_data_dir)
|
||||
cert = open(local_path, 'r').read()
|
||||
assert cert == 'CERTDATA'
|
||||
else:
|
||||
@@ -938,7 +943,7 @@ class TestJobCredentials(TestJobExecution):
|
||||
safe_env = {}
|
||||
credential.credential_type.inject_credential(credential, env, safe_env, [], private_data_dir)
|
||||
runner_path = env['GCE_CREDENTIALS_FILE_PATH']
|
||||
local_path = os.path.join(private_data_dir, os.path.basename(runner_path))
|
||||
local_path = to_host_path(runner_path, private_data_dir)
|
||||
json_data = json.load(open(local_path, 'rb'))
|
||||
assert json_data['type'] == 'service_account'
|
||||
assert json_data['private_key'] == self.EXAMPLE_PRIVATE_KEY
|
||||
@@ -1010,8 +1015,7 @@ class TestJobCredentials(TestJobExecution):
|
||||
env = task.build_env(job, private_data_dir, private_data_files=private_data_files)
|
||||
credential.credential_type.inject_credential(credential, env, {}, [], private_data_dir)
|
||||
|
||||
# convert container path to host machine path
|
||||
config_loc = os.path.join(private_data_dir, os.path.basename(env['OS_CLIENT_CONFIG_FILE']))
|
||||
config_loc = to_host_path(env['OS_CLIENT_CONFIG_FILE'], private_data_dir)
|
||||
shade_config = open(config_loc, 'r').read()
|
||||
assert shade_config == '\n'.join(
|
||||
[
|
||||
@@ -1046,7 +1050,8 @@ class TestJobCredentials(TestJobExecution):
|
||||
credential.credential_type.inject_credential(credential, env, safe_env, [], private_data_dir)
|
||||
|
||||
config = configparser.ConfigParser()
|
||||
config.read(os.path.join(private_data_dir, os.path.basename(env['OVIRT_INI_PATH'])))
|
||||
host_path = to_host_path(env['OVIRT_INI_PATH'], private_data_dir)
|
||||
config.read(host_path)
|
||||
assert config.get('ovirt', 'ovirt_url') == 'some-ovirt-host.example.org'
|
||||
assert config.get('ovirt', 'ovirt_username') == 'bob'
|
||||
assert config.get('ovirt', 'ovirt_password') == 'some-pass'
|
||||
@@ -1259,7 +1264,7 @@ class TestJobCredentials(TestJobExecution):
|
||||
env = {}
|
||||
credential.credential_type.inject_credential(credential, env, {}, [], private_data_dir)
|
||||
|
||||
path = os.path.join(private_data_dir, os.path.basename(env['MY_CLOUD_INI_FILE']))
|
||||
path = to_host_path(env['MY_CLOUD_INI_FILE'], private_data_dir)
|
||||
assert open(path, 'r').read() == '[mycloud]\nABC123'
|
||||
|
||||
def test_custom_environment_injectors_with_unicode_content(self, private_data_dir):
|
||||
@@ -1279,7 +1284,7 @@ class TestJobCredentials(TestJobExecution):
|
||||
env = {}
|
||||
credential.credential_type.inject_credential(credential, env, {}, [], private_data_dir)
|
||||
|
||||
path = os.path.join(private_data_dir, os.path.basename(env['MY_CLOUD_INI_FILE']))
|
||||
path = to_host_path(env['MY_CLOUD_INI_FILE'], private_data_dir)
|
||||
assert open(path, 'r').read() == value
|
||||
|
||||
def test_custom_environment_injectors_with_files(self, private_data_dir):
|
||||
@@ -1298,8 +1303,8 @@ class TestJobCredentials(TestJobExecution):
|
||||
env = {}
|
||||
credential.credential_type.inject_credential(credential, env, {}, [], private_data_dir)
|
||||
|
||||
cert_path = os.path.join(private_data_dir, os.path.basename(env['MY_CERT_INI_FILE']))
|
||||
key_path = os.path.join(private_data_dir, os.path.basename(env['MY_KEY_INI_FILE']))
|
||||
cert_path = to_host_path(env['MY_CERT_INI_FILE'], private_data_dir)
|
||||
key_path = to_host_path(env['MY_KEY_INI_FILE'], private_data_dir)
|
||||
assert open(cert_path, 'r').read() == '[mycert]\nCERT123'
|
||||
assert open(key_path, 'r').read() == '[mykey]\nKEY123'
|
||||
|
||||
@@ -1322,7 +1327,7 @@ class TestJobCredentials(TestJobExecution):
|
||||
assert env['AZURE_AD_USER'] == 'bob'
|
||||
assert env['AZURE_PASSWORD'] == 'secret'
|
||||
|
||||
path = os.path.join(private_data_dir, os.path.basename(env['GCE_CREDENTIALS_FILE_PATH']))
|
||||
path = to_host_path(env['GCE_CREDENTIALS_FILE_PATH'], private_data_dir)
|
||||
json_data = json.load(open(path, 'rb'))
|
||||
assert json_data['type'] == 'service_account'
|
||||
assert json_data['private_key'] == self.EXAMPLE_PRIVATE_KEY
|
||||
@@ -1703,7 +1708,7 @@ class TestInventoryUpdateCredentials(TestJobExecution):
|
||||
private_data_files = task.build_private_data_files(inventory_update, private_data_dir)
|
||||
env = task.build_env(inventory_update, private_data_dir, private_data_files)
|
||||
|
||||
path = os.path.join(private_data_dir, os.path.basename(env['OS_CLIENT_CONFIG_FILE']))
|
||||
path = to_host_path(env['OS_CLIENT_CONFIG_FILE'], private_data_dir)
|
||||
shade_config = open(path, 'r').read()
|
||||
assert (
|
||||
'\n'.join(
|
||||
@@ -1927,3 +1932,36 @@ def test_notification_job_finished(mocker):
|
||||
with mocker.patch('awx.main.models.UnifiedJob.objects.get', mocker.MagicMock(return_value=uj)):
|
||||
tasks.handle_success_and_failure_notifications(1)
|
||||
uj.send_notification_templates.assert_called()
|
||||
|
||||
|
||||
def test_job_run_no_ee():
|
||||
org = Organization(pk=1)
|
||||
proj = Project(pk=1, organization=org)
|
||||
job = Job(project=proj, organization=org, inventory=Inventory(pk=1))
|
||||
job.execution_environment = None
|
||||
task = tasks.RunJob()
|
||||
task.instance = job
|
||||
task.update_model = mock.Mock(return_value=job)
|
||||
task.model.objects.get = mock.Mock(return_value=job)
|
||||
|
||||
with mock.patch('awx.main.tasks.copy_tree'):
|
||||
with pytest.raises(RuntimeError) as e:
|
||||
task.pre_run_hook(job, private_data_dir)
|
||||
|
||||
update_model_call = task.update_model.call_args[1]
|
||||
assert update_model_call['status'] == 'error'
|
||||
assert 'Job could not start because no Execution Environment could be found' in str(e.value)
|
||||
|
||||
|
||||
def test_project_update_no_ee():
|
||||
org = Organization(pk=1)
|
||||
proj = Project(pk=1, organization=org)
|
||||
project_update = ProjectUpdate(pk=1, project=proj, scm_type='git')
|
||||
project_update.execution_environment = None
|
||||
task = tasks.RunProjectUpdate()
|
||||
task.instance = project_update
|
||||
|
||||
with pytest.raises(RuntimeError) as e:
|
||||
task.build_env(job, {})
|
||||
|
||||
assert 'The project could not sync because there is no Execution Environment' in str(e.value)
|
||||
|
||||
awx/main/tests/unit/utils/test_execution_environments.py (new file, 45 lines)
@@ -0,0 +1,45 @@
import pytest

from awx.main.utils.execution_environments import to_container_path, to_host_path


private_data_dir = '/tmp/pdd_iso/awx_xxx'


@pytest.mark.parametrize(
    'container_path,host_path',
    [
        ('/runner', private_data_dir),
        ('/runner/foo', '{0}/foo'.format(private_data_dir)),
        ('/runner/foo/bar', '{0}/foo/bar'.format(private_data_dir)),
        ('/runner{0}'.format(private_data_dir), '{0}{0}'.format(private_data_dir)),
    ],
)
def test_switch_paths(container_path, host_path):
    assert to_container_path(host_path, private_data_dir) == container_path
    assert to_host_path(container_path, private_data_dir) == host_path


@pytest.mark.parametrize(
    'container_path',
    [
        ('/foobar'),
        ('/runner/..'),
    ],
)
def test_invalid_container_path(container_path):
    with pytest.raises(RuntimeError):
        to_host_path(container_path, private_data_dir)


@pytest.mark.parametrize(
    'host_path',
    [
        ('/foobar'),
        ('/tmp/pdd_iso'),
        ('/tmp/pdd_iso/awx_xxx/..'),
    ],
)
def test_invalid_host_path(host_path):
    with pytest.raises(RuntimeError):
        to_container_path(host_path, private_data_dir)
@@ -26,6 +26,8 @@ from django.db.models.fields.related import ForeignObjectRel, ManyToManyField
from django.db.models.fields.related_descriptors import ForwardManyToOneDescriptor, ManyToManyDescriptor
from django.db.models.query import QuerySet
from django.db.models import Q
from django.db import connection as django_connection
from django.core.cache import cache as django_cache

# Django REST Framework
from rest_framework.exceptions import ParseError
@@ -85,6 +87,7 @@ __all__ = [
    'create_temporary_fifo',
    'truncate_stdout',
    'deepmerge',
    'cleanup_new_process',
]

@@ -1019,3 +1022,17 @@ def deepmerge(a, b):
        return a
    else:
        return b


def cleanup_new_process(func):
    """
    Cleanup django connection, cache connection, before executing new thread or processes entry point, func.
    """

    @wraps(func)
    def wrapper_cleanup_new_process(*args, **kwargs):
        django_connection.close()
        django_cache.close()
        return func(*args, **kwargs)

    return wrapper_cleanup_new_process

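A quick usage sketch: the decorator is meant to wrap the entry point handed to a new thread or process, so stale Django database and cache connections are closed before any work runs. The worker function below is illustrative, not part of AWX:

```python
import threading

from awx.main.utils import cleanup_new_process


@cleanup_new_process
def process_events(batch):
    # Runs with fresh DB/cache connections; anything inherited from the parent was closed first.
    print(f"processing {len(batch)} events")


threading.Thread(target=process_events, args=([{"event": "runner_on_ok"}],)).start()
```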
@@ -1,3 +1,6 @@
import os
from pathlib import Path

from django.conf import settings

from awx.main.models.execution_environments import ExecutionEnvironment
@@ -25,3 +28,32 @@ def get_default_pod_spec():
            ],
        },
    }


# this is the root of the private data dir as seen from inside
# of the container running a job
CONTAINER_ROOT = '/runner'


def to_container_path(path, private_data_dir):
    """Given a path inside of the host machine filesystem,
    this returns the expected path which would be observed by the job running
    inside of the EE container.
    This only handles the volume mount from private_data_dir to /runner
    """
    if not os.path.isabs(private_data_dir):
        raise RuntimeError('The private_data_dir path must be absolute')
    if private_data_dir != path and Path(private_data_dir) not in Path(path).resolve().parents:
        raise RuntimeError(f'Cannot convert path {path} unless it is a subdir of {private_data_dir}')
    return path.replace(private_data_dir, CONTAINER_ROOT, 1)


def to_host_path(path, private_data_dir):
    """Given a path inside of the EE container, this gives the absolute path
    on the host machine within the private_data_dir
    """
    if not os.path.isabs(private_data_dir):
        raise RuntimeError('The private_data_dir path must be absolute')
    if CONTAINER_ROOT != path and Path(CONTAINER_ROOT) not in Path(path).resolve().parents:
        raise RuntimeError(f'Cannot convert path {path} unless it is a subdir of {CONTAINER_ROOT}')
    return path.replace(CONTAINER_ROOT, private_data_dir, 1)

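A quick usage sketch of the two helpers, using the same illustrative private_data_dir as the unit tests above:

```python
from awx.main.utils.execution_environments import to_container_path, to_host_path

private_data_dir = '/tmp/pdd_iso/awx_xxx'  # illustrative value, as in the unit tests

# Host path -> path the job sees inside the EE container
to_container_path('/tmp/pdd_iso/awx_xxx/env/extravars', private_data_dir)  # '/runner/env/extravars'

# Container path -> path on the host
to_host_path('/runner/inventory/hosts', private_data_dir)  # '/tmp/pdd_iso/awx_xxx/inventory/hosts'
```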
@@ -47,11 +47,10 @@ def construct_rsyslog_conf_template(settings=settings):
        return tmpl

    if protocol.startswith('http'):
        scheme = 'https'
        # urlparse requires '//' to be provided if scheme is not specified
        original_parsed = urlparse.urlsplit(host)
        if (not original_parsed.scheme and not host.startswith('//')) or original_parsed.hostname is None:
            host = '%s://%s' % (scheme, host) if scheme else '//%s' % host
            host = 'https://%s' % (host)
        parsed = urlparse.urlsplit(host)

        host = escape_quotes(parsed.hostname)

@@ -156,6 +156,21 @@
|
||||
name: Install content with ansible-galaxy command if necessary
|
||||
tasks:
|
||||
|
||||
- name: Check content sync settings
|
||||
debug:
|
||||
msg: "Collection and role syncing disabled. Check the AWX_ROLES_ENABLED and AWX_COLLECTIONS_ENABLED settings and Galaxy credentials on the project's organization."
|
||||
when: not roles_enabled|bool and not collections_enabled|bool
|
||||
tags:
|
||||
- install_roles
|
||||
- install_collections
|
||||
|
||||
- name:
|
||||
meta: end_play
|
||||
when: not roles_enabled|bool and not collections_enabled|bool
|
||||
tags:
|
||||
- install_roles
|
||||
- install_collections
|
||||
|
||||
- block:
|
||||
- name: fetch galaxy roles from requirements.(yml/yaml)
|
||||
command: >
|
||||
|
||||
@@ -70,9 +70,9 @@ IS_K8S = False
|
||||
|
||||
RECEPTOR_RELEASE_WORK = True
|
||||
AWX_CONTAINER_GROUP_K8S_API_TIMEOUT = 10
|
||||
AWX_CONTAINER_GROUP_POD_LAUNCH_RETRIES = 100
|
||||
AWX_CONTAINER_GROUP_POD_LAUNCH_RETRY_DELAY = 5
|
||||
AWX_CONTAINER_GROUP_DEFAULT_NAMESPACE = os.getenv('MY_POD_NAMESPACE', 'default')
|
||||
# Timeout when waiting for pod to enter running state. If the pod is still in pending state , it will be terminated. Valid time units are "s", "m", "h". Example : "5m" , "10s".
|
||||
AWX_CONTAINER_GROUP_POD_PENDING_TIMEOUT = "5m"
|
||||
|
||||
# Internationalization
|
||||
# https://docs.djangoproject.com/en/dev/topics/i18n/
|
||||
@@ -125,10 +125,6 @@ LOGIN_URL = '/api/login/'
|
||||
# This directory should not be web-accessible.
|
||||
PROJECTS_ROOT = '/var/lib/awx/projects/'
|
||||
|
||||
# Absolute filesystem path to the directory to host collections for
|
||||
# running inventory imports
|
||||
AWX_ANSIBLE_COLLECTIONS_PATHS = os.path.join(BASE_DIR, 'vendor', 'awx_ansible_collections')
|
||||
|
||||
# Absolute filesystem path to the directory for job status stdout (default for
|
||||
# development and tests, default for production defined in production.py). This
|
||||
# directory should not be web-accessible
|
||||
@@ -364,7 +360,7 @@ AUTHENTICATION_BACKENDS = (
|
||||
'social_core.backends.github_enterprise.GithubEnterpriseTeamOAuth2',
|
||||
'social_core.backends.azuread.AzureADOAuth2',
|
||||
'awx.sso.backends.SAMLAuth',
|
||||
'django.contrib.auth.backends.ModelBackend',
|
||||
'awx.main.backends.AWXModelBackend',
|
||||
)
|
||||
|
||||
|
||||
@@ -716,6 +712,7 @@ CALLBACK_QUEUE = "callback_tasks"
|
||||
# Note: This setting may be overridden by database settings.
|
||||
ORG_ADMINS_CAN_SEE_ALL_USERS = True
|
||||
MANAGE_ORGANIZATION_AUTH = True
|
||||
DISABLE_LOCAL_AUTH = False
|
||||
|
||||
# Note: This setting may be overridden by database settings.
|
||||
TOWER_URL_BASE = "https://towerhost"
|
||||
@@ -913,6 +910,7 @@ MIDDLEWARE = [
|
||||
'django.middleware.common.CommonMiddleware',
|
||||
'django.middleware.csrf.CsrfViewMiddleware',
|
||||
'django.contrib.auth.middleware.AuthenticationMiddleware',
|
||||
'awx.main.middleware.DisableLocalAuthMiddleware',
|
||||
'django.contrib.messages.middleware.MessageMiddleware',
|
||||
'awx.sso.middleware.SocialAuthMiddleware',
|
||||
'crum.CurrentRequestUserMiddleware',
|
||||
|
||||
@@ -94,9 +94,6 @@ for setting in dir(this_module):
|
||||
include(optional('/etc/tower/settings.py'), scope=locals())
|
||||
include(optional('/etc/tower/conf.d/*.py'), scope=locals())
|
||||
|
||||
# Installed differently in Dockerfile compared to production versions
|
||||
AWX_ANSIBLE_COLLECTIONS_PATHS = '/var/lib/awx/vendor/awx_ansible_collections'
|
||||
|
||||
BASE_VENV_PATH = "/var/lib/awx/venv/"
|
||||
ANSIBLE_VENV_PATH = os.path.join(BASE_VENV_PATH, "ansible")
|
||||
AWX_VENV_PATH = os.path.join(BASE_VENV_PATH, "awx")
|
||||
|
||||
@@ -196,6 +196,7 @@ class AuthenticationBackendsField(fields.StringListField):
|
||||
],
|
||||
),
|
||||
('django.contrib.auth.backends.ModelBackend', []),
|
||||
('awx.main.backends.AWXModelBackend', []),
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
@@ -336,23 +336,21 @@ Internationalization leans on the [lingui](https://github.com/lingui/js-lingui)

The lingui library provides various React helpers for dealing with both marking strings for translation, and replacing strings that have been translated. For consistency and ease of use, we have consolidated on one pattern for the codebase. To set strings to be translated in the UI:

- import the withI18n function and wrap the export of your component in it (i.e. `export default withI18n()(Foo)`)
- doing the above gives you access to the i18n object on props. Make sure to put it in the scope of the function that contains strings needed to be translated (i.e. `const { i18n } = this.props;`)
- import the t template tag function from the @lingui/macro package.
- wrap your string using the following format: `` i18n._(t`String to be translated`) ``
- wrap your string using the following format: `` t`String to be translated` ``

**Note:** Variables that are put inside the t-marked template tag will not be translated. If you have a variable string with text that needs translating, you must wrap it in ` i18n._(t``) ` where it is defined.
**Note:** If you have a variable string with text that needs translating, you must wrap it in `` t`${variable} string` `` where it is defined. Then you must run `npm run extract-strings` to generate new `.po` files and submit those files along with your pull request.

**Note:** We try to avoid the `I18n` consumer, `i18nMark` function, or `<Trans>` component lingui gives us access to in this repo. i18nMark does not actually replace the string in the UI (leading to the potential for untranslated bugs), and the other helpers are redundant. Settling on a consistent, single pattern helps us ease the mental overhead of the need to understand the ins and outs of the lingui API.
**Note:** We try to avoid the `I18n` consumer, or `i18nMark` function lingui gives us access to in this repo. i18nMark does not actually replace the string in the UI (leading to the potential for untranslated bugs), and the other helpers are redundant. Settling on a consistent, single pattern helps us ease the mental overhead of the need to understand the ins and outs of the lingui API.

**Note:** Pluralization can be complicated so it is best to allow lingui handle cases where we have a string that may need to be pluralized based on number of items, or count. In that case lingui provides a `<Plural>` component, and a `plural()` function. See documentation [here](https://lingui.js.org/guides/plurals.html?highlight=pluralization).
**Note:** Pluralization can be complicated so it is best to allow lingui handle cases where we have a string that may need to be pluralized based on number of items, or count. In that case lingui provides a `<Plural>` component, and a `plural()` function. When adding or updating strings in a `<Plural/>` tag you must run `npm run extra-strings` and submit the new `.po` files with your pull request. See documentation [here](https://lingui.js.org/guides/plurals.html?highlight=pluralization).

You can learn more about the ways lingui and its React helpers at [this link](https://lingui.js.org/tutorials/react-patterns.html).

### Setting up .po files to give to translation team

1. `npm run add-locale` to add the language that you want to translate to (we should only have to do this once and the commit to repo afaik). Example: `npm run add-locale en es fr` # Add English, Spanish and French locale
2. `npm run extract-strings` to create .po files for each language specified. The .po files will be placed in src/locales. When updating strings that are used by `<Plural>` or `plural()` you will need to run this command to get the strings to render properly. This commmand will create `.po` files for each of the supported languages that will need to be commited with your PR.
2. `npm run extract-strings` to create .po files for each language specified. The .po files will be placed in src/locales. When updating strings that are used by `<Plural>` or `plural()` you will need to run this command to get the strings to render properly. This command will create `.po` files for each of the supported languages that will need to be committed with your PR.
3. Open up the .po file for the language you want to test and add some translations. In production we would pass this .po file off to the translation team.
4. Once you've edited your .po file (or we've gotten a .po file back from the translation team) run `npm run compile-strings`. This command takes the .po files and turns them into a minified JSON object and can be seen in the `messages.js` file in each locale directory. These files get loaded at the App root level (see: App.jsx).
5. Change the language in your browser and reload the page. You should see your specified translations in place of English strings.

@@ -62,26 +62,39 @@ npm --prefix awx/ui_next run test -- --coverage
- All commands are run on your host machine and not in the api development containers.


## Adding Dependencies
## Updating Dependencies
It is not uncommon to run the ui development tooling outside of the awx development
container. That said, dependencies should always be modified from within the
container to ensure consistency.

```shell
# add an exact development or build dependency
npm --prefix awx/ui_next install --save-dev --save-exact dev-package@1.2.3
# make sure the awx development container is running and open a shell
docker exec -it tools_awx_1 bash

# start with a fresh install of the current dependencies
(tools_awx_1)$ make clean-ui && npm --prefix=awx/ui_next ci

# add an exact development dependency
(tools_awx_1)$ npm --prefix awx/ui_next install --save-dev --save-exact dev-package@1.2.3

# add an exact production dependency
npm --prefix awx/ui_next install --save --save-exact prod-package@1.23
(tools_awx_1)$ npm --prefix awx/ui_next install --save --save-exact prod-package@1.23

# remove a development dependency
(tools_awx_1)$ npm --prefix awx/ui_next uninstall --save-dev dev-package

# remove a production dependency
(tools_awx_1)$ npm --prefix awx/ui_next uninstall --save prod-package

# exit the container
(tools_awx_1)$ exit

# add the updated package.json and package-lock.json files to scm
git add awx/ui_next_next/package.json awx/ui_next_next/package-lock.json
```

## Removing Dependencies
```shell
# remove a development or build dependency
npm --prefix awx/ui_next uninstall --save-dev dev-package

# remove a production dependency
npm --prefix awx/ui_next uninstall --save prod-package
```
#### Note:
- Building the ui can use up a lot of resources. If you're running docker for mac or similar
virtualization, the default memory limit may not be enough and you should increase it.

## Building for Production
```shell
awx/ui_next/package-lock.json (generated, 1445 lines; diff suppressed because it is too large)
@@ -6,9 +6,9 @@
|
||||
"node": "14.x"
|
||||
},
|
||||
"dependencies": {
|
||||
"@lingui/react": "^3.7.1",
|
||||
"@lingui/react": "3.9.0",
|
||||
"@patternfly/patternfly": "^4.102.1",
|
||||
"@patternfly/react-core": "^4.115.1",
|
||||
"@patternfly/react-core": "4.121.1",
|
||||
"@patternfly/react-icons": "4.7.22",
|
||||
"@patternfly/react-table": "^4.19.15",
|
||||
"ace-builds": "^1.4.12",
|
||||
@@ -26,6 +26,7 @@
|
||||
"react": "^16.13.1",
|
||||
"react-ace": "^9.3.0",
|
||||
"react-dom": "^16.13.1",
|
||||
"react-error-boundary": "^3.1.3",
|
||||
"react-router-dom": "^5.1.2",
|
||||
"react-virtualized": "^9.21.1",
|
||||
"rrule": "^2.6.4",
|
||||
@@ -64,8 +65,8 @@
|
||||
"prestart-instrumented": "lingui compile",
|
||||
"pretest": "lingui compile",
|
||||
"pretest-watch": "lingui compile",
|
||||
"start": "PORT=3001 HTTPS=true DANGEROUSLY_DISABLE_HOST_CHECK=true react-scripts start",
|
||||
"start-instrumented": "DEBUG=instrument-cra PORT=3001 HTTPS=true DANGEROUSLY_DISABLE_HOST_CHECK=true react-scripts -r @cypress/instrument-cra start",
|
||||
"start": "ESLINT_NO_DEV_ERRORS=true PORT=3001 HTTPS=true DANGEROUSLY_DISABLE_HOST_CHECK=true react-scripts start",
|
||||
"start-instrumented": "ESLINT_NO_DEV_ERRORS=true DEBUG=instrument-cra PORT=3001 HTTPS=true DANGEROUSLY_DISABLE_HOST_CHECK=true react-scripts -r @cypress/instrument-cra start",
|
||||
"build": "INLINE_RUNTIME_CHUNK=false react-scripts build",
|
||||
"test": "TZ='UTC' react-scripts test --watchAll=false",
|
||||
"test-watch": "TZ='UTC' react-scripts test",
|
||||
|
||||
@@ -2,10 +2,10 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en-US">
|
||||
<head>
|
||||
<title>{{ title }}</title>
|
||||
<title data-cy="migration-title">{{ title }}</title>
|
||||
<meta
|
||||
http-equiv="Content-Security-Policy"
|
||||
content="default-src 'self'; connect-src 'self' ws: wss:; style-src 'self' 'nonce-{{ csp_nonce }}'; script-src 'self' 'nonce-{{ csp_nonce }}' *.pendo.io; img-src 'self' *.pendo.io data:;"
|
||||
content="default-src 'self'; connect-src 'self' ws: wss:; style-src 'self' 'unsafe-inline'; script-src 'self' 'nonce-{{ csp_nonce }}' *.pendo.io; img-src 'self' *.pendo.io data:;"
|
||||
/>
|
||||
<meta charset="utf-8">
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge" />
|
||||
@@ -23,7 +23,7 @@
|
||||
<div class="pf-l-bullseye pf-m-gutter">
|
||||
<div class="pf-l-bullseye__item">
|
||||
<div class="pf-l-bullseye">
|
||||
<img src="{% static 'media/logo-header.svg' %}" width="300px" alt={{image_alt}} />
|
||||
<img src="{% static 'media/logo-black.svg' %}" width="300px" alt={{image_alt}} />
|
||||
</div>
|
||||
<div class="pf-l-bullseye">
|
||||
<span class="pf-c-spinner" role="progressbar" aria-valuetext={{aria_spinner}}>
|
||||
@@ -32,7 +32,7 @@
|
||||
<span class="pf-c-spinner__tail-ball"></span>
|
||||
</span>
|
||||
</div>
|
||||
<h2 class="pf-l-bullseye pf-c-title pf-m-2xl ws-heading ws-title ws-h2">{{message_upgrade}}</h2>
|
||||
<h2 data-cy="migration-message-upgrade" class="pf-l-bullseye pf-c-title pf-m-2xl ws-heading ws-title ws-h2">{{message_upgrade}}</h2>
|
||||
<h2 class="pf-l-bullseye pf-c-title pf-m-2xl ws-heading ws-title ws-h2">{{message_refresh}}</h2>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -7,6 +7,7 @@ import {
|
||||
Switch,
|
||||
Redirect,
|
||||
} from 'react-router-dom';
|
||||
import { ErrorBoundary } from 'react-error-boundary';
|
||||
import { I18nProvider } from '@lingui/react';
|
||||
import { i18n } from '@lingui/core';
|
||||
import { Card, PageSection } from '@patternfly/react-core';
|
||||
@@ -14,6 +15,7 @@ import { Card, PageSection } from '@patternfly/react-core';
|
||||
import { ConfigProvider, useAuthorizedPath } from './contexts/Config';
|
||||
import AppContainer from './components/AppContainer';
|
||||
import Background from './components/Background';
|
||||
import ContentError from './components/ContentError';
|
||||
import NotFound from './screens/NotFound';
|
||||
import Login from './screens/Login';
|
||||
|
||||
@@ -25,6 +27,16 @@ import Metrics from './screens/Metrics';
|
||||
import getRouteConfig from './routeConfig';
|
||||
import SubscriptionEdit from './screens/Setting/Subscription/SubscriptionEdit';
|
||||
|
||||
function ErrorFallback({ error }) {
|
||||
return (
|
||||
<PageSection>
|
||||
<Card>
|
||||
<ContentError error={error} />
|
||||
</Card>
|
||||
</PageSection>
|
||||
);
|
||||
}
|
||||
|
||||
const AuthorizedRoutes = ({ routeConfig }) => {
|
||||
const isAuthorized = useAuthorizedPath();
|
||||
const match = useRouteMatch();
|
||||
@@ -72,7 +84,11 @@ const AuthorizedRoutes = ({ routeConfig }) => {
|
||||
|
||||
const ProtectedRoute = ({ children, ...rest }) =>
|
||||
isAuthenticated(document.cookie) ? (
|
||||
<Route {...rest}>{children}</Route>
|
||||
<Route {...rest}>
|
||||
<ErrorBoundary FallbackComponent={ErrorFallback}>
|
||||
{children}
|
||||
</ErrorBoundary>
|
||||
</Route>
|
||||
) : (
|
||||
<Redirect to="/login" />
|
||||
);
|
||||
@@ -105,8 +121,8 @@ function App() {
|
||||
</Route>
|
||||
<ProtectedRoute>
|
||||
<ConfigProvider>
|
||||
<AppContainer navRouteConfig={getRouteConfig(i18n)}>
|
||||
<AuthorizedRoutes routeConfig={getRouteConfig(i18n)} />
|
||||
<AppContainer navRouteConfig={getRouteConfig()}>
|
||||
<AuthorizedRoutes routeConfig={getRouteConfig()} />
|
||||
</AppContainer>
|
||||
</ConfigProvider>
|
||||
</ProtectedRoute>
|
||||
|
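The ProtectedRoute change above wraps every authenticated screen in react-error-boundary so that a render-time crash inside one screen surfaces as the ContentError card instead of blanking the whole app. A minimal standalone sketch of the same pattern; everything other than the ErrorBoundary API is illustrative:

    import React from 'react';
    import { ErrorBoundary } from 'react-error-boundary';

    // FallbackComponent receives the thrown error; here it is simply printed.
    function Fallback({ error }) {
      return <div role="alert">{error.message}</div>;
    }

    // Simulates a screen whose render throws.
    function Crashes() {
      throw new Error('boom');
    }

    // The boundary catches the render error and swaps in <Fallback />,
    // so sibling routes and the surrounding chrome keep working.
    export default function Example() {
      return (
        <ErrorBoundary FallbackComponent={Fallback}>
          <Crashes />
        </ErrorBoundary>
      );
    }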
||||
@@ -1,12 +1,11 @@
|
||||
import React from 'react';
|
||||
import PropTypes from 'prop-types';
|
||||
import { withI18n } from '@lingui/react';
|
||||
import { t } from '@lingui/macro';
|
||||
import { AboutModal } from '@patternfly/react-core';
|
||||
|
||||
import { BrandName } from '../../variables';
|
||||
|
||||
function About({ version, isOpen, onClose, i18n }) {
|
||||
function About({ version, isOpen, onClose }) {
|
||||
const createSpeechBubble = () => {
|
||||
let text = `${BrandName} ${version}`;
|
||||
let top = '';
|
||||
@@ -25,8 +24,8 @@ function About({ version, isOpen, onClose, i18n }) {
|
||||
};
|
||||
|
||||
const speechBubble = createSpeechBubble();
|
||||
const copyright = i18n._(t`Copyright`);
|
||||
const redHatInc = i18n._(t`Red Hat, Inc.`);
|
||||
const copyright = t`Copyright`;
|
||||
const redHatInc = t`Red Hat, Inc.`;
|
||||
|
||||
return (
|
||||
<AboutModal
|
||||
@@ -35,7 +34,7 @@ function About({ version, isOpen, onClose, i18n }) {
|
||||
productName={`Ansible ${BrandName}`}
|
||||
trademark={`${copyright} ${new Date().getFullYear()} ${redHatInc}`}
|
||||
brandImageSrc="/static/media/logo-header.svg"
|
||||
brandImageAlt={i18n._(t`Brand Image`)}
|
||||
brandImageAlt={t`Brand Image`}
|
||||
>
|
||||
<pre>
|
||||
{speechBubble}
|
||||
@@ -63,4 +62,4 @@ About.defaultProps = {
|
||||
version: null,
|
||||
};
|
||||
|
||||
export default withI18n()(About);
|
||||
export default About;
|
||||
|
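The About.js hunk is representative of the i18n change repeated throughout this compare: the Lingui v2 style of injecting i18n through the withI18n() HOC and calling i18n._(t`...`) is replaced by the v3 macro, where t`...` alone compiles to a lookup against the active i18n instance. A condensed before/after sketch with an illustrative component:

    // Before (Lingui v2): i18n had to be threaded through props.
    // import { withI18n } from '@lingui/react';
    // import { t } from '@lingui/macro';
    // function Greeting({ i18n }) {
    //   return <h1>{i18n._(t`Hello`)}</h1>;
    // }
    // export default withI18n()(Greeting);

    // After (Lingui v3): the macro resolves the message itself, so the
    // component takes no i18n prop and needs no HOC.
    import React from 'react';
    import { t } from '@lingui/macro';

    function Greeting() {
      return <h1>{t`Hello`}</h1>;
    }

    export default Greeting;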
||||
@@ -1,6 +1,6 @@
|
||||
import React, { useCallback, useEffect, useState, useContext } from 'react';
|
||||
import { useHistory, useParams } from 'react-router-dom';
|
||||
import { withI18n } from '@lingui/react';
|
||||
|
||||
import { t } from '@lingui/macro';
|
||||
import PropTypes from 'prop-types';
|
||||
import { Button, DropdownItem } from '@patternfly/react-core';
|
||||
@@ -14,7 +14,7 @@ import AdHocCommandsWizard from './AdHocCommandsWizard';
|
||||
import { KebabifiedContext } from '../../contexts/Kebabified';
|
||||
import ContentError from '../ContentError';
|
||||
|
||||
function AdHocCommands({ adHocItems, i18n, hasListItems, onLaunchLoading }) {
|
||||
function AdHocCommands({ adHocItems, hasListItems, onLaunchLoading }) {
|
||||
const history = useHistory();
|
||||
const { id } = useParams();
|
||||
|
||||
@@ -22,11 +22,11 @@ function AdHocCommands({ adHocItems, i18n, hasListItems, onLaunchLoading }) {
|
||||
const { isKebabified, onKebabModalChange } = useContext(KebabifiedContext);
|
||||
|
||||
const verbosityOptions = [
|
||||
{ value: '0', key: '0', label: i18n._(t`0 (Normal)`) },
|
||||
{ value: '1', key: '1', label: i18n._(t`1 (Verbose)`) },
|
||||
{ value: '2', key: '2', label: i18n._(t`2 (More Verbose)`) },
|
||||
{ value: '3', key: '3', label: i18n._(t`3 (Debug)`) },
|
||||
{ value: '4', key: '4', label: i18n._(t`4 (Connection Debug)`) },
|
||||
{ value: '0', key: '0', label: t`0 (Normal)` },
|
||||
{ value: '1', key: '1', label: t`1 (Verbose)` },
|
||||
{ value: '2', key: '2', label: t`2 (More Verbose)` },
|
||||
{ value: '3', key: '3', label: t`3 (Debug)` },
|
||||
{ value: '4', key: '4', label: t`4 (Connection Debug)` },
|
||||
];
|
||||
useEffect(() => {
|
||||
if (isKebabified) {
|
||||
@@ -102,7 +102,7 @@ function AdHocCommands({ adHocItems, i18n, hasListItems, onLaunchLoading }) {
|
||||
<AlertModal
|
||||
isOpen={error}
|
||||
variant="error"
|
||||
title={i18n._(t`Error!`)}
|
||||
title={t`Error!`}
|
||||
onClose={() => {
|
||||
dismissError();
|
||||
setIsWizardOpen(false);
|
||||
@@ -110,7 +110,7 @@ function AdHocCommands({ adHocItems, i18n, hasListItems, onLaunchLoading }) {
|
||||
>
|
||||
{launchError ? (
|
||||
<>
|
||||
{i18n._(t`Failed to launch job.`)}
|
||||
{t`Failed to launch job.`}
|
||||
<ErrorDetail error={error} />
|
||||
</>
|
||||
) : (
|
||||
@@ -128,20 +128,20 @@ function AdHocCommands({ adHocItems, i18n, hasListItems, onLaunchLoading }) {
|
||||
key="cancel-job"
|
||||
isDisabled={isAdHocDisabled || !hasListItems}
|
||||
component="button"
|
||||
aria-label={i18n._(t`Run Command`)}
|
||||
aria-label={t`Run Command`}
|
||||
onClick={() => setIsWizardOpen(true)}
|
||||
>
|
||||
{i18n._(t`Run Command`)}
|
||||
{t`Run Command`}
|
||||
</DropdownItem>
|
||||
) : (
|
||||
<Button
|
||||
ouiaId="run-command-button"
|
||||
variant="secondary"
|
||||
aria-label={i18n._(t`Run Command`)}
|
||||
aria-label={t`Run Command`}
|
||||
onClick={() => setIsWizardOpen(true)}
|
||||
isDisabled={isAdHocDisabled || !hasListItems}
|
||||
>
|
||||
{i18n._(t`Run Command`)}
|
||||
{t`Run Command`}
|
||||
</Button>
|
||||
)}
|
||||
|
||||
@@ -166,4 +166,4 @@ AdHocCommands.propTypes = {
|
||||
hasListItems: PropTypes.bool.isRequired,
|
||||
};
|
||||
|
||||
export default withI18n()(AdHocCommands);
|
||||
export default AdHocCommands;
|
||||
|
||||
@@ -206,7 +206,8 @@ describe('<AdHocCommands />', () => {
|
||||
|
||||
await act(async () => {
|
||||
wrapper
|
||||
.find('input[aria-labelledby="check-action-item-2"]')
|
||||
.find('td#check-action-item-2')
|
||||
.find('input')
|
||||
.simulate('change', { target: { checked: true } });
|
||||
});
|
||||
|
||||
@@ -224,7 +225,8 @@ describe('<AdHocCommands />', () => {
|
||||
|
||||
await act(async () => {
|
||||
wrapper
|
||||
.find('input[aria-labelledby="check-action-item-4"]')
|
||||
.find('td#check-action-item-4')
|
||||
.find('input')
|
||||
.simulate('change', { target: { checked: true } });
|
||||
});
|
||||
|
||||
@@ -373,7 +375,8 @@ describe('<AdHocCommands />', () => {
|
||||
|
||||
await act(async () => {
|
||||
wrapper
|
||||
.find('input[aria-labelledby="check-action-item-2"]')
|
||||
.find('td#check-action-item-2')
|
||||
.find('input')
|
||||
.simulate('change', {
|
||||
target: {
|
||||
checked: true,
|
||||
@@ -395,7 +398,8 @@ describe('<AdHocCommands />', () => {
|
||||
|
||||
await act(async () => {
|
||||
wrapper
|
||||
.find('input[aria-labelledby="check-action-item-4"]')
|
||||
.find('td#check-action-item-4')
|
||||
.find('input')
|
||||
.simulate('change', {
|
||||
target: {
|
||||
checked: true,
|
||||
|
||||
@@ -87,7 +87,7 @@ function AdHocCommandsWizard({
|
||||
<AdHocExecutionEnvironmentStep organizationId={organizationId} />
|
||||
),
|
||||
// Remove this line when https://github.com/patternfly/patternfly-react/issues/5729 is fixed
|
||||
stepNavItemProps: { style: { 'white-space': 'nowrap' } },
|
||||
stepNavItemProps: { style: { whiteSpace: 'nowrap' } },
|
||||
enableNext: true,
|
||||
nextButtonText: t`Next`,
|
||||
canJumpTo: currentStepId >= 2,
|
||||
|
||||
@@ -153,7 +153,8 @@ describe('<AdHocCommandsWizard/>', () => {
|
||||
|
||||
await act(async () => {
|
||||
wrapper
|
||||
.find('input[aria-labelledby="check-action-item-1"]')
|
||||
.find('td#check-action-item-1')
|
||||
.find('input')
|
||||
.simulate('change', { target: { checked: true } });
|
||||
});
|
||||
|
||||
@@ -178,7 +179,8 @@ describe('<AdHocCommandsWizard/>', () => {
|
||||
expect(wrapper.find('Button[type="submit"]').prop('isDisabled')).toBe(true);
|
||||
await act(async () => {
|
||||
wrapper
|
||||
.find('input[aria-labelledby="check-action-item-1"]')
|
||||
.find('td#check-action-item-1')
|
||||
.find('input')
|
||||
.simulate('change', { target: { checked: true } });
|
||||
});
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import React, { useEffect, useCallback } from 'react';
|
||||
import { useHistory } from 'react-router-dom';
|
||||
import { withI18n } from '@lingui/react';
|
||||
|
||||
import { t } from '@lingui/macro';
|
||||
import PropTypes from 'prop-types';
|
||||
import { useField } from 'formik';
|
||||
@@ -21,7 +21,7 @@ const QS_CONFIG = getQSConfig('credentials', {
|
||||
order_by: 'name',
|
||||
});
|
||||
|
||||
function AdHocCredentialStep({ i18n, credentialTypeId, onEnableLaunch }) {
|
||||
function AdHocCredentialStep({ credentialTypeId, onEnableLaunch }) {
|
||||
const history = useHistory();
|
||||
const {
|
||||
error,
|
||||
@@ -52,7 +52,7 @@ function AdHocCredentialStep({ i18n, credentialTypeId, onEnableLaunch }) {
|
||||
|
||||
const [credentialField, credentialMeta, credentialHelpers] = useField({
|
||||
name: 'credential',
|
||||
validate: required(null, i18n),
|
||||
validate: required(null),
|
||||
});
|
||||
if (error) {
|
||||
return <ContentError error={error} />;
|
||||
@@ -64,8 +64,8 @@ function AdHocCredentialStep({ i18n, credentialTypeId, onEnableLaunch }) {
|
||||
<Form>
|
||||
<FormGroup
|
||||
fieldId="credential"
|
||||
label={i18n._(t`Machine Credential`)}
|
||||
aria-label={i18n._(t`Machine Credential`)}
|
||||
label={t`Machine Credential`}
|
||||
aria-label={t`Machine Credential`}
|
||||
isRequired
|
||||
validated={
|
||||
!credentialMeta.touched || !credentialMeta.error ? 'default' : 'error'
|
||||
@@ -73,9 +73,7 @@ function AdHocCredentialStep({ i18n, credentialTypeId, onEnableLaunch }) {
|
||||
helperTextInvalid={credentialMeta.error}
|
||||
labelIcon={
|
||||
<Popover
|
||||
content={i18n._(
|
||||
t`Select the credential you want to use when accessing the remote hosts to run the command. Choose the credential containing the username and SSH key or password that Ansible will need to log into the remote hosts.`
|
||||
)}
|
||||
content={t`Select the credential you want to use when accessing the remote hosts to run the command. Choose the credential containing the username and SSH key or password that Ansible will need to log into the remote hosts.`}
|
||||
/>
|
||||
}
|
||||
>
|
||||
@@ -83,27 +81,27 @@ function AdHocCredentialStep({ i18n, credentialTypeId, onEnableLaunch }) {
|
||||
value={credentialField.value || []}
|
||||
options={credentials}
|
||||
optionCount={credentialCount}
|
||||
header={i18n._(t`Machine Credential`)}
|
||||
header={t`Machine Credential`}
|
||||
readOnly
|
||||
qsConfig={QS_CONFIG}
|
||||
searchColumns={[
|
||||
{
|
||||
name: i18n._(t`Name`),
|
||||
name: t`Name`,
|
||||
key: 'name',
|
||||
isDefault: true,
|
||||
},
|
||||
{
|
||||
name: i18n._(t`Created By (Username)`),
|
||||
name: t`Created By (Username)`,
|
||||
key: 'created_by__username',
|
||||
},
|
||||
{
|
||||
name: i18n._(t`Modified By (Username)`),
|
||||
name: t`Modified By (Username)`,
|
||||
key: 'modified_by__username',
|
||||
},
|
||||
]}
|
||||
sortColumns={[
|
||||
{
|
||||
name: i18n._(t`Name`),
|
||||
name: t`Name`,
|
||||
key: 'name',
|
||||
},
|
||||
]}
|
||||
@@ -125,4 +123,4 @@ AdHocCredentialStep.propTypes = {
|
||||
credentialTypeId: PropTypes.number.isRequired,
|
||||
onEnableLaunch: PropTypes.func.isRequired,
|
||||
};
|
||||
export default withI18n()(AdHocCredentialStep);
|
||||
export default AdHocCredentialStep;
|
||||
|
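Dropping i18n from required(null, i18n) follows from the same macro change: a field-level validator can now produce its translated default message internally instead of receiving i18n as an argument. A hedged sketch of what such a helper might look like; the project's real validator lives in its validators util and may differ in message text and edge cases:

    import { t } from '@lingui/macro';

    // Returns a Formik-style field validator. `message` overrides the
    // translated default; the old i18n parameter is no longer needed.
    function required(message) {
      return value => {
        const isMissing =
          value === undefined || value === null || String(value).trim() === '';
        if (isMissing) {
          return message || t`This field must not be blank`;
        }
        return undefined; // no error
      };
    }

    // Usage mirroring the hunk above:
    // const [field, meta] = useField({ name: 'credential', validate: required(null) });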
||||
@@ -1,6 +1,6 @@
|
||||
/* eslint-disable react/no-unescaped-entities */
|
||||
import React from 'react';
|
||||
import { withI18n } from '@lingui/react';
|
||||
|
||||
import { t } from '@lingui/macro';
|
||||
import PropTypes from 'prop-types';
|
||||
import { useField } from 'formik';
|
||||
@@ -28,10 +28,10 @@ const TooltipWrapper = styled.div`
|
||||
// in failing tests.
|
||||
const brandName = BrandName;
|
||||
|
||||
function AdHocDetailsStep({ i18n, verbosityOptions, moduleOptions }) {
|
||||
function AdHocDetailsStep({ verbosityOptions, moduleOptions }) {
|
||||
const [moduleNameField, moduleNameMeta, moduleNameHelpers] = useField({
|
||||
name: 'module_name',
|
||||
validate: required(null, i18n),
|
||||
validate: required(null),
|
||||
});
|
||||
|
||||
const [variablesField] = useField('extra_vars');
|
||||
@@ -41,14 +41,14 @@ function AdHocDetailsStep({ i18n, verbosityOptions, moduleOptions }) {
|
||||
);
|
||||
const [verbosityField, verbosityMeta, verbosityHelpers] = useField({
|
||||
name: 'verbosity',
|
||||
validate: required(null, i18n),
|
||||
validate: required(null),
|
||||
});
|
||||
|
||||
const argumentsRequired =
|
||||
moduleNameField.value === 'command' || moduleNameField.value === 'shell';
|
||||
const [, argumentsMeta, argumentsHelpers] = useField({
|
||||
name: 'module_args',
|
||||
validate: argumentsRequired && required(null, i18n),
|
||||
validate: argumentsRequired && required(null),
|
||||
});
|
||||
|
||||
const isValid = !argumentsMeta.error || !argumentsMeta.touched;
|
||||
@@ -59,8 +59,8 @@ function AdHocDetailsStep({ i18n, verbosityOptions, moduleOptions }) {
|
||||
<FormFullWidthLayout>
|
||||
<FormGroup
|
||||
fieldId="module_name"
|
||||
aria-label={i18n._(t`select module`)}
|
||||
label={i18n._(t`Module`)}
|
||||
aria-label={t`select module`}
|
||||
label={t`Module`}
|
||||
isRequired
|
||||
helperTextInvalid={moduleNameMeta.error}
|
||||
validated={
|
||||
@@ -70,22 +70,20 @@ function AdHocDetailsStep({ i18n, verbosityOptions, moduleOptions }) {
|
||||
}
|
||||
labelIcon={
|
||||
<Popover
|
||||
content={i18n._(
|
||||
t`These are the modules that ${brandName} supports running commands against.`
|
||||
)}
|
||||
content={t`These are the modules that ${brandName} supports running commands against.`}
|
||||
/>
|
||||
}
|
||||
>
|
||||
<AnsibleSelect
|
||||
{...moduleNameField}
|
||||
placeHolder={i18n._(t`Select a module`)}
|
||||
placeHolder={t`Select a module`}
|
||||
isValid={!moduleNameMeta.touched || !moduleNameMeta.error}
|
||||
id="module_name"
|
||||
data={[
|
||||
{
|
||||
value: '',
|
||||
key: '',
|
||||
label: i18n._(t`Choose a module`),
|
||||
label: t`Choose a module`,
|
||||
isDisabled: true,
|
||||
},
|
||||
...moduleOptions.map(value => ({
|
||||
@@ -105,9 +103,9 @@ function AdHocDetailsStep({ i18n, verbosityOptions, moduleOptions }) {
|
||||
<FormField
|
||||
id="module_args"
|
||||
name="module_args"
|
||||
aria-label={i18n._(t`Arguments`)}
|
||||
aria-label={t`Arguments`}
|
||||
type="text"
|
||||
label={i18n._(t`Arguments`)}
|
||||
label={t`Arguments`}
|
||||
validated={isValid ? 'default' : 'error'}
|
||||
onBlur={() => argumentsHelpers.setTouched(true)}
|
||||
isRequired={
|
||||
@@ -117,27 +115,25 @@ function AdHocDetailsStep({ i18n, verbosityOptions, moduleOptions }) {
|
||||
tooltip={
|
||||
moduleNameField.value ? (
|
||||
<>
|
||||
{i18n._(
|
||||
t`These arguments are used with the specified module. You can find information about ${moduleNameField.value} by clicking `
|
||||
)}
|
||||
{t`These arguments are used with the specified module. You can find information about ${moduleNameField.value} by clicking `}
|
||||
<a
|
||||
href={`https://docs.ansible.com/ansible/latest/modules/${moduleNameField.value}_module.html`}
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
>
|
||||
{' '}
|
||||
{i18n._(t`here.`)}
|
||||
{t`here.`}
|
||||
</a>
|
||||
</>
|
||||
) : (
|
||||
i18n._(t`These arguments are used with the specified module.`)
|
||||
t`These arguments are used with the specified module.`
|
||||
)
|
||||
}
|
||||
/>
|
||||
<FormGroup
|
||||
fieldId="verbosity"
|
||||
aria-label={i18n._(t`select verbosity`)}
|
||||
label={i18n._(t`Verbosity`)}
|
||||
aria-label={t`select verbosity`}
|
||||
label={t`Verbosity`}
|
||||
isRequired
|
||||
validated={
|
||||
!verbosityMeta.touched || !verbosityMeta.error
|
||||
@@ -147,9 +143,7 @@ function AdHocDetailsStep({ i18n, verbosityOptions, moduleOptions }) {
|
||||
helperTextInvalid={verbosityMeta.error}
|
||||
labelIcon={
|
||||
<Popover
|
||||
content={i18n._(
|
||||
t`These are the verbosity levels for standard out of the command run that are supported.`
|
||||
)}
|
||||
content={t`These are the verbosity levels for standard out of the command run that are supported.`}
|
||||
/>
|
||||
}
|
||||
>
|
||||
@@ -167,19 +161,17 @@ function AdHocDetailsStep({ i18n, verbosityOptions, moduleOptions }) {
|
||||
id="limit"
|
||||
name="limit"
|
||||
type="text"
|
||||
label={i18n._(t`Limit`)}
|
||||
aria-label={i18n._(t`Limit`)}
|
||||
label={t`Limit`}
|
||||
aria-label={t`Limit`}
|
||||
tooltip={
|
||||
<span>
|
||||
{i18n._(
|
||||
t`The pattern used to target hosts in the inventory. Leaving the field blank, all, and * will all target all hosts in the inventory. You can find more information about Ansible's host patterns`
|
||||
)}{' '}
|
||||
{t`The pattern used to target hosts in the inventory. Leaving the field blank, all, and * will all target all hosts in the inventory. You can find more information about Ansible's host patterns`}{' '}
|
||||
<a
|
||||
href="https://docs.ansible.com/ansible/latest/user_guide/intro_patterns.html"
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
>
|
||||
{i18n._(t`here`)}
|
||||
{t`here`}
|
||||
</a>
|
||||
</span>
|
||||
}
|
||||
@@ -189,67 +181,63 @@ function AdHocDetailsStep({ i18n, verbosityOptions, moduleOptions }) {
|
||||
name="forks"
|
||||
type="number"
|
||||
min="0"
|
||||
label={i18n._(t`Forks`)}
|
||||
aria-label={i18n._(t`Forks`)}
|
||||
label={t`Forks`}
|
||||
aria-label={t`Forks`}
|
||||
tooltip={
|
||||
<span>
|
||||
{i18n._(
|
||||
t`The number of parallel or simultaneous processes to use while executing the playbook. Inputting no value will use the default value from the ansible configuration file. You can find more information`
|
||||
)}{' '}
|
||||
{t`The number of parallel or simultaneous processes to use while executing the playbook. Inputting no value will use the default value from the ansible configuration file. You can find more information`}{' '}
|
||||
<a
|
||||
href="https://docs.ansible.com/ansible/latest/installation_guide/intro_configuration.html#the-ansible-configuration-file"
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
>
|
||||
{i18n._(t`here.`)}
|
||||
{t`here.`}
|
||||
</a>
|
||||
</span>
|
||||
}
|
||||
/>
|
||||
<FormColumnLayout>
|
||||
<FormGroup
|
||||
label={i18n._(t`Show changes`)}
|
||||
aria-label={i18n._(t`Show changes`)}
|
||||
label={t`Show changes`}
|
||||
aria-label={t`Show changes`}
|
||||
labelIcon={
|
||||
<Popover
|
||||
content={i18n._(
|
||||
t`If enabled, show the changes made by Ansible tasks, where supported. This is equivalent to Ansible’s --diff mode.`
|
||||
)}
|
||||
content={t`If enabled, show the changes made by Ansible tasks, where supported. This is equivalent to Ansible’s --diff mode.`}
|
||||
/>
|
||||
}
|
||||
>
|
||||
<Switch
|
||||
css="display: inline-flex;"
|
||||
id="diff_mode"
|
||||
label={i18n._(t`On`)}
|
||||
labelOff={i18n._(t`Off`)}
|
||||
label={t`On`}
|
||||
labelOff={t`Off`}
|
||||
isChecked={diffModeField.value}
|
||||
onChange={() => {
|
||||
diffModeHelpers.setValue(!diffModeField.value);
|
||||
}}
|
||||
aria-label={i18n._(t`toggle changes`)}
|
||||
aria-label={t`toggle changes`}
|
||||
/>
|
||||
</FormGroup>
|
||||
<FormGroup name="become_enabled" fieldId="become_enabled">
|
||||
<FormCheckboxLayout>
|
||||
<Checkbox
|
||||
aria-label={i18n._(t`Enable privilege escalation`)}
|
||||
aria-label={t`Enable privilege escalation`}
|
||||
label={
|
||||
<span>
|
||||
{i18n._(t`Enable privilege escalation`)}
|
||||
{t`Enable privilege escalation`}
|
||||
|
||||
<Popover
|
||||
content={
|
||||
<p>
|
||||
{i18n._(t`Enables creation of a provisioning
|
||||
{t`Enables creation of a provisioning
|
||||
callback URL. Using the URL a host can contact ${brandName}
|
||||
and request a configuration update using this job
|
||||
template`)}
|
||||
template`}
|
||||
|
||||
<code>--become </code>
|
||||
{i18n._(t`option to the`)}
|
||||
{t`option to the`}
|
||||
<code>ansible </code>
|
||||
{i18n._(t`command`)}
|
||||
{t`command`}
|
||||
</p>
|
||||
}
|
||||
/>
|
||||
@@ -275,14 +263,12 @@ function AdHocDetailsStep({ i18n, verbosityOptions, moduleOptions }) {
|
||||
tooltip={
|
||||
<TooltipWrapper>
|
||||
<p>
|
||||
{i18n._(
|
||||
t`Pass extra command line changes. There are two ansible command line parameters: `
|
||||
)}
|
||||
{t`Pass extra command line changes. There are two ansible command line parameters: `}
|
||||
<br />
|
||||
<code>-e</code>, <code>--extra-vars </code>
|
||||
<br />
|
||||
{i18n._(t`Provide key/value pairs using either
|
||||
YAML or JSON.`)}
|
||||
{t`Provide key/value pairs using either
|
||||
YAML or JSON.`}
|
||||
</p>
|
||||
JSON:
|
||||
<br />
|
||||
@@ -306,8 +292,8 @@ function AdHocDetailsStep({ i18n, verbosityOptions, moduleOptions }) {
|
||||
</code>
|
||||
</TooltipWrapper>
|
||||
}
|
||||
label={i18n._(t`Extra variables`)}
|
||||
aria-label={i18n._(t`Extra variables`)}
|
||||
label={t`Extra variables`}
|
||||
aria-label={t`Extra variables`}
|
||||
/>
|
||||
</FormFullWidthLayout>
|
||||
</FormColumnLayout>
|
||||
@@ -320,4 +306,4 @@ AdHocDetailsStep.propTypes = {
|
||||
verbosityOptions: PropTypes.arrayOf(PropTypes.object).isRequired,
|
||||
};
|
||||
|
||||
export default withI18n()(AdHocDetailsStep);
|
||||
export default AdHocDetailsStep;
|
||||
|
||||
@@ -1,12 +1,11 @@
|
||||
import React, { useState, useRef, useEffect, Fragment } from 'react';
|
||||
import { withI18n } from '@lingui/react';
|
||||
import { t } from '@lingui/macro';
|
||||
import PropTypes from 'prop-types';
|
||||
import { Dropdown, DropdownPosition } from '@patternfly/react-core';
|
||||
import { ToolbarAddButton } from '../PaginatedDataList';
|
||||
import { useKebabifiedMenu } from '../../contexts/Kebabified';
|
||||
|
||||
function AddDropDownButton({ dropdownItems, i18n }) {
|
||||
function AddDropDownButton({ dropdownItems, ouiaId }) {
|
||||
const { isKebabified } = useKebabifiedMenu();
|
||||
const [isOpen, setIsOpen] = useState(false);
|
||||
const element = useRef(null);
|
||||
@@ -36,7 +35,8 @@ function AddDropDownButton({ dropdownItems, i18n }) {
|
||||
position={DropdownPosition.right}
|
||||
toggle={
|
||||
<ToolbarAddButton
|
||||
aria-label={i18n._(t`Add`)}
|
||||
ouiaId={ouiaId}
|
||||
aria-label={t`Add`}
|
||||
showToggleIndicator
|
||||
onClick={() => setIsOpen(!isOpen)}
|
||||
/>
|
||||
@@ -52,4 +52,4 @@ AddDropDownButton.propTypes = {
|
||||
};
|
||||
|
||||
export { AddDropDownButton as _AddDropDownButton };
|
||||
export default withI18n()(AddDropDownButton);
|
||||
export default AddDropDownButton;
|
||||
|
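The new ouiaId prop on AddDropDownButton is forwarded to the ToolbarAddButton toggle, giving OUIA/Cypress tests a stable hook on the add menu. A hypothetical call site; the component name and items below are placeholders, only the prop wiring reflects the hunk above:

    import React from 'react';
    import { DropdownItem } from '@patternfly/react-core';
    import AddDropDownButton from './AddDropDownButton';

    // ouiaId flows through to the toggle button; the items are ordinary
    // PatternFly dropdown entries supplied by the caller.
    function AddInventoryButton() {
      const items = [
        <DropdownItem key="inventory" component="button">
          Add inventory
        </DropdownItem>,
        <DropdownItem key="smart" component="button">
          Add smart inventory
        </DropdownItem>,
      ];
      return <AddDropDownButton ouiaId="add-inventory-button" dropdownItems={items} />;
    }

    export default AddInventoryButton;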
||||
@@ -1,7 +1,6 @@
|
||||
import React, { Fragment, useState, useEffect } from 'react';
|
||||
import PropTypes from 'prop-types';
|
||||
import { useHistory } from 'react-router-dom';
|
||||
import { withI18n } from '@lingui/react';
|
||||
import { t } from '@lingui/macro';
|
||||
import SelectableCard from '../SelectableCard';
|
||||
import Wizard from '../Wizard';
|
||||
@@ -18,7 +17,7 @@ const readTeams = async queryParams => TeamsAPI.read(queryParams);
|
||||
|
||||
const readTeamsOptions = async () => TeamsAPI.readOptions();
|
||||
|
||||
function AddResourceRole({ onSave, onClose, roles, i18n, resource, onError }) {
|
||||
function AddResourceRole({ onSave, onClose, roles, resource, onError }) {
|
||||
const history = useHistory();
|
||||
|
||||
const [selectedResource, setSelectedResource] = useState(null);
|
||||
@@ -122,52 +121,52 @@ function AddResourceRole({ onSave, onClose, roles, i18n, resource, onError }) {
|
||||
|
||||
const userSearchColumns = [
|
||||
{
|
||||
name: i18n._(t`Username`),
|
||||
name: t`Username`,
|
||||
key: 'username__icontains',
|
||||
isDefault: true,
|
||||
},
|
||||
{
|
||||
name: i18n._(t`First Name`),
|
||||
name: t`First Name`,
|
||||
key: 'first_name__icontains',
|
||||
},
|
||||
{
|
||||
name: i18n._(t`Last Name`),
|
||||
name: t`Last Name`,
|
||||
key: 'last_name__icontains',
|
||||
},
|
||||
];
|
||||
const userSortColumns = [
|
||||
{
|
||||
name: i18n._(t`Username`),
|
||||
name: t`Username`,
|
||||
key: 'username',
|
||||
},
|
||||
{
|
||||
name: i18n._(t`First Name`),
|
||||
name: t`First Name`,
|
||||
key: 'first_name',
|
||||
},
|
||||
{
|
||||
name: i18n._(t`Last Name`),
|
||||
name: t`Last Name`,
|
||||
key: 'last_name',
|
||||
},
|
||||
];
|
||||
const teamSearchColumns = [
|
||||
{
|
||||
name: i18n._(t`Name`),
|
||||
name: t`Name`,
|
||||
key: 'name',
|
||||
isDefault: true,
|
||||
},
|
||||
{
|
||||
name: i18n._(t`Created By (Username)`),
|
||||
name: t`Created By (Username)`,
|
||||
key: 'created_by__username',
|
||||
},
|
||||
{
|
||||
name: i18n._(t`Modified By (Username)`),
|
||||
name: t`Modified By (Username)`,
|
||||
key: 'modified_by__username',
|
||||
},
|
||||
];
|
||||
|
||||
const teamSortColumns = [
|
||||
{
|
||||
name: i18n._(t`Name`),
|
||||
name: t`Name`,
|
||||
key: 'name',
|
||||
},
|
||||
];
|
||||
@@ -176,30 +175,28 @@ function AddResourceRole({ onSave, onClose, roles, i18n, resource, onError }) {
|
||||
|
||||
switch (selectedResource) {
|
||||
case 'users':
|
||||
wizardTitle = i18n._(t`Add User Roles`);
|
||||
wizardTitle = t`Add User Roles`;
|
||||
break;
|
||||
case 'teams':
|
||||
wizardTitle = i18n._(t`Add Team Roles`);
|
||||
wizardTitle = t`Add Team Roles`;
|
||||
break;
|
||||
default:
|
||||
wizardTitle = i18n._(t`Add Roles`);
|
||||
wizardTitle = t`Add Roles`;
|
||||
}
|
||||
|
||||
const steps = [
|
||||
{
|
||||
id: 1,
|
||||
name: i18n._(t`Select a Resource Type`),
|
||||
name: t`Select a Resource Type`,
|
||||
component: (
|
||||
<div style={{ display: 'flex', flexWrap: 'wrap' }}>
|
||||
<div style={{ width: '100%', marginBottom: '10px' }}>
|
||||
{i18n._(
|
||||
t`Choose the type of resource that will be receiving new roles. For example, if you'd like to add new roles to a set of users please choose Users and click Next. You'll be able to select the specific resources in the next step.`
|
||||
)}
|
||||
{t`Choose the type of resource that will be receiving new roles. For example, if you'd like to add new roles to a set of users please choose Users and click Next. You'll be able to select the specific resources in the next step.`}
|
||||
</div>
|
||||
<SelectableCard
|
||||
isSelected={selectedResource === 'users'}
|
||||
label={i18n._(t`Users`)}
|
||||
ariaLabel={i18n._(t`Users`)}
|
||||
label={t`Users`}
|
||||
ariaLabel={t`Users`}
|
||||
dataCy="add-role-users"
|
||||
onClick={() => handleResourceSelect('users')}
|
||||
/>
|
||||
@@ -208,8 +205,8 @@ function AddResourceRole({ onSave, onClose, roles, i18n, resource, onError }) {
|
||||
!resource?.organization) ? null : (
|
||||
<SelectableCard
|
||||
isSelected={selectedResource === 'teams'}
|
||||
label={i18n._(t`Teams`)}
|
||||
ariaLabel={i18n._(t`Teams`)}
|
||||
label={t`Teams`}
|
||||
ariaLabel={t`Teams`}
|
||||
dataCy="add-role-teams"
|
||||
onClick={() => handleResourceSelect('teams')}
|
||||
/>
|
||||
@@ -220,7 +217,7 @@ function AddResourceRole({ onSave, onClose, roles, i18n, resource, onError }) {
|
||||
},
|
||||
{
|
||||
id: 2,
|
||||
name: i18n._(t`Select Items from List`),
|
||||
name: t`Select Items from List`,
|
||||
component: (
|
||||
<Fragment>
|
||||
{selectedResource === 'users' && (
|
||||
@@ -231,7 +228,7 @@ function AddResourceRole({ onSave, onClose, roles, i18n, resource, onError }) {
|
||||
onRowClick={handleResourceCheckboxClick}
|
||||
fetchItems={readUsers}
|
||||
fetchOptions={readUsersOptions}
|
||||
selectedLabel={i18n._(t`Selected`)}
|
||||
selectedLabel={t`Selected`}
|
||||
selectedResourceRows={selectedResourceRows}
|
||||
sortedColumnKey="username"
|
||||
/>
|
||||
@@ -243,7 +240,7 @@ function AddResourceRole({ onSave, onClose, roles, i18n, resource, onError }) {
|
||||
onRowClick={handleResourceCheckboxClick}
|
||||
fetchItems={readTeams}
|
||||
fetchOptions={readTeamsOptions}
|
||||
selectedLabel={i18n._(t`Selected`)}
|
||||
selectedLabel={t`Selected`}
|
||||
selectedResourceRows={selectedResourceRows}
|
||||
/>
|
||||
)}
|
||||
@@ -254,18 +251,18 @@ function AddResourceRole({ onSave, onClose, roles, i18n, resource, onError }) {
|
||||
},
|
||||
{
|
||||
id: 3,
|
||||
name: i18n._(t`Select Roles to Apply`),
|
||||
name: t`Select Roles to Apply`,
|
||||
component: (
|
||||
<SelectRoleStep
|
||||
onRolesClick={handleRoleCheckboxClick}
|
||||
roles={selectableRoles}
|
||||
selectedListKey={selectedResource === 'users' ? 'username' : 'name'}
|
||||
selectedListLabel={i18n._(t`Selected`)}
|
||||
selectedListLabel={t`Selected`}
|
||||
selectedResourceRows={selectedResourceRows}
|
||||
selectedRoleRows={selectedRoleRows}
|
||||
/>
|
||||
),
|
||||
nextButtonText: i18n._(t`Save`),
|
||||
nextButtonText: t`Save`,
|
||||
enableNext: selectedRoleRows.length > 0,
|
||||
canJumpTo: maxEnabledStep >= 3,
|
||||
},
|
||||
@@ -285,8 +282,8 @@ function AddResourceRole({ onSave, onClose, roles, i18n, resource, onError }) {
|
||||
steps={steps}
|
||||
title={wizardTitle}
|
||||
nextButtonText={currentStep.nextButtonText || undefined}
|
||||
backButtonText={i18n._(t`Back`)}
|
||||
cancelButtonText={i18n._(t`Cancel`)}
|
||||
backButtonText={t`Back`}
|
||||
cancelButtonText={t`Cancel`}
|
||||
/>
|
||||
);
|
||||
}
|
||||
@@ -304,4 +301,4 @@ AddResourceRole.defaultProps = {
|
||||
};
|
||||
|
||||
export { AddResourceRole as _AddResourceRole };
|
||||
export default withI18n()(AddResourceRole);
|
||||
export default AddResourceRole;
|
||||
|
||||
@@ -96,12 +96,12 @@ describe('<_AddResourceRole />', () => {
|
||||
// Step 2
|
||||
await waitForElement(wrapper, 'EmptyStateBody', el => el.length === 0);
|
||||
act(() =>
|
||||
wrapper.find('DataListCheck[name="foo"]').invoke('onChange')(true)
|
||||
wrapper.find('CheckboxListItem[name="foo"]').invoke('onSelect')(true)
|
||||
);
|
||||
wrapper.update();
|
||||
expect(wrapper.find('DataListCheck[name="foo"]').prop('checked')).toBe(
|
||||
true
|
||||
);
|
||||
expect(
|
||||
wrapper.find('CheckboxListItem[name="foo"]').prop('isSelected')
|
||||
).toBe(true);
|
||||
act(() => wrapper.find('Button[type="submit"]').prop('onClick')());
|
||||
wrapper.update();
|
||||
|
||||
@@ -162,12 +162,12 @@ describe('<_AddResourceRole />', () => {
|
||||
// Step 2
|
||||
await waitForElement(wrapper, 'EmptyStateBody', el => el.length === 0);
|
||||
act(() =>
|
||||
wrapper.find('DataListCheck[name="foo"]').invoke('onChange')(true)
|
||||
wrapper.find('CheckboxListItem[name="foo"]').invoke('onSelect')(true)
|
||||
);
|
||||
wrapper.update();
|
||||
expect(wrapper.find('DataListCheck[name="foo"]').prop('checked')).toBe(
|
||||
true
|
||||
);
|
||||
expect(
|
||||
wrapper.find('CheckboxListItem[name="foo"]').prop('isSelected')
|
||||
).toBe(true);
|
||||
act(() => wrapper.find('Button[type="submit"]').prop('onClick')());
|
||||
wrapper.update();
|
||||
|
||||
@@ -214,12 +214,12 @@ describe('<_AddResourceRole />', () => {
|
||||
// Step 2
|
||||
await waitForElement(wrapper, 'EmptyStateBody', el => el.length === 0);
|
||||
act(() =>
|
||||
wrapper.find('DataListCheck[name="foo"]').invoke('onChange')(true)
|
||||
wrapper.find('CheckboxListItem[name="foo"]').invoke('onSelect')(true)
|
||||
);
|
||||
wrapper.update();
|
||||
expect(wrapper.find('DataListCheck[name="foo"]').prop('checked')).toBe(
|
||||
true
|
||||
);
|
||||
expect(
|
||||
wrapper.find('CheckboxListItem[name="foo"]').prop('isSelected')
|
||||
).toBe(true);
|
||||
await act(async () =>
|
||||
wrapper.find('PFWizard').prop('onGoToStep')({ id: 1 })
|
||||
);
|
||||
@@ -280,12 +280,12 @@ describe('<_AddResourceRole />', () => {
|
||||
// Step 2
|
||||
await waitForElement(wrapper, 'EmptyStateBody', el => el.length === 0);
|
||||
act(() =>
|
||||
wrapper.find('DataListCheck[name="foo"]').invoke('onChange')(true)
|
||||
wrapper.find('CheckboxListItem[name="foo"]').invoke('onSelect')(true)
|
||||
);
|
||||
wrapper.update();
|
||||
expect(wrapper.find('DataListCheck[name="foo"]').prop('checked')).toBe(
|
||||
true
|
||||
);
|
||||
expect(
|
||||
wrapper.find('CheckboxListItem[name="foo"]').prop('isSelected')
|
||||
).toBe(true);
|
||||
act(() => wrapper.find('Button[type="submit"]').prop('onClick')());
|
||||
wrapper.update();
|
||||
|
||||
|
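The test updates above stop reaching into DataListCheck and instead drive CheckboxListItem through its onSelect and isSelected props. A small hedged sketch of that interaction style with Enzyme; the helper is illustrative and assumes a wrapper mounted the way the surrounding tests do:

    import { act } from 'react-dom/test-utils';

    // Invoke the row's onSelect handler, flush the update, then report
    // whether isSelected flipped - the same pattern asserted above.
    async function selectRow(wrapper, name) {
      await act(async () => {
        wrapper.find(`CheckboxListItem[name="${name}"]`).invoke('onSelect')(true);
      });
      wrapper.update();
      return wrapper.find(`CheckboxListItem[name="${name}"]`).prop('isSelected');
    }

    // expect(await selectRow(wrapper, 'foo')).toBe(true);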
||||
@@ -1,16 +1,14 @@
|
||||
import React, { Fragment, useCallback, useEffect } from 'react';
|
||||
import PropTypes from 'prop-types';
|
||||
import { withRouter, useLocation } from 'react-router-dom';
|
||||
import { withI18n } from '@lingui/react';
|
||||
import { t } from '@lingui/macro';
|
||||
import useRequest from '../../util/useRequest';
|
||||
|
||||
import { SearchColumns, SortColumns } from '../../types';
|
||||
import PaginatedDataList from '../PaginatedDataList';
|
||||
import DataListToolbar from '../DataListToolbar';
|
||||
import CheckboxListItem from '../CheckboxListItem';
|
||||
import SelectedList from '../SelectedList';
|
||||
import { getQSConfig, parseQueryString } from '../../util/qs';
|
||||
import PaginatedTable, { HeaderCell, HeaderRow } from '../PaginatedTable';
|
||||
|
||||
const QS_Config = sortColumns => {
|
||||
return getQSConfig('resource', {
|
||||
@@ -30,7 +28,6 @@ function SelectResourceStep({
|
||||
selectedResourceRows,
|
||||
fetchItems,
|
||||
fetchOptions,
|
||||
i18n,
|
||||
}) {
|
||||
const location = useLocation();
|
||||
|
||||
@@ -78,9 +75,7 @@ function SelectResourceStep({
|
||||
return (
|
||||
<Fragment>
|
||||
<div>
|
||||
{i18n._(
|
||||
t`Choose the resources that will be receiving new roles. You'll be able to select the roles to apply in the next step. Note that the resources chosen here will receive all roles chosen in the next step.`
|
||||
)}
|
||||
{t`Choose the resources that will be receiving new roles. You'll be able to select the roles to apply in the next step. Note that the resources chosen here will receive all roles chosen in the next step.`}
|
||||
</div>
|
||||
{selectedResourceRows.length > 0 && (
|
||||
<SelectedList
|
||||
@@ -90,7 +85,8 @@ function SelectResourceStep({
|
||||
selected={selectedResourceRows}
|
||||
/>
|
||||
)}
|
||||
<PaginatedDataList
|
||||
|
||||
<PaginatedTable
|
||||
hasContentLoading={isLoading}
|
||||
contentError={error}
|
||||
items={resources}
|
||||
@@ -101,11 +97,21 @@ function SelectResourceStep({
|
||||
toolbarSortColumns={sortColumns}
|
||||
toolbarSearchableKeys={searchableKeys}
|
||||
toolbarRelatedSearchableKeys={relatedSearchableKeys}
|
||||
renderItem={item => (
|
||||
headerRow={
|
||||
<HeaderRow qsConfig={QS_Config(sortColumns)}>
|
||||
{sortColumns.map(({ name, key }) => (
|
||||
<HeaderCell sortKey={key}>{name}</HeaderCell>
|
||||
))}
|
||||
</HeaderRow>
|
||||
}
|
||||
renderRow={(item, index) => (
|
||||
<CheckboxListItem
|
||||
isSelected={selectedResourceRows.some(i => i.id === item.id)}
|
||||
itemId={item.id}
|
||||
item={item}
|
||||
rowIndex={index}
|
||||
key={item.id}
|
||||
columns={sortColumns}
|
||||
name={item[displayKey]}
|
||||
label={item[displayKey]}
|
||||
onSelect={() => onRowClick(item)}
|
||||
@@ -139,4 +145,4 @@ SelectResourceStep.defaultProps = {
|
||||
};
|
||||
|
||||
export { SelectResourceStep as _SelectResourceStep };
|
||||
export default withI18n()(withRouter(SelectResourceStep));
|
||||
export default withRouter(SelectResourceStep);
|
||||
|
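SelectResourceStep now renders the selectable resources through the project's PaginatedTable, building a headerRow from the sort columns and emitting one CheckboxListItem per item from renderRow. A condensed sketch of that composition; props are trimmed and the component name, data fetching, and qsConfig handling are assumed to be supplied by the caller as in the hunk above:

    import React from 'react';
    import PaginatedTable, { HeaderCell, HeaderRow } from '../PaginatedTable';
    import CheckboxListItem from '../CheckboxListItem';

    // One sortable header cell per column, one selectable row per item.
    function ResourcesTable({ items, columns, qsConfig, selected, onRowClick }) {
      return (
        <PaginatedTable
          items={items}
          qsConfig={qsConfig}
          headerRow={
            <HeaderRow qsConfig={qsConfig}>
              {columns.map(({ name, key }) => (
                <HeaderCell key={key} sortKey={key}>
                  {name}
                </HeaderCell>
              ))}
            </HeaderRow>
          }
          renderRow={(item, index) => (
            <CheckboxListItem
              key={item.id}
              itemId={item.id}
              item={item}
              rowIndex={index}
              columns={columns}
              name={item.name}
              label={item.name}
              isSelected={selected.some(i => i.id === item.id)}
              onSelect={() => onRowClick(item)}
            />
          )}
        />
      );
    }

    export default ResourcesTable;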
||||
@@ -1,7 +1,6 @@
|
||||
import React, { Fragment } from 'react';
|
||||
import PropTypes from 'prop-types';
|
||||
|
||||
import { withI18n } from '@lingui/react';
|
||||
import { t } from '@lingui/macro';
|
||||
|
||||
import CheckboxCard from './CheckboxCard';
|
||||
@@ -14,21 +13,18 @@ function RolesStep({
|
||||
selectedListLabel,
|
||||
selectedResourceRows,
|
||||
selectedRoleRows,
|
||||
i18n,
|
||||
}) {
|
||||
return (
|
||||
<Fragment>
|
||||
<div>
|
||||
{i18n._(
|
||||
t`Choose roles to apply to the selected resources. Note that all selected roles will be applied to all selected resources.`
|
||||
)}
|
||||
{t`Choose roles to apply to the selected resources. Note that all selected roles will be applied to all selected resources.`}
|
||||
</div>
|
||||
<div>
|
||||
{selectedResourceRows.length > 0 && (
|
||||
<SelectedList
|
||||
displayKey={selectedListKey}
|
||||
isReadOnly
|
||||
label={selectedListLabel || i18n._(t`Selected`)}
|
||||
label={selectedListLabel || t`Selected`}
|
||||
selected={selectedResourceRows}
|
||||
/>
|
||||
)}
|
||||
@@ -75,4 +71,4 @@ RolesStep.defaultProps = {
|
||||
selectedRoleRows: [],
|
||||
};
|
||||
|
||||
export default withI18n()(RolesStep);
|
||||
export default RolesStep;
|
||||
|
||||
@@ -8,7 +8,7 @@ import {
|
||||
InfoCircleIcon,
|
||||
TimesCircleIcon,
|
||||
} from '@patternfly/react-icons';
|
||||
import { withI18n } from '@lingui/react';
|
||||
|
||||
import { t } from '@lingui/macro';
|
||||
import styled from 'styled-components';
|
||||
|
||||
@@ -20,13 +20,11 @@ const Header = styled.div`
|
||||
`;
|
||||
|
||||
function AlertModal({
|
||||
i18n,
|
||||
isOpen = null,
|
||||
title,
|
||||
label,
|
||||
variant,
|
||||
children,
|
||||
i18nHash,
|
||||
...props
|
||||
}) {
|
||||
const variantIcons = {
|
||||
@@ -74,7 +72,7 @@ function AlertModal({
|
||||
return (
|
||||
<Modal
|
||||
header={customHeader}
|
||||
aria-label={label || i18n._(t`Alert modal`)}
|
||||
aria-label={label || t`Alert modal`}
|
||||
aria-labelledby="alert-modal-header-label"
|
||||
isOpen={Boolean(isOpen)}
|
||||
variant="small"
|
||||
@@ -86,4 +84,4 @@ function AlertModal({
|
||||
);
|
||||
}
|
||||
|
||||
export default withI18n()(AlertModal);
|
||||
export default AlertModal;
|
||||
|
||||
@@ -8,14 +8,14 @@ import {
|
||||
shape,
|
||||
bool,
|
||||
} from 'prop-types';
|
||||
import { withI18n } from '@lingui/react';
|
||||
|
||||
import { t } from '@lingui/macro';
|
||||
import { FormSelect, FormSelectOption } from '@patternfly/react-core';
|
||||
|
||||
function AnsibleSelect({
|
||||
id,
|
||||
data,
|
||||
i18n,
|
||||
|
||||
isValid,
|
||||
onBlur,
|
||||
value,
|
||||
@@ -35,7 +35,7 @@ function AnsibleSelect({
|
||||
value={value}
|
||||
onChange={onSelectChange}
|
||||
onBlur={onBlur}
|
||||
aria-label={i18n._(t`Select Input`)}
|
||||
aria-label={t`Select Input`}
|
||||
validated={isValid ? 'default' : 'error'}
|
||||
className={className}
|
||||
isDisabled={isDisabled}
|
||||
@@ -79,4 +79,4 @@ AnsibleSelect.propTypes = {
|
||||
};
|
||||
|
||||
export { AnsibleSelect as _AnsibleSelect };
|
||||
export default withI18n()(AnsibleSelect);
|
||||
export default AnsibleSelect;
|
||||
|
||||
@@ -12,13 +12,14 @@ import {
|
||||
PageSidebar,
|
||||
} from '@patternfly/react-core';
|
||||
import { t } from '@lingui/macro';
|
||||
import { withI18n } from '@lingui/react';
|
||||
|
||||
import styled from 'styled-components';
|
||||
|
||||
import { MeAPI, RootAPI } from '../../api';
|
||||
import { useConfig, useAuthorizedPath } from '../../contexts/Config';
|
||||
import { SESSION_TIMEOUT_KEY } from '../../constants';
|
||||
import { isAuthenticated } from '../../util/auth';
|
||||
import issuePendoIdentity from '../../util/issuePendoIdentity';
|
||||
import About from '../About';
|
||||
import AlertModal from '../AlertModal';
|
||||
import BrandLogo from './BrandLogo';
|
||||
@@ -85,7 +86,7 @@ function useStorage(key) {
|
||||
return [storageVal, setValue];
|
||||
}
|
||||
|
||||
function AppContainer({ i18n, navRouteConfig = [], children }) {
|
||||
function AppContainer({ navRouteConfig = [], children }) {
|
||||
const history = useHistory();
|
||||
const config = useConfig();
|
||||
|
||||
@@ -138,8 +139,15 @@ function AppContainer({ i18n, navRouteConfig = [], children }) {
|
||||
}
|
||||
}, [handleLogout, timeRemaining]);
|
||||
|
||||
useEffect(() => {
|
||||
if ('analytics_status' in config) {
|
||||
issuePendoIdentity(config);
|
||||
}
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [config.analytics_status]);
|
||||
|
||||
const brandName = config?.license_info?.product_name;
|
||||
const alt = brandName ? i18n._(t`${brandName} logo`) : i18n._(t`brand logo`);
|
||||
const alt = brandName ? t`${brandName} logo` : t`brand logo`;
|
||||
|
||||
const header = (
|
||||
<PageHeader
|
||||
@@ -165,7 +173,7 @@ function AppContainer({ i18n, navRouteConfig = [], children }) {
|
||||
<PageHeaderToolsGroup>
|
||||
<PageHeaderToolsItem>
|
||||
<Button onClick={handleLogout} variant="tertiary" ouiaId="logout">
|
||||
{i18n._(t`Logout`)}
|
||||
{t`Logout`}
|
||||
</Button>
|
||||
</PageHeaderToolsItem>
|
||||
</PageHeaderToolsGroup>
|
||||
@@ -178,7 +186,7 @@ function AppContainer({ i18n, navRouteConfig = [], children }) {
|
||||
<PageSidebar
|
||||
theme="dark"
|
||||
nav={
|
||||
<Nav aria-label={i18n._(t`Navigation`)} theme="dark">
|
||||
<Nav aria-label={t`Navigation`} theme="dark">
|
||||
<NavList>
|
||||
{navRouteConfig.map(({ groupId, groupTitle, routes }) => (
|
||||
<NavExpandableGroup
|
||||
@@ -210,7 +218,7 @@ function AppContainer({ i18n, navRouteConfig = [], children }) {
|
||||
/>
|
||||
<AlertModal
|
||||
ouiaId="session-expiration-modal"
|
||||
title={i18n._(t`Your session is about to expire`)}
|
||||
title={t`Your session is about to expire`}
|
||||
isOpen={timeoutWarning && sessionTimeout > 0 && timeRemaining !== null}
|
||||
onClose={handleLogout}
|
||||
showClose={false}
|
||||
@@ -222,7 +230,7 @@ function AppContainer({ i18n, navRouteConfig = [], children }) {
|
||||
variant="primary"
|
||||
onClick={handleSessionContinue}
|
||||
>
|
||||
{i18n._(t`Continue`)}
|
||||
{t`Continue`}
|
||||
</Button>,
|
||||
<Button
|
||||
ouiaId="session-expiration-logout-button"
|
||||
@@ -230,19 +238,17 @@ function AppContainer({ i18n, navRouteConfig = [], children }) {
|
||||
variant="secondary"
|
||||
onClick={handleLogout}
|
||||
>
|
||||
{i18n._(t`Logout`)}
|
||||
{t`Logout`}
|
||||
</Button>,
|
||||
]}
|
||||
>
|
||||
{i18n._(
|
||||
t`You will be logged out in ${Number(
|
||||
Math.max(Math.floor(timeRemaining / 1000), 0)
|
||||
)} seconds due to inactivity.`
|
||||
)}
|
||||
{t`You will be logged out in ${Number(
|
||||
Math.max(Math.floor(timeRemaining / 1000), 0)
|
||||
)} seconds due to inactivity.`}
|
||||
</AlertModal>
|
||||
</>
|
||||
);
|
||||
}
|
||||
|
||||
export { AppContainer as _AppContainer };
|
||||
export default withI18n()(withRouter(AppContainer));
|
||||
export default withRouter(AppContainer);
|
||||
|
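The new effect in AppContainer defers analytics wiring to issuePendoIdentity and only runs it once analytics_status is present on the config. A hedged sketch of the guard such a util presumably applies, inferred from the tests further down (no initialization when the Pendo key is empty or analytics is off); the real implementation, import path, and payload may differ:

    import { RootAPI } from '../api';

    // Sketch: initialize Pendo only when analytics is enabled server-side
    // and an API key is configured. The real util also builds the
    // visitor/account payload from config; that is omitted here.
    async function issuePendoIdentity(config) {
      if (config.analytics_status === 'off') {
        return; // telemetry disabled
      }
      const {
        data: { PENDO_API_KEY },
      } = await RootAPI.readAssetVariables();
      if (!PENDO_API_KEY) {
        return; // no key configured, nothing to send
      }
      window.pendo.initialize({
        // visitor/account fields derived from config in the real util
      });
    }

    export default issuePendoIdentity;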
||||
@@ -4,19 +4,25 @@ import {
|
||||
mountWithContexts,
|
||||
waitForElement,
|
||||
} from '../../../testUtils/enzymeHelpers';
|
||||
import { ConfigAPI, MeAPI, RootAPI } from '../../api';
|
||||
import { MeAPI, RootAPI } from '../../api';
|
||||
import { useAuthorizedPath } from '../../contexts/Config';
|
||||
import AppContainer from './AppContainer';
|
||||
|
||||
jest.mock('../../api');
|
||||
jest.mock('../../util/bootstrapPendo');
|
||||
|
||||
global.pendo = {
|
||||
initialize: jest.fn(),
|
||||
};
|
||||
|
||||
describe('<AppContainer />', () => {
|
||||
const version = '222';
|
||||
|
||||
beforeEach(() => {
|
||||
ConfigAPI.read.mockResolvedValue({
|
||||
RootAPI.readAssetVariables.mockResolvedValue({
|
||||
data: {
|
||||
version,
|
||||
BRAND_NAME: 'AWX',
|
||||
PENDO_API_KEY: 'some-pendo-key',
|
||||
},
|
||||
});
|
||||
MeAPI.read.mockResolvedValue({ data: { results: [{}] } });
|
||||
@@ -31,7 +37,7 @@ describe('<AppContainer />', () => {
|
||||
test('expected content is rendered', async () => {
|
||||
const routeConfig = [
|
||||
{
|
||||
groupTitle: 'Group One',
|
||||
groupTitle: <span>Group One</span>,
|
||||
groupId: 'group_one',
|
||||
routes: [
|
||||
{ title: 'Foo', path: '/foo' },
|
||||
@@ -39,7 +45,7 @@ describe('<AppContainer />', () => {
|
||||
],
|
||||
},
|
||||
{
|
||||
groupTitle: 'Group Two',
|
||||
groupTitle: <span>Group Two</span>,
|
||||
groupId: 'group_two',
|
||||
routes: [{ title: 'Fiz', path: '/fiz' }],
|
||||
},
|
||||
@@ -52,7 +58,22 @@ describe('<AppContainer />', () => {
|
||||
{routeConfig.map(({ groupId }) => (
|
||||
<div key={groupId} id={groupId} />
|
||||
))}
|
||||
</AppContainer>
|
||||
</AppContainer>,
|
||||
{
|
||||
context: {
|
||||
config: {
|
||||
analytics_status: 'detailed',
|
||||
ansible_version: null,
|
||||
custom_virtualenvs: [],
|
||||
version: '9000',
|
||||
me: { is_superuser: true },
|
||||
toJSON: () => '/config/',
|
||||
license_info: {
|
||||
valid_key: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
);
|
||||
});
|
||||
wrapper.update();
|
||||
@@ -70,6 +91,60 @@ describe('<AppContainer />', () => {
|
||||
|
||||
expect(wrapper.find('#group_one').length).toBe(1);
|
||||
expect(wrapper.find('#group_two').length).toBe(1);
|
||||
|
||||
expect(global.pendo.initialize).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
test('Pendo not initialized when key is missing', async () => {
|
||||
RootAPI.readAssetVariables.mockResolvedValue({
|
||||
data: {
|
||||
BRAND_NAME: 'AWX',
|
||||
PENDO_API_KEY: '',
|
||||
},
|
||||
});
|
||||
let wrapper;
|
||||
await act(async () => {
|
||||
wrapper = mountWithContexts(<AppContainer />, {
|
||||
context: {
|
||||
config: {
|
||||
analytics_status: 'detailed',
|
||||
ansible_version: null,
|
||||
custom_virtualenvs: [],
|
||||
version: '9000',
|
||||
me: { is_superuser: true },
|
||||
toJSON: () => '/config/',
|
||||
license_info: {
|
||||
valid_key: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
});
|
||||
wrapper.update();
|
||||
expect(global.pendo.initialize).toHaveBeenCalledTimes(0);
|
||||
});
|
||||
|
||||
test('Pendo not initialized when status is analytics off', async () => {
|
||||
let wrapper;
|
||||
await act(async () => {
|
||||
wrapper = mountWithContexts(<AppContainer />, {
|
||||
context: {
|
||||
config: {
|
||||
analytics_status: 'off',
|
||||
ansible_version: null,
|
||||
custom_virtualenvs: [],
|
||||
version: '9000',
|
||||
me: { is_superuser: true },
|
||||
toJSON: () => '/config/',
|
||||
license_info: {
|
||||
valid_key: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
});
|
||||
wrapper.update();
|
||||
expect(global.pendo.initialize).toHaveBeenCalledTimes(0);
|
||||
});
|
||||
|
||||
test('opening the about modal renders prefetched config data', async () => {
|
||||
|
||||
@@ -59,7 +59,7 @@ class NavExpandableGroup extends Component {
|
||||
|
||||
NavExpandableGroup.propTypes = {
|
||||
groupId: PropTypes.string.isRequired,
|
||||
groupTitle: PropTypes.string.isRequired,
|
||||
groupTitle: PropTypes.element.isRequired,
|
||||
routes: PropTypes.arrayOf(PropTypes.object).isRequired,
|
||||
};
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import React, { useCallback, useEffect, useState } from 'react';
|
||||
import PropTypes from 'prop-types';
|
||||
import { withI18n } from '@lingui/react';
|
||||
|
||||
import { t } from '@lingui/macro';
|
||||
import { Link } from 'react-router-dom';
|
||||
import styled from 'styled-components';
|
||||
@@ -42,7 +42,6 @@ function PageHeaderToolbar({
|
||||
onAboutClick,
|
||||
onLogoutClick,
|
||||
loggedInUser,
|
||||
i18n,
|
||||
}) {
|
||||
const [isHelpOpen, setIsHelpOpen] = useState(false);
|
||||
const [isUserOpen, setIsUserOpen] = useState(false);
|
||||
@@ -83,10 +82,7 @@ function PageHeaderToolbar({
|
||||
return (
|
||||
<PageHeaderTools>
|
||||
<PageHeaderToolsGroup>
|
||||
<Tooltip
|
||||
position="bottom"
|
||||
content={i18n._(t`Pending Workflow Approvals`)}
|
||||
>
|
||||
<Tooltip position="bottom" content={t`Pending Workflow Approvals`}>
|
||||
<PageHeaderToolsItem>
|
||||
<Link to="/workflow_approvals?workflow_approvals.status=pending">
|
||||
<PendingWorkflowApprovals>
|
||||
@@ -108,10 +104,7 @@ function PageHeaderToolbar({
|
||||
position={DropdownPosition.right}
|
||||
onSelect={handleHelpSelect}
|
||||
toggle={
|
||||
<DropdownToggle
|
||||
onToggle={setIsHelpOpen}
|
||||
aria-label={i18n._(t`Info`)}
|
||||
>
|
||||
<DropdownToggle onToggle={setIsHelpOpen} aria-label={t`Info`}>
|
||||
<QuestionCircleIcon />
|
||||
</DropdownToggle>
|
||||
}
|
||||
@@ -121,7 +114,7 @@ function PageHeaderToolbar({
|
||||
target="_blank"
|
||||
href={`${getDocsBaseUrl(config)}/html/userguide/index.html`}
|
||||
>
|
||||
{i18n._(t`Help`)}
|
||||
{t`Help`}
|
||||
</DropdownItem>,
|
||||
<DropdownItem
|
||||
key="about"
|
||||
@@ -129,12 +122,12 @@ function PageHeaderToolbar({
|
||||
isDisabled={isAboutDisabled}
|
||||
onClick={onAboutClick}
|
||||
>
|
||||
{i18n._(t`About`)}
|
||||
{t`About`}
|
||||
</DropdownItem>,
|
||||
]}
|
||||
/>
|
||||
</PageHeaderToolsItem>
|
||||
<Tooltip position="left" content={<div>{i18n._(t`User`)}</div>}>
|
||||
<Tooltip position="left" content={<div>{t`User`}</div>}>
|
||||
<PageHeaderToolsItem>
|
||||
<Dropdown
|
||||
id="toolbar-user-dropdown"
|
||||
@@ -155,14 +148,14 @@ function PageHeaderToolbar({
|
||||
dropdownItems={[
|
||||
<DropdownItem
|
||||
key="user"
|
||||
aria-label={i18n._(t`User details`)}
|
||||
aria-label={t`User details`}
|
||||
href={
|
||||
loggedInUser
|
||||
? `/#/users/${loggedInUser.id}/details`
|
||||
: '/#/home'
|
||||
? `#/users/${loggedInUser.id}/details`
|
||||
: '#/home'
|
||||
}
|
||||
>
|
||||
{i18n._(t`User Details`)}
|
||||
{t`User Details`}
|
||||
</DropdownItem>,
|
||||
<DropdownItem
|
||||
key="logout"
|
||||
@@ -170,7 +163,7 @@ function PageHeaderToolbar({
|
||||
onClick={onLogoutClick}
|
||||
id="logout-button"
|
||||
>
|
||||
{i18n._(t`Logout`)}
|
||||
{t`Logout`}
|
||||
</DropdownItem>,
|
||||
]}
|
||||
/>
|
||||
@@ -191,4 +184,4 @@ PageHeaderToolbar.defaultProps = {
|
||||
isAboutDisabled: false,
|
||||
};
|
||||
|
||||
export default withI18n()(PageHeaderToolbar);
|
||||
export default PageHeaderToolbar;
|
||||
|
||||
@@ -60,7 +60,7 @@ describe('PageHeaderToolbar', () => {
|
||||
wrapper.update();
|
||||
expect(
|
||||
wrapper.find('DropdownItem[aria-label="User details"]').prop('href')
|
||||
).toBe('/#/users/1/details');
|
||||
).toBe('#/users/1/details');
|
||||
expect(wrapper.find('DropdownItem')).toHaveLength(2);
|
||||
|
||||
const logout = wrapper.find('DropdownItem li button');
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import React, { Fragment, useEffect, useCallback } from 'react';
|
||||
import { useHistory } from 'react-router-dom';
|
||||
import { withI18n } from '@lingui/react';
|
||||
|
||||
import { t } from '@lingui/macro';
|
||||
import { Button, Modal } from '@patternfly/react-core';
|
||||
import OptionsList from '../OptionsList';
|
||||
@@ -17,15 +17,15 @@ const QS_CONFIG = (order_by = 'name') => {
|
||||
};
|
||||
|
||||
function AssociateModal({
|
||||
i18n,
|
||||
header = i18n._(t`Items`),
|
||||
title = i18n._(t`Select Items`),
|
||||
header = t`Items`,
|
||||
title = t`Select Items`,
|
||||
onClose,
|
||||
onAssociate,
|
||||
fetchRequest,
|
||||
optionsRequest,
|
||||
isModalOpen = false,
|
||||
displayKey = 'name',
|
||||
ouiaId,
|
||||
}) {
|
||||
const history = useHistory();
|
||||
const { selected, handleSelect } = useSelected([]);
|
||||
@@ -94,30 +94,31 @@ function AssociateModal({
|
||||
return (
|
||||
<Fragment>
|
||||
<Modal
|
||||
ouiaId={ouiaId}
|
||||
variant="large"
|
||||
title={title}
|
||||
aria-label={i18n._(t`Association modal`)}
|
||||
aria-label={t`Association modal`}
|
||||
isOpen={isModalOpen}
|
||||
onClose={handleClose}
|
||||
actions={[
|
||||
<Button
|
||||
ouiaId="associate-modal-save"
|
||||
aria-label={i18n._(t`Save`)}
|
||||
aria-label={t`Save`}
|
||||
key="select"
|
||||
variant="primary"
|
||||
onClick={handleSave}
|
||||
isDisabled={selected.length === 0}
|
||||
>
|
||||
{i18n._(t`Save`)}
|
||||
{t`Save`}
|
||||
</Button>,
|
||||
<Button
|
||||
ouiaId="associate-modal-cancel"
|
||||
aria-label={i18n._(t`Cancel`)}
|
||||
aria-label={t`Cancel`}
|
||||
key="cancel"
|
||||
variant="link"
|
||||
onClick={handleClose}
|
||||
>
|
||||
{i18n._(t`Cancel`)}
|
||||
{t`Cancel`}
|
||||
</Button>,
|
||||
]}
|
||||
>
|
||||
@@ -136,22 +137,22 @@ function AssociateModal({
|
||||
value={selected}
|
||||
searchColumns={[
|
||||
{
|
||||
name: i18n._(t`Name`),
|
||||
name: t`Name`,
|
||||
key: `${displayKey}__icontains`,
|
||||
isDefault: true,
|
||||
},
|
||||
{
|
||||
name: i18n._(t`Created By (Username)`),
|
||||
name: t`Created By (Username)`,
|
||||
key: 'created_by__username__icontains',
|
||||
},
|
||||
{
|
||||
name: i18n._(t`Modified By (Username)`),
|
||||
name: t`Modified By (Username)`,
|
||||
key: 'modified_by__username__icontains',
|
||||
},
|
||||
]}
|
||||
sortColumns={[
|
||||
{
|
||||
name: i18n._(t`Name`),
|
||||
name: t`Name`,
|
||||
key: `${displayKey}`,
|
||||
},
|
||||
]}
|
||||
@@ -163,4 +164,4 @@ function AssociateModal({
|
||||
);
|
||||
}
|
||||
|
||||
export default withI18n()(AssociateModal);
|
||||
export default AssociateModal;
|
||||
|
||||
@@ -1,80 +1,58 @@
|
||||
import React from 'react';
|
||||
import PropTypes from 'prop-types';
|
||||
import styled from 'styled-components';
|
||||
import {
|
||||
DataListItem,
|
||||
DataListItemRow as PFDataListItemRow,
|
||||
DataListItemCells,
|
||||
DataListCheck,
|
||||
Radio,
|
||||
} from '@patternfly/react-core';
|
||||
import _DataListCell from '../DataListCell';
|
||||
|
||||
const Label = styled.label`
|
||||
${({ isDisabled }) =>
|
||||
isDisabled &&
|
||||
`
|
||||
opacity: 0.5;
|
||||
`}
|
||||
`;
|
||||
|
||||
const DataListItemRow = styled(PFDataListItemRow)`
|
||||
&& {
|
||||
align-items: center;
|
||||
}
|
||||
`;
|
||||
|
||||
const DataListCell = styled(_DataListCell)`
|
||||
&& {
|
||||
margin-left: 10px;
|
||||
}
|
||||
`;
|
||||
import { t } from '@lingui/macro';
|
||||
import { Td, Tr } from '@patternfly/react-table';
|
||||
|
||||
const CheckboxListItem = ({
|
||||
isDisabled = false,
|
||||
isRadio = false,
|
||||
isSelected = false,
|
||||
itemId,
|
||||
label,
|
||||
name,
|
||||
onDeselect,
|
||||
rowIndex,
|
||||
onSelect,
|
||||
columns,
|
||||
item,
|
||||
}) => {
|
||||
const CheckboxRadio = isRadio ? Radio : DataListCheck;
|
||||
const handleRowClick = () => {
|
||||
if (isSelected && !isRadio) {
|
||||
onDeselect(itemId);
|
||||
} else {
|
||||
onSelect(itemId);
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<DataListItem
|
||||
key={itemId}
|
||||
aria-labelledby={`check-action-item-${itemId}`}
|
||||
id={`${itemId}`}
|
||||
<Tr
|
||||
ouiaId={`list-item-${itemId}`}
|
||||
id={`list-item-${itemId}`}
|
||||
onClick={handleRowClick}
|
||||
>
|
||||
<DataListItemRow>
|
||||
<CheckboxRadio
|
||||
aria-label={`check-action-item-${itemId}`}
|
||||
aria-labelledby={`check-action-item-${itemId}`}
|
||||
checked={isSelected}
|
||||
isDisabled={isDisabled}
|
||||
id={`selected-${itemId}`}
|
||||
isChecked={isSelected}
|
||||
name={name}
|
||||
onChange={isSelected ? onDeselect : onSelect}
|
||||
value={itemId}
|
||||
/>
|
||||
<DataListItemCells
|
||||
dataListCells={[
|
||||
<DataListCell key="name">
|
||||
<Label
|
||||
id={`check-action-item-${itemId}`}
|
||||
htmlFor={`selected-${itemId}`}
|
||||
className="check-action-item"
|
||||
isDisabled={isDisabled}
|
||||
>
|
||||
<b>{label}</b>
|
||||
</Label>
|
||||
</DataListCell>,
|
||||
]}
|
||||
/>
|
||||
</DataListItemRow>
|
||||
</DataListItem>
|
||||
<Td
|
||||
id={`check-action-item-${itemId}`}
|
||||
select={{
|
||||
rowIndex,
|
||||
isSelected,
|
||||
onSelect: isSelected ? onDeselect : onSelect,
|
||||
variant: isRadio ? 'radio' : 'checkbox',
|
||||
}}
|
||||
name={name}
|
||||
dataLabel={t`Selected`}
|
||||
/>
|
||||
|
||||
{columns?.length > 0 ? (
|
||||
columns.map(col => (
|
||||
<Td aria-label={col.name} dataLabel={col.key}>
|
||||
{item[col.key]}
|
||||
</Td>
|
||||
))
|
||||
) : (
|
||||
<Td aria-labelledby={itemId} dataLabel={label}>
|
||||
<b>{label}</b>
|
||||
</Td>
|
||||
)}
|
||||
</Tr>
|
||||
);
|
||||
};
|
||||
|
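The rewritten CheckboxListItem leans on the Td select prop from @patternfly/react-table, which renders the checkbox (or radio, via variant) cell and reports the new state to onSelect, instead of composing DataList pieces and labels by hand. A minimal hedged sketch of that prop in isolation; the row component below is illustrative:

    import React from 'react';
    import { Td, Tr } from '@patternfly/react-table';

    // One selectable row: PatternFly renders the selection cell from `select`
    // and calls onSelect with the new checked state.
    function SelectableRow({ item, rowIndex, isSelected, onToggle }) {
      return (
        <Tr id={`list-item-${item.id}`}>
          <Td
            select={{
              rowIndex,
              isSelected,
              onSelect: (_event, isSelecting) => onToggle(item.id, isSelecting),
              variant: 'checkbox',
            }}
            dataLabel="Selected"
          />
          <Td dataLabel="Name">{item.name}</Td>
        </Tr>
      );
    }

    export default SelectableRow;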
||||
|
||||
@@ -1,16 +1,16 @@
|
||||
import React from 'react';
|
||||
import { withI18n } from '@lingui/react';
|
||||
|
||||
import { t } from '@lingui/macro';
|
||||
import { ChipGroup as PFChipGroup } from '@patternfly/react-core';
|
||||
import { number, shape } from 'prop-types';
|
||||
import { number } from 'prop-types';
|
||||
|
||||
function ChipGroup({ i18n, numChips, totalChips, i18nHash, ...props }) {
|
||||
function ChipGroup({ numChips, totalChips, ...props }) {
|
||||
return (
|
||||
<PFChipGroup
|
||||
{...props}
|
||||
numChips={numChips}
|
||||
expandedText={i18n._(t`Show less`)}
|
||||
collapsedText={i18n._(t`${totalChips - numChips} more`)}
|
||||
expandedText={t`Show less`}
|
||||
collapsedText={t`${totalChips - numChips} more`}
|
||||
/>
|
||||
);
|
||||
}
|
||||
@@ -18,7 +18,6 @@ function ChipGroup({ i18n, numChips, totalChips, i18nHash, ...props }) {
|
||||
ChipGroup.propTypes = {
|
||||
numChips: number.isRequired,
|
||||
totalChips: number.isRequired,
|
||||
i18n: shape({}).isRequired,
|
||||
};
|
||||
|
||||
export default withI18n()(ChipGroup);
|
||||
export default ChipGroup;
|
||||
|
||||
@@ -8,7 +8,7 @@ import 'ace-builds/src-noconflict/mode-javascript';
import 'ace-builds/src-noconflict/mode-yaml';
import 'ace-builds/src-noconflict/mode-django';
import 'ace-builds/src-noconflict/theme-github';
import { withI18n } from '@lingui/react';

import { t } from '@lingui/macro';
import styled from 'styled-components';
import debounce from '../../util/debounce';
@@ -81,11 +81,10 @@ function CodeEditor({
  rows,
  fullHeight,
  className,
  i18n,
}) {
  if (rows && typeof rows !== 'number' && rows !== 'auto') {
    // eslint-disable-next-line no-console
    console.warning(
    console.warn(
      `CodeEditor: Unexpected value for 'rows': ${rows}; expected number or 'auto'`
    );
  }
@@ -185,7 +184,7 @@ function CodeEditor({
        className="pf-c-form__helper-text keyboard-help-text"
        aria-live="polite"
      >
        {i18n._(t`Press Enter to edit. Press ESC to stop editing.`)}
        {t`Press Enter to edit. Press ESC to stop editing.`}
      </div>
    )}
  </>
@@ -210,4 +209,4 @@ CodeEditor.defaultProps = {
  className: '',
};

export default withI18n()(CodeEditor);
export default CodeEditor;

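Besides the i18n migration, the CodeEditor hunk fixes a real bug: console.warning is not a function, so the guard on the rows prop never actually logged anything. A standalone sketch of that guard, with the surrounding component omitted:

// --- illustrative sketch, extracted from the guard shown in the diff ---
// Warn (without throwing) when `rows` is neither a number nor the string 'auto'.
function validateRows(rows) {
  if (rows && typeof rows !== 'number' && rows !== 'auto') {
    // console.warn exists on the console object; console.warning does not,
    // which is the correction this hunk makes.
    // eslint-disable-next-line no-console
    console.warn(
      `CodeEditor: Unexpected value for 'rows': ${rows}; expected number or 'auto'`
    );
  }
}

validateRows(6);      // ok, no warning
validateRows('auto'); // ok, no warning
validateRows('six');  // logs the warning
// --- end sketch ---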
@@ -1,7 +1,7 @@
|
||||
import 'styled-components/macro';
|
||||
import React, { useState } from 'react';
|
||||
import { node, number, oneOfType, shape, string, arrayOf } from 'prop-types';
|
||||
import { withI18n } from '@lingui/react';
|
||||
|
||||
import { t } from '@lingui/macro';
|
||||
import {
|
||||
Split,
|
||||
@@ -23,15 +23,7 @@ import {
|
||||
import CodeEditor from './CodeEditor';
|
||||
import { JSON_MODE, YAML_MODE } from './constants';
|
||||
|
||||
function VariablesDetail({
|
||||
dataCy,
|
||||
helpText,
|
||||
value,
|
||||
label,
|
||||
rows,
|
||||
fullHeight,
|
||||
i18n,
|
||||
}) {
|
||||
function VariablesDetail({ dataCy, helpText, value, label, rows, fullHeight }) {
|
||||
const [mode, setMode] = useState(
|
||||
isJsonObject(value) || isJsonString(value) ? JSON_MODE : YAML_MODE
|
||||
);
|
||||
@@ -84,7 +76,6 @@ function VariablesDetail({
|
||||
setMode={setMode}
|
||||
currentValue={currentValue}
|
||||
onExpand={() => setIsExpanded(true)}
|
||||
i18n={i18n}
|
||||
/>
|
||||
</DetailName>
|
||||
<DetailValue
|
||||
@@ -107,7 +98,7 @@ function VariablesDetail({
|
||||
css="color: var(--pf-global--danger-color--100);
|
||||
font-size: var(--pf-global--FontSize--sm"
|
||||
>
|
||||
{i18n._(t`Error:`)} {error.message}
|
||||
{t`Error:`} {error.message}
|
||||
</div>
|
||||
)}
|
||||
</DetailValue>
|
||||
@@ -118,13 +109,13 @@ function VariablesDetail({
|
||||
onClose={() => setIsExpanded(false)}
|
||||
actions={[
|
||||
<Button
|
||||
aria-label={i18n._(t`Done`)}
|
||||
aria-label={t`Done`}
|
||||
key="select"
|
||||
variant="primary"
|
||||
onClick={() => setIsExpanded(false)}
|
||||
ouiaId={`${dataCy}-unexpand`}
|
||||
>
|
||||
{i18n._(t`Done`)}
|
||||
{t`Done`}
|
||||
</Button>,
|
||||
]}
|
||||
>
|
||||
@@ -137,7 +128,6 @@ function VariablesDetail({
|
||||
mode={mode}
|
||||
setMode={setMode}
|
||||
currentValue={currentValue}
|
||||
i18n={i18n}
|
||||
/>
|
||||
<CodeEditor
|
||||
id={`${dataCy}-preview-expanded`}
|
||||
@@ -166,16 +156,7 @@ VariablesDetail.defaultProps = {
|
||||
helpText: '',
|
||||
};
|
||||
|
||||
function ModeToggle({
|
||||
id,
|
||||
label,
|
||||
helpText,
|
||||
dataCy,
|
||||
mode,
|
||||
setMode,
|
||||
onExpand,
|
||||
i18n,
|
||||
}) {
|
||||
function ModeToggle({ id, label, helpText, dataCy, mode, setMode, onExpand }) {
|
||||
return (
|
||||
<Split hasGutter>
|
||||
<SplitItem isFilled>
|
||||
@@ -211,7 +192,7 @@ function ModeToggle({
|
||||
<SplitItem>
|
||||
<Button
|
||||
variant="plain"
|
||||
aria-label={i18n._(t`Expand input`)}
|
||||
aria-label={t`Expand input`}
|
||||
onClick={onExpand}
|
||||
ouiaId={`${dataCy}-expand`}
|
||||
>
|
||||
@@ -223,4 +204,4 @@ function ModeToggle({
|
||||
);
|
||||
}
|
||||
|
||||
export default withI18n()(VariablesDetail);
|
||||
export default VariablesDetail;
|
||||
|
||||
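VariablesDetail picks its initial editor mode with isJsonObject(value) || isJsonString(value), falling back to YAML. Those helpers and the mode constants are imported elsewhere in the file and their implementations are not shown in this diff; the sketch below is one plausible shape and should be read as an assumption, not the AWX code.

// --- illustrative sketch; helper implementations and constant values are assumptions ---
const JSON_MODE = 'javascript'; // the real values live in ./constants
const YAML_MODE = 'yaml';

function isJsonObject(value) {
  return typeof value === 'object' && value !== null;
}

function isJsonString(value) {
  if (typeof value !== 'string') return false;
  try {
    const parsed = JSON.parse(value);
    // Plain scalars ("42", "true") parse as JSON too; only treat
    // objects and arrays as JSON-formatted variable input.
    return typeof parsed === 'object' && parsed !== null;
  } catch (err) {
    return false;
  }
}

// Mirrors the useState initializer shown in the hunk above:
const initialModeFor = value =>
  isJsonObject(value) || isJsonString(value) ? JSON_MODE : YAML_MODE;
// --- end sketch ---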
@@ -1,6 +1,6 @@
|
||||
import React, { useState, useEffect, useCallback } from 'react';
|
||||
import { string, bool } from 'prop-types';
|
||||
import { withI18n } from '@lingui/react';
|
||||
import { string, bool, func, oneOf } from 'prop-types';
|
||||
|
||||
import { t } from '@lingui/macro';
|
||||
import { useField } from 'formik';
|
||||
import styled from 'styled-components';
|
||||
@@ -25,18 +25,19 @@ const StyledCheckboxField = styled(CheckboxField)`
|
||||
`;
|
||||
|
||||
function VariablesField({
|
||||
i18n,
|
||||
id,
|
||||
name,
|
||||
label,
|
||||
readOnly,
|
||||
promptId,
|
||||
tooltip,
|
||||
initialMode,
|
||||
onModeChange,
|
||||
}) {
|
||||
// track focus manually, because the Code Editor library doesn't wire
|
||||
// into Formik completely
|
||||
const [shouldValidate, setShouldValidate] = useState(false);
|
||||
const [mode, setMode] = useState(YAML_MODE);
|
||||
const [mode, setMode] = useState(initialMode || YAML_MODE);
|
||||
const validate = useCallback(
|
||||
value => {
|
||||
if (!shouldValidate) {
|
||||
@@ -62,6 +63,7 @@ function VariablesField({
|
||||
// mode's useState above couldn't be initialized to JSON_MODE because
|
||||
// the field value had to be defined below it
|
||||
setMode(JSON_MODE);
|
||||
onModeChange(JSON_MODE);
|
||||
helpers.setValue(JSON.stringify(JSON.parse(field.value), null, 2));
|
||||
}
|
||||
}, []); // eslint-disable-line react-hooks/exhaustive-deps
|
||||
@@ -84,6 +86,7 @@ function VariablesField({
|
||||
if (newMode === YAML_MODE && !isJsonEdited && lastYamlValue !== null) {
|
||||
helpers.setValue(lastYamlValue, false);
|
||||
setMode(newMode);
|
||||
onModeChange(newMode);
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -94,6 +97,7 @@ function VariablesField({
|
||||
: yamlToJson(field.value);
|
||||
helpers.setValue(newVal, false);
|
||||
setMode(newMode);
|
||||
onModeChange(newMode);
|
||||
} catch (err) {
|
||||
helpers.setError(err.message);
|
||||
}
|
||||
@@ -112,7 +116,6 @@ function VariablesField({
|
||||
return (
|
||||
<div>
|
||||
<VariablesFieldInternals
|
||||
i18n={i18n}
|
||||
id={id}
|
||||
name={name}
|
||||
label={label}
|
||||
@@ -132,19 +135,18 @@ function VariablesField({
|
||||
onClose={() => setIsExpanded(false)}
|
||||
actions={[
|
||||
<Button
|
||||
aria-label={i18n._(t`Done`)}
|
||||
aria-label={t`Done`}
|
||||
key="select"
|
||||
variant="primary"
|
||||
onClick={() => setIsExpanded(false)}
|
||||
ouiaId={`${id}-variables-unexpand`}
|
||||
>
|
||||
{i18n._(t`Done`)}
|
||||
{t`Done`}
|
||||
</Button>,
|
||||
]}
|
||||
>
|
||||
<div className="pf-c-form">
|
||||
<VariablesFieldInternals
|
||||
i18n={i18n}
|
||||
id={`${id}-expanded`}
|
||||
name={name}
|
||||
label={label}
|
||||
@@ -173,14 +175,17 @@ VariablesField.propTypes = {
|
||||
label: string.isRequired,
|
||||
readOnly: bool,
|
||||
promptId: string,
|
||||
initialMode: oneOf([YAML_MODE, JSON_MODE]),
|
||||
onModeChange: func,
|
||||
};
|
||||
VariablesField.defaultProps = {
|
||||
readOnly: false,
|
||||
promptId: null,
|
||||
initialMode: YAML_MODE,
|
||||
onModeChange: () => {},
|
||||
};
|
||||
|
||||
function VariablesFieldInternals({
|
||||
i18n,
|
||||
id,
|
||||
name,
|
||||
label,
|
||||
@@ -200,7 +205,11 @@ function VariablesFieldInternals({
|
||||
if (mode === YAML_MODE) {
|
||||
return;
|
||||
}
|
||||
helpers.setValue(JSON.stringify(JSON.parse(field.value), null, 2));
|
||||
try {
|
||||
helpers.setValue(JSON.stringify(JSON.parse(field.value), null, 2));
|
||||
} catch (e) {
|
||||
helpers.setError(e.message);
|
||||
}
|
||||
}, []); // eslint-disable-line react-hooks/exhaustive-deps
|
||||
|
||||
return (
|
||||
@@ -227,14 +236,14 @@ function VariablesFieldInternals({
|
||||
{promptId && (
|
||||
<StyledCheckboxField
|
||||
id="template-ask-variables-on-launch"
|
||||
label={i18n._(t`Prompt on launch`)}
|
||||
label={t`Prompt on launch`}
|
||||
name="ask_variables_on_launch"
|
||||
/>
|
||||
)}
|
||||
{onExpand && (
|
||||
<Button
|
||||
variant="plain"
|
||||
aria-label={i18n._(t`Expand input`)}
|
||||
aria-label={t`Expand input`}
|
||||
onClick={onExpand}
|
||||
ouiaId={`${id}-variables-expand`}
|
||||
>
|
||||
@@ -257,4 +266,4 @@ function VariablesFieldInternals({
|
||||
);
|
||||
}
|
||||
|
||||
export default withI18n()(VariablesField);
|
||||
export default VariablesField;
|
||||
|
||||
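The VariablesField hunk adds two optional props, initialMode and onModeChange, so a parent form can remember which editor mode (YAML or JSON) the user last picked. A hedged usage sketch; the import paths, form values, and wrapper component are assumptions, and the Formik provider is required because VariablesField uses useField internally.

// --- illustrative usage sketch; paths and surrounding form are assumptions ---
import React, { useState } from 'react';
import { Formik } from 'formik';
import { Form } from '@patternfly/react-core';
import VariablesField from '../components/CodeEditor/VariablesField';
import { YAML_MODE } from '../components/CodeEditor/constants';

function ExtraVarsForm({ onSubmit }) {
  // Remember the editor mode the user last picked so a re-mount restores it.
  const [mode, setMode] = useState(YAML_MODE);

  return (
    <Formik initialValues={{ extra_vars: '---' }} onSubmit={onSubmit}>
      {formik => (
        <Form onSubmit={formik.handleSubmit}>
          <VariablesField
            id="extra-vars"
            name="extra_vars"
            label="Variables"
            initialMode={mode}
            onModeChange={setMode}
          />
        </Form>
      )}
    </Formik>
  );
}
export default ExtraVarsForm;
// --- end sketch ---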
@@ -1,6 +1,6 @@
import React from 'react';
import { t } from '@lingui/macro';
import { withI18n } from '@lingui/react';

import {
  Title,
  EmptyState,
@@ -9,15 +9,15 @@
} from '@patternfly/react-core';
import { CubesIcon } from '@patternfly/react-icons';

const ContentEmpty = ({ i18n, title = '', message = '' }) => (
const ContentEmpty = ({ title = '', message = '' }) => (
  <EmptyState variant="full">
    <EmptyStateIcon icon={CubesIcon} />
    <Title size="lg" headingLevel="h3">
      {title || i18n._(t`No items found.`)}
      {title || t`No items found.`}
    </Title>
    <EmptyStateBody>{message}</EmptyStateBody>
  </EmptyState>
);

export { ContentEmpty as _ContentEmpty };
export default withI18n()(ContentEmpty);
export default ContentEmpty;

@@ -2,7 +2,7 @@ import React, { Fragment } from 'react';
|
||||
import { Link, Redirect } from 'react-router-dom';
|
||||
import { bool, instanceOf } from 'prop-types';
|
||||
import { t } from '@lingui/macro';
|
||||
import { withI18n } from '@lingui/react';
|
||||
|
||||
import {
|
||||
Title,
|
||||
EmptyState,
|
||||
@@ -15,10 +15,10 @@ import ErrorDetail from '../ErrorDetail';
|
||||
|
||||
async function logout() {
|
||||
await RootAPI.logout();
|
||||
window.location.replace('/#/login');
|
||||
window.location.replace('#/login');
|
||||
}
|
||||
|
||||
function ContentError({ error, children, isNotFound, i18n }) {
|
||||
function ContentError({ error, children, isNotFound }) {
|
||||
if (error && error.response && error.response.status === 401) {
|
||||
if (!error.response.headers['session-timeout']) {
|
||||
logout();
|
||||
@@ -36,17 +36,13 @@ function ContentError({ error, children, isNotFound, i18n }) {
|
||||
<EmptyState variant="full">
|
||||
<EmptyStateIcon icon={ExclamationTriangleIcon} />
|
||||
<Title size="lg" headingLevel="h3">
|
||||
{is404 ? i18n._(t`Not Found`) : i18n._(t`Something went wrong...`)}
|
||||
{is404 ? t`Not Found` : t`Something went wrong...`}
|
||||
</Title>
|
||||
<EmptyStateBody>
|
||||
{is404
|
||||
? i18n._(t`The page you requested could not be found.`)
|
||||
: i18n._(
|
||||
t`There was an error loading this content. Please reload the page.`
|
||||
)}{' '}
|
||||
{children || (
|
||||
<Link to="/home">{i18n._(t`Back to Dashboard.`)}</Link>
|
||||
)}
|
||||
? t`The page you requested could not be found.`
|
||||
: t`There was an error loading this content. Please reload the page.`}{' '}
|
||||
{children || <Link to="/home">{t`Back to Dashboard.`}</Link>}
|
||||
</EmptyStateBody>
|
||||
{error && <ErrorDetail error={error} />}
|
||||
</EmptyState>
|
||||
@@ -64,4 +60,4 @@ ContentError.defaultProps = {
|
||||
};
|
||||
|
||||
export { ContentError as _ContentError };
|
||||
export default withI18n()(ContentError);
|
||||
export default ContentError;
|
||||
|
||||
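ContentError keeps its pre-existing 401 handling: a 401 response without a session-timeout header means the session is invalid, so the app logs out and redirects to the login hash route. A condensed sketch of that branch; the import path for RootAPI and the function wrapper are assumptions, the error shape is the usual axios response object.

// --- illustrative sketch of the 401 branch shown in the diff; import path assumed ---
import { RootAPI } from '../../api';

async function handleAuthError(error) {
  if (error?.response?.status !== 401) {
    return false; // not an auth problem; let the normal error UI render
  }
  // A 'session-timeout' header means the session simply expired and the app
  // shows its own timeout flow; anything else is treated as an invalid session.
  if (!error.response.headers['session-timeout']) {
    await RootAPI.logout();
    window.location.replace('#/login');
  }
  return true;
}
// --- end sketch ---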
@@ -1,5 +1,5 @@
|
||||
import React, { useEffect } from 'react';
|
||||
import { withI18n } from '@lingui/react';
|
||||
|
||||
import { t } from '@lingui/macro';
|
||||
import PropTypes from 'prop-types';
|
||||
|
||||
@@ -16,7 +16,7 @@ function CopyButton({
|
||||
onCopyStart,
|
||||
onCopyFinish,
|
||||
errorMessage,
|
||||
i18n,
|
||||
|
||||
ouiaId,
|
||||
}) {
|
||||
const { isLoading, error: copyError, request: copyItemToAPI } = useRequest(
|
||||
@@ -38,17 +38,17 @@ function CopyButton({
|
||||
id={id}
|
||||
ouiaId={ouiaId}
|
||||
isDisabled={isLoading || isDisabled}
|
||||
aria-label={i18n._(t`Copy`)}
|
||||
aria-label={t`Copy`}
|
||||
variant="plain"
|
||||
onClick={copyItemToAPI}
|
||||
>
|
||||
<CopyIcon />
|
||||
</Button>
|
||||
<AlertModal
|
||||
aria-label={i18n._(t`Copy Error`)}
|
||||
aria-label={t`Copy Error`}
|
||||
isOpen={error}
|
||||
variant="error"
|
||||
title={i18n._(t`Error!`)}
|
||||
title={t`Error!`}
|
||||
onClose={dismissError}
|
||||
>
|
||||
{errorMessage}
|
||||
@@ -72,4 +72,4 @@ CopyButton.defaultProps = {
|
||||
ouiaId: null,
|
||||
};
|
||||
|
||||
export default withI18n()(CopyButton);
|
||||
export default CopyButton;
|
||||
|
||||
@@ -1,15 +1,14 @@
import React from 'react';
import { shape } from 'prop-types';
import { withI18n } from '@lingui/react';

import { t } from '@lingui/macro';
import { Chip } from '@patternfly/react-core';
import { Credential } from '../../types';
import { toTitleCase } from '../../util/strings';

function CredentialChip({ credential, i18n, i18nHash, ...props }) {
function CredentialChip({ credential, ...props }) {
  let type;
  if (credential.cloud) {
    type = i18n._(t`Cloud`);
    type = t`Cloud`;
  } else if (credential.kind === 'aws' || credential.kind === 'ssh') {
    type = credential.kind.toUpperCase();
  } else {
@@ -32,8 +31,7 @@ function CredentialChip({ credential, i18n, i18nHash, ...props }) {
}
CredentialChip.propTypes = {
  credential: Credential.isRequired,
  i18n: shape({}).isRequired,
};

export { CredentialChip as _CredentialChip };
export default withI18n()(CredentialChip);
export default CredentialChip;

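The chip-label logic in CredentialChip is unchanged apart from i18n: cloud credentials are labelled "Cloud" and aws/ssh kinds are upper-cased. The else branch is outside this hunk, so what it computes is not shown; the sketch below assumes it title-cases the kind via the imported toTitleCase helper, whose implementation here is also an assumption.

// --- illustrative sketch; the else branch and toTitleCase are assumptions ---
import { t } from '@lingui/macro';

const toTitleCase = s =>
  s.replace(/_/g, ' ').replace(/\b\w/g, c => c.toUpperCase());

function credentialTypeLabel(credential) {
  if (credential.cloud) {
    return t`Cloud`;
  }
  if (credential.kind === 'aws' || credential.kind === 'ssh') {
    return credential.kind.toUpperCase();
  }
  // Assumed fallback for every other credential kind.
  return toTitleCase(credential.kind);
}
// --- end sketch ---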
@@ -1,6 +1,6 @@
|
||||
import React, { useEffect, useState } from 'react';
|
||||
import PropTypes from 'prop-types';
|
||||
import { withI18n } from '@lingui/react';
|
||||
|
||||
import { t } from '@lingui/macro';
|
||||
import {
|
||||
Checkbox,
|
||||
@@ -37,7 +37,7 @@ function DataListToolbar({
|
||||
onExpand,
|
||||
onSelectAll,
|
||||
additionalControls,
|
||||
i18n,
|
||||
|
||||
qsConfig,
|
||||
pagination,
|
||||
}) {
|
||||
@@ -62,7 +62,7 @@ function DataListToolbar({
|
||||
id={`${qsConfig.namespace}-list-toolbar`}
|
||||
clearAllFilters={clearAllFilters}
|
||||
collapseListedFiltersBreakpoint="lg"
|
||||
clearFiltersButtonText={i18n._(t`Clear all filters`)}
|
||||
clearFiltersButtonText={t`Clear all filters`}
|
||||
>
|
||||
<ToolbarContent>
|
||||
{showSelectAll && (
|
||||
@@ -71,7 +71,7 @@ function DataListToolbar({
|
||||
<Checkbox
|
||||
isChecked={isAllSelected}
|
||||
onChange={onSelectAll}
|
||||
aria-label={i18n._(t`Select all`)}
|
||||
aria-label={t`Select all`}
|
||||
id="select-all"
|
||||
/>
|
||||
</ToolbarItem>
|
||||
@@ -83,7 +83,7 @@ function DataListToolbar({
|
||||
qsConfig={qsConfig}
|
||||
columns={[
|
||||
...searchColumns,
|
||||
{ name: i18n._(t`Advanced`), key: 'advanced' },
|
||||
{ name: t`Advanced`, key: 'advanced' },
|
||||
]}
|
||||
searchableKeys={searchableKeys}
|
||||
relatedSearchableKeys={relatedSearchableKeys}
|
||||
@@ -123,6 +123,7 @@ function DataListToolbar({
|
||||
<Dropdown
|
||||
toggle={
|
||||
<KebabToggle
|
||||
data-cy="actions-kebab-toogle"
|
||||
onToggle={isOpen => {
|
||||
if (!isKebabModalOpen) {
|
||||
setIsKebabOpen(isOpen);
|
||||
@@ -190,4 +191,4 @@ DataListToolbar.defaultProps = {
|
||||
additionalControls: [],
|
||||
};
|
||||
|
||||
export default withI18n()(DataListToolbar);
|
||||
export default DataListToolbar;
|
||||
|
||||
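The DataListToolbar hunk adds a guard so the kebab dropdown is not toggled closed while a modal launched from one of its items is still open. A reduced sketch of that interaction with PatternFly's Dropdown and KebabToggle; the component name and the way the modal flag is set are assumptions.

// --- illustrative sketch; the modal-state wiring is an assumption ---
import React, { useState } from 'react';
import { Dropdown, DropdownItem, KebabToggle } from '@patternfly/react-core';

function ToolbarKebab({ onDelete }) {
  const [isKebabOpen, setIsKebabOpen] = useState(false);
  const [isKebabModalOpen, setIsKebabModalOpen] = useState(false);

  return (
    <Dropdown
      isOpen={isKebabOpen}
      isPlain
      toggle={
        <KebabToggle
          onToggle={isOpen => {
            // Ignore toggle events while a modal opened from the kebab is
            // showing, otherwise the dropdown would close underneath it.
            if (!isKebabModalOpen) {
              setIsKebabOpen(isOpen);
            }
          }}
        />
      }
      dropdownItems={[
        <DropdownItem
          key="delete"
          component="button"
          onClick={() => {
            setIsKebabModalOpen(true); // the confirmation modal clears this on close
            onDelete();
          }}
        >
          Delete
        </DropdownItem>,
      ]}
    />
  );
}
export default ToolbarKebab;
// --- end sketch ---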
@@ -1,6 +1,6 @@
|
||||
import React, { useState } from 'react';
|
||||
import PropTypes from 'prop-types';
|
||||
import { withI18n } from '@lingui/react';
|
||||
|
||||
import { t } from '@lingui/macro';
|
||||
import styled from 'styled-components';
|
||||
import { Button, Badge, Alert, Tooltip } from '@patternfly/react-core';
|
||||
@@ -20,7 +20,7 @@ function DeleteButton({
|
||||
onConfirm,
|
||||
modalTitle,
|
||||
name,
|
||||
i18n,
|
||||
|
||||
variant,
|
||||
children,
|
||||
isDisabled,
|
||||
@@ -54,7 +54,7 @@ function DeleteButton({
|
||||
return (
|
||||
<AlertModal
|
||||
isOpen={deleteMessageError}
|
||||
title={i18n._(t`Error!`)}
|
||||
title={t`Error!`}
|
||||
onClose={() => {
|
||||
toggleModal(false);
|
||||
setDeleteMessageError();
|
||||
@@ -73,12 +73,12 @@ function DeleteButton({
|
||||
isLoading={isLoading}
|
||||
spinnerAriaValueText={isLoading ? 'Loading' : undefined}
|
||||
variant={variant || 'secondary'}
|
||||
aria-label={i18n._(t`Delete`)}
|
||||
aria-label={t`Delete`}
|
||||
isDisabled={isDisabled}
|
||||
onClick={() => toggleModal(true)}
|
||||
ouiaId={ouiaId}
|
||||
>
|
||||
{children || i18n._(t`Delete`)}
|
||||
{children || t`Delete`}
|
||||
</Button>
|
||||
</div>
|
||||
</Tooltip>
|
||||
@@ -88,11 +88,11 @@ function DeleteButton({
|
||||
isLoading={isLoading}
|
||||
spinnerAriaValueText={isLoading ? 'Loading' : undefined}
|
||||
variant={variant || 'secondary'}
|
||||
aria-label={i18n._(t`Delete`)}
|
||||
aria-label={t`Delete`}
|
||||
isDisabled={isDisabled}
|
||||
onClick={() => toggleModal(true)}
|
||||
>
|
||||
{children || i18n._(t`Delete`)}
|
||||
{children || t`Delete`}
|
||||
</Button>
|
||||
)}
|
||||
<AlertModal
|
||||
@@ -105,27 +105,27 @@ function DeleteButton({
|
||||
ouiaId="delete-modal-confirm"
|
||||
key="delete"
|
||||
variant="danger"
|
||||
aria-label={i18n._(t`Confirm Delete`)}
|
||||
aria-label={t`Confirm Delete`}
|
||||
isDisabled={isDisabled}
|
||||
onClick={() => {
|
||||
onConfirm();
|
||||
toggleModal(false);
|
||||
}}
|
||||
>
|
||||
{i18n._(t`Delete`)}
|
||||
{t`Delete`}
|
||||
</Button>,
|
||||
<Button
|
||||
ouiaId="delete-modal-cancel"
|
||||
key="cancel"
|
||||
variant="link"
|
||||
aria-label={i18n._(t`Cancel`)}
|
||||
aria-label={t`Cancel`}
|
||||
onClick={() => toggleModal(false)}
|
||||
>
|
||||
{i18n._(t`Cancel`)}
|
||||
{t`Cancel`}
|
||||
</Button>,
|
||||
]}
|
||||
>
|
||||
{i18n._(t`Are you sure you want to delete:`)}
|
||||
{t`Are you sure you want to delete:`}
|
||||
<br />
|
||||
<strong>{name}</strong>
|
||||
{Object.values(deleteDetails).length > 0 && (
|
||||
@@ -158,4 +158,4 @@ DeleteButton.defaultProps = {
|
||||
ouiaId: null,
|
||||
};
|
||||
|
||||
export default withI18n()(DeleteButton);
|
||||
export default DeleteButton;
|
||||
|
||||
@@ -1,5 +1,5 @@
import React from 'react';
import { withI18n } from '@lingui/react';

import { t } from '@lingui/macro';
import { node } from 'prop-types';
import styled from 'styled-components';
@@ -11,12 +11,12 @@ const Detail = styled(_Detail)`
  }
`;

function DeletedDetail({ i18n, label }) {
  return <Detail label={label} value={i18n._(t`Deleted`)} />;
function DeletedDetail({ label }) {
  return <Detail label={label} value={t`Deleted`} />;
}

DeletedDetail.propTypes = {
  label: node.isRequired,
};

export default withI18n()(DeletedDetail);
export default DeletedDetail;

@@ -32,13 +32,13 @@ const getLaunchedByDetails = ({ summary_fields = {}, related = {} }) => {
  return { link, value };
};

export default function LaunchedByDetail({ job, i18n }) {
export default function LaunchedByDetail({ job }) {
  const { value: launchedByValue, link: launchedByLink } =
    getLaunchedByDetails(job) || {};

  return (
    <Detail
      label={i18n._(t`Launched By`)}
      label={t`Launched By`}
      value={
        launchedByLink ? (
          <Link to={`${launchedByLink}`}>{launchedByValue}</Link>

@@ -1,6 +1,6 @@
|
||||
import React, { useState, useEffect, useContext } from 'react';
|
||||
import { arrayOf, func, shape, string, oneOfType, number } from 'prop-types';
|
||||
import { withI18n } from '@lingui/react';
|
||||
|
||||
import { t } from '@lingui/macro';
|
||||
import { Button, Tooltip, DropdownItem } from '@patternfly/react-core';
|
||||
import styled from 'styled-components';
|
||||
@@ -13,10 +13,9 @@ const ModalNote = styled.div`
|
||||
`;
|
||||
|
||||
function DisassociateButton({
|
||||
i18n,
|
||||
itemsToDisassociate = [],
|
||||
modalNote = '',
|
||||
modalTitle = i18n._(t`Disassociate?`),
|
||||
modalTitle = t`Disassociate?`,
|
||||
onDisassociate,
|
||||
verifyCannotDisassociate = true,
|
||||
}) {
|
||||
@@ -48,18 +47,16 @@ function DisassociateButton({
|
||||
if (itemsToDisassociate.some(cannotDisassociate)) {
|
||||
return (
|
||||
<div>
|
||||
{i18n._(
|
||||
t`You do not have permission to disassociate the following: ${itemsUnableToDisassociate}`
|
||||
)}
|
||||
{t`You do not have permission to disassociate the following: ${itemsUnableToDisassociate}`}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
if (itemsToDisassociate.length) {
|
||||
return i18n._(t`Disassociate`);
|
||||
return t`Disassociate`;
|
||||
}
|
||||
return i18n._(t`Select a row to disassociate`);
|
||||
return t`Select a row to disassociate`;
|
||||
}
|
||||
|
||||
let isDisabled = false;
|
||||
@@ -79,12 +76,12 @@ function DisassociateButton({
|
||||
{isKebabified ? (
|
||||
<DropdownItem
|
||||
key="add"
|
||||
aria-label={i18n._(t`disassociate`)}
|
||||
aria-label={t`disassociate`}
|
||||
isDisabled={isDisabled}
|
||||
component="button"
|
||||
onClick={() => setIsOpen(true)}
|
||||
>
|
||||
{i18n._(t`Disassociate`)}
|
||||
{t`Disassociate`}
|
||||
</DropdownItem>
|
||||
) : (
|
||||
<Tooltip content={renderTooltip()} position="top">
|
||||
@@ -92,11 +89,11 @@ function DisassociateButton({
|
||||
<Button
|
||||
ouiaId="disassociate-button"
|
||||
variant="secondary"
|
||||
aria-label={i18n._(t`Disassociate`)}
|
||||
aria-label={t`Disassociate`}
|
||||
onClick={() => setIsOpen(true)}
|
||||
isDisabled={isDisabled}
|
||||
>
|
||||
{i18n._(t`Disassociate`)}
|
||||
{t`Disassociate`}
|
||||
</Button>
|
||||
</div>
|
||||
</Tooltip>
|
||||
@@ -113,25 +110,25 @@ function DisassociateButton({
|
||||
ouiaId="disassociate-modal-confirm"
|
||||
key="disassociate"
|
||||
variant="danger"
|
||||
aria-label={i18n._(t`confirm disassociate`)}
|
||||
aria-label={t`confirm disassociate`}
|
||||
onClick={handleDisassociate}
|
||||
>
|
||||
{i18n._(t`Disassociate`)}
|
||||
{t`Disassociate`}
|
||||
</Button>,
|
||||
<Button
|
||||
ouiaId="disassociate-modal-cancel"
|
||||
key="cancel"
|
||||
variant="link"
|
||||
aria-label={i18n._(t`Cancel`)}
|
||||
aria-label={t`Cancel`}
|
||||
onClick={() => setIsOpen(false)}
|
||||
>
|
||||
{i18n._(t`Cancel`)}
|
||||
{t`Cancel`}
|
||||
</Button>,
|
||||
]}
|
||||
>
|
||||
{modalNote && <ModalNote>{modalNote}</ModalNote>}
|
||||
|
||||
<div>{i18n._(t`This action will disassociate the following:`)}</div>
|
||||
<div>{t`This action will disassociate the following:`}</div>
|
||||
|
||||
{itemsToDisassociate.map(item => (
|
||||
<span key={item.id}>
|
||||
@@ -171,4 +168,4 @@ DisassociateButton.propTypes = {
|
||||
onDisassociate: func.isRequired,
|
||||
};
|
||||
|
||||
export default withI18n()(DisassociateButton);
|
||||
export default DisassociateButton;
|
||||
|
||||
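DisassociateButton shows two details of the macro form worth noting: a t template literal can be used directly as a default prop value (modalTitle = t`Disassociate?`), and it interpolates runtime values the same way the old i18n._(t`...`) calls did. A small hedged sketch; the function, its arguments, and the sample data are made up.

// --- illustrative sketch; function and data are made up ---
import { t } from '@lingui/macro';

function disassociateWarning(itemsUnableToDisassociate, modalTitle = t`Disassociate?`) {
  // Interpolated expressions become placeholders in the extracted message
  // catalog, so translators still see one complete sentence.
  return {
    title: modalTitle,
    body: t`You do not have permission to disassociate the following: ${itemsUnableToDisassociate}`,
  };
}

disassociateWarning('host-a, host-b');
// --- end sketch ---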
@@ -1,7 +1,7 @@
|
||||
import React, { useState, Fragment } from 'react';
|
||||
import PropTypes from 'prop-types';
|
||||
import styled from 'styled-components';
|
||||
import { withI18n } from '@lingui/react';
|
||||
|
||||
import { t } from '@lingui/macro';
|
||||
|
||||
import {
|
||||
@@ -32,7 +32,7 @@ const Expandable = styled(PFExpandable)`
|
||||
}
|
||||
`;
|
||||
|
||||
function ErrorDetail({ error, i18n }) {
|
||||
function ErrorDetail({ error }) {
|
||||
const { response } = error;
|
||||
const [isExpanded, setIsExpanded] = useState(false);
|
||||
|
||||
@@ -70,7 +70,7 @@ function ErrorDetail({ error, i18n }) {
|
||||
|
||||
return (
|
||||
<Expandable
|
||||
toggleText={i18n._(t`Details`)}
|
||||
toggleText={t`Details`}
|
||||
onToggle={handleToggle}
|
||||
isExpanded={isExpanded}
|
||||
>
|
||||
@@ -87,4 +87,4 @@ ErrorDetail.propTypes = {
|
||||
error: PropTypes.instanceOf(Error).isRequired,
|
||||
};
|
||||
|
||||
export default withI18n()(ErrorDetail);
|
||||
export default ErrorDetail;
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import React from 'react';
|
||||
import { bool, string } from 'prop-types';
|
||||
import { Link } from 'react-router-dom';
|
||||
import { withI18n } from '@lingui/react';
|
||||
|
||||
import { t } from '@lingui/macro';
|
||||
import { Tooltip } from '@patternfly/react-core';
|
||||
import styled from 'styled-components';
|
||||
@@ -20,11 +20,10 @@ function ExecutionEnvironmentDetail({
|
||||
virtualEnvironment,
|
||||
executionEnvironment,
|
||||
isDefaultEnvironment,
|
||||
i18n,
|
||||
}) {
|
||||
const label = isDefaultEnvironment
|
||||
? i18n._(t`Default Execution Environment`)
|
||||
: i18n._(t`Execution Environment`);
|
||||
? t`Default Execution Environment`
|
||||
: t`Execution Environment`;
|
||||
|
||||
if (executionEnvironment) {
|
||||
return (
|
||||
@@ -47,12 +46,10 @@ function ExecutionEnvironmentDetail({
|
||||
label={label}
|
||||
value={
|
||||
<>
|
||||
{i18n._(t`Missing resource`)}
|
||||
{t`Missing resource`}
|
||||
<span>
|
||||
<Tooltip
|
||||
content={i18n._(
|
||||
t`Custom virtual environment ${virtualEnvironment} must be replaced by an execution environment.`
|
||||
)}
|
||||
content={t`Custom virtual environment ${virtualEnvironment} must be replaced by an execution environment.`}
|
||||
position="right"
|
||||
>
|
||||
<ExclamationTriangleIcon />
|
||||
@@ -79,4 +76,4 @@ ExecutionEnvironmentDetail.defaultProps = {
|
||||
virtualEnvironment: '',
|
||||
};
|
||||
|
||||
export default withI18n()(ExecutionEnvironmentDetail);
|
||||
export default ExecutionEnvironmentDetail;
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import React, { Fragment } from 'react';
|
||||
import PropTypes from 'prop-types';
|
||||
import { withI18n } from '@lingui/react';
|
||||
|
||||
import { t } from '@lingui/macro';
|
||||
import {
|
||||
Button as PFButton,
|
||||
@@ -31,14 +31,14 @@ const ToolbarItem = styled(PFToolbarItem)`
|
||||
|
||||
// TODO: Recommend renaming this component to avoid confusion
|
||||
// with ExpandingContainer
|
||||
function ExpandCollapse({ isCompact, onCompact, onExpand, i18n }) {
|
||||
function ExpandCollapse({ isCompact, onCompact, onExpand }) {
|
||||
return (
|
||||
<Fragment>
|
||||
<ToolbarItem>
|
||||
<Button
|
||||
ouiaId="toolbar-collapse-button"
|
||||
variant="plain"
|
||||
aria-label={i18n._(t`Collapse`)}
|
||||
aria-label={t`Collapse`}
|
||||
onClick={onCompact}
|
||||
isActive={isCompact}
|
||||
>
|
||||
@@ -49,7 +49,7 @@ function ExpandCollapse({ isCompact, onCompact, onExpand, i18n }) {
|
||||
<Button
|
||||
ouiaId="toolbar-expand-button"
|
||||
variant="plain"
|
||||
aria-label={i18n._(t`Expand`)}
|
||||
aria-label={t`Expand`}
|
||||
onClick={onExpand}
|
||||
isActive={!isCompact}
|
||||
>
|
||||
@@ -70,4 +70,4 @@ ExpandCollapse.defaultProps = {
|
||||
isCompact: true,
|
||||
};
|
||||
|
||||
export default withI18n()(ExpandCollapse);
|
||||
export default ExpandCollapse;
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import React from 'react';
|
||||
import { bool, node, string } from 'prop-types';
|
||||
import { withI18n } from '@lingui/react';
|
||||
|
||||
import { t } from '@lingui/macro';
|
||||
import styled from 'styled-components';
|
||||
import { CheckboxField } from '../FormField';
|
||||
@@ -19,7 +19,6 @@ const StyledCheckboxField = styled(CheckboxField)`
|
||||
function FieldWithPrompt({
|
||||
children,
|
||||
fieldId,
|
||||
i18n,
|
||||
isRequired,
|
||||
label,
|
||||
promptId,
|
||||
@@ -44,7 +43,7 @@ function FieldWithPrompt({
|
||||
<StyledCheckboxField
|
||||
isDisabled={isDisabled}
|
||||
id={promptId}
|
||||
label={i18n._(t`Prompt on launch`)}
|
||||
label={t`Prompt on launch`}
|
||||
name={promptName}
|
||||
/>
|
||||
</FieldHeader>
|
||||
@@ -67,4 +66,4 @@ FieldWithPrompt.defaultProps = {
|
||||
tooltip: null,
|
||||
};
|
||||
|
||||
export default withI18n()(FieldWithPrompt);
|
||||
export default FieldWithPrompt;
|
||||
|
||||
@@ -1,32 +1,32 @@
|
||||
import React from 'react';
|
||||
import PropTypes from 'prop-types';
|
||||
import { withI18n } from '@lingui/react';
|
||||
|
||||
import { t } from '@lingui/macro';
|
||||
import { ActionGroup, Button } from '@patternfly/react-core';
|
||||
import { FormFullWidthLayout } from '../FormLayout';
|
||||
|
||||
const FormActionGroup = ({ onCancel, onSubmit, submitDisabled, i18n }) => {
|
||||
const FormActionGroup = ({ onCancel, onSubmit, submitDisabled }) => {
|
||||
return (
|
||||
<FormFullWidthLayout>
|
||||
<ActionGroup>
|
||||
<Button
|
||||
ouiaId="form-save-button"
|
||||
aria-label={i18n._(t`Save`)}
|
||||
aria-label={t`Save`}
|
||||
variant="primary"
|
||||
type="button"
|
||||
onClick={onSubmit}
|
||||
isDisabled={submitDisabled}
|
||||
>
|
||||
{i18n._(t`Save`)}
|
||||
{t`Save`}
|
||||
</Button>
|
||||
<Button
|
||||
ouiaId="form-cancel-button"
|
||||
aria-label={i18n._(t`Cancel`)}
|
||||
aria-label={t`Cancel`}
|
||||
variant="link"
|
||||
type="button"
|
||||
onClick={onCancel}
|
||||
>
|
||||
{i18n._(t`Cancel`)}
|
||||
{t`Cancel`}
|
||||
</Button>
|
||||
</ActionGroup>
|
||||
</FormFullWidthLayout>
|
||||
@@ -43,4 +43,4 @@ FormActionGroup.defaultProps = {
|
||||
submitDisabled: false,
|
||||
};
|
||||
|
||||
export default withI18n()(FormActionGroup);
|
||||
export default FormActionGroup;
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import React, { useState } from 'react';
|
||||
import PropTypes from 'prop-types';
|
||||
import { withI18n } from '@lingui/react';
|
||||
|
||||
import { t } from '@lingui/macro';
|
||||
import { useField } from 'formik';
|
||||
import {
|
||||
@@ -12,15 +12,7 @@ import {
|
||||
import { EyeIcon, EyeSlashIcon } from '@patternfly/react-icons';
|
||||
|
||||
function PasswordInput(props) {
|
||||
const {
|
||||
autocomplete,
|
||||
id,
|
||||
name,
|
||||
validate,
|
||||
isRequired,
|
||||
isDisabled,
|
||||
i18n,
|
||||
} = props;
|
||||
const { autocomplete, id, name, validate, isRequired, isDisabled } = props;
|
||||
const [inputType, setInputType] = useState('password');
|
||||
const [field, meta] = useField({ name, validate });
|
||||
|
||||
@@ -32,13 +24,11 @@ function PasswordInput(props) {
|
||||
|
||||
return (
|
||||
<>
|
||||
<Tooltip
|
||||
content={inputType === 'password' ? i18n._(t`Show`) : i18n._(t`Hide`)}
|
||||
>
|
||||
<Tooltip content={inputType === 'password' ? t`Show` : t`Hide`}>
|
||||
<Button
|
||||
ouiaId={`${id}-toggle`}
|
||||
variant={ButtonVariant.control}
|
||||
aria-label={i18n._(t`Toggle Password`)}
|
||||
aria-label={t`Toggle Password`}
|
||||
onClick={handlePasswordToggle}
|
||||
isDisabled={isDisabled}
|
||||
>
|
||||
@@ -80,4 +70,4 @@ PasswordInput.defaultProps = {
|
||||
isDisabled: false,
|
||||
};
|
||||
|
||||
export default withI18n()(PasswordInput);
|
||||
export default PasswordInput;
|
||||
|
||||
@@ -52,6 +52,9 @@ function parseFieldErrors(obj, formValues) {
      }
      formErrors = formErrors.concat(parsed.formErrors);
    }
    if (typeof formValues[key] === 'boolean') {
      formErrors = formErrors.concat(value);
    }
  });

  return { fieldErrors, formErrors };

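The small parseFieldErrors change routes API errors for boolean form values into the form-level error list, since a checkbox has no field beneath it where an inline error could render. A self-contained sketch of a simplified parser with that branch; the real function shown above also recurses into nested objects, which this sketch omits.

// --- illustrative sketch of a simplified parser; not the full AWX implementation ---
function parseFieldErrorsSimple(apiErrors, formValues) {
  const fieldErrors = {};
  let formErrors = [];

  Object.entries(apiErrors).forEach(([key, value]) => {
    if (typeof formValues[key] === 'boolean') {
      // Checkboxes have nowhere sensible to show an inline message,
      // so surface these at the top of the form instead.
      formErrors = formErrors.concat(value);
    } else {
      fieldErrors[key] = Array.isArray(value) ? value.join(' ') : value;
    }
  });

  return { fieldErrors, formErrors };
}

// Example:
parseFieldErrorsSimple(
  { name: ['This field may not be blank.'], diff_mode: ['Invalid value.'] },
  { name: '', diff_mode: true }
);
// => { fieldErrors: { name: 'This field may not be blank.' }, formErrors: ['Invalid value.'] }
// --- end sketch ---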
@@ -1,7 +1,7 @@
|
||||
import React, { useState } from 'react';
|
||||
import { bool, func, shape } from 'prop-types';
|
||||
import { Formik, useField } from 'formik';
|
||||
import { withI18n } from '@lingui/react';
|
||||
|
||||
import { t } from '@lingui/macro';
|
||||
|
||||
import { Form, FormGroup } from '@patternfly/react-core';
|
||||
@@ -13,24 +13,22 @@ import { FormColumnLayout, FormFullWidthLayout } from '../FormLayout';
|
||||
import Popover from '../Popover';
|
||||
import { required } from '../../util/validators';
|
||||
|
||||
const InventoryLookupField = withI18n()(({ i18n, host }) => {
|
||||
const InventoryLookupField = ({ host }) => {
|
||||
const [inventory, setInventory] = useState(
|
||||
host ? host.summary_fields.inventory : ''
|
||||
);
|
||||
|
||||
const [, inventoryMeta, inventoryHelpers] = useField({
|
||||
name: 'inventory',
|
||||
validate: required(i18n._(t`Select a value for this field`), i18n),
|
||||
validate: required(t`Select a value for this field`),
|
||||
});
|
||||
|
||||
return (
|
||||
<FormGroup
|
||||
label={i18n._(t`Inventory`)}
|
||||
label={t`Inventory`}
|
||||
labelIcon={
|
||||
<Popover
|
||||
content={i18n._(
|
||||
t`Select the inventory that this host will belong to.`
|
||||
)}
|
||||
content={t`Select the inventory that this host will belong to.`}
|
||||
/>
|
||||
}
|
||||
isRequired
|
||||
@@ -44,7 +42,7 @@ const InventoryLookupField = withI18n()(({ i18n, host }) => {
|
||||
fieldId="inventory-lookup"
|
||||
value={inventory}
|
||||
onBlur={() => inventoryHelpers.setTouched()}
|
||||
tooltip={i18n._(t`Select the inventory that this host will belong to.`)}
|
||||
tooltip={t`Select the inventory that this host will belong to.`}
|
||||
isValid={!inventoryMeta.touched || !inventoryMeta.error}
|
||||
helperTextInvalid={inventoryMeta.error}
|
||||
onChange={value => {
|
||||
@@ -57,14 +55,14 @@ const InventoryLookupField = withI18n()(({ i18n, host }) => {
|
||||
/>
|
||||
</FormGroup>
|
||||
);
|
||||
});
|
||||
};
|
||||
|
||||
const HostForm = ({
|
||||
handleCancel,
|
||||
handleSubmit,
|
||||
host,
|
||||
isInventoryVisible,
|
||||
i18n,
|
||||
|
||||
submitError,
|
||||
}) => {
|
||||
return (
|
||||
@@ -84,22 +82,22 @@ const HostForm = ({
|
||||
id="host-name"
|
||||
name="name"
|
||||
type="text"
|
||||
label={i18n._(t`Name`)}
|
||||
validate={required(null, i18n)}
|
||||
label={t`Name`}
|
||||
validate={required(null)}
|
||||
isRequired
|
||||
/>
|
||||
<FormField
|
||||
id="host-description"
|
||||
name="description"
|
||||
type="text"
|
||||
label={i18n._(t`Description`)}
|
||||
label={t`Description`}
|
||||
/>
|
||||
{isInventoryVisible && <InventoryLookupField host={host} />}
|
||||
<FormFullWidthLayout>
|
||||
<VariablesField
|
||||
id="host-variables"
|
||||
name="variables"
|
||||
label={i18n._(t`Variables`)}
|
||||
label={t`Variables`}
|
||||
/>
|
||||
</FormFullWidthLayout>
|
||||
{submitError && <FormSubmitError error={submitError} />}
|
||||
@@ -137,4 +135,4 @@ HostForm.defaultProps = {
|
||||
};
|
||||
|
||||
export { HostForm as _HostForm };
|
||||
export default withI18n()(HostForm);
|
||||
export default HostForm;
|
||||
|
||||
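HostForm now calls the required() validator without the i18n instance: required(t`Select a value for this field`) for the inventory lookup and required(null) for the name field. The validator's implementation is not part of this diff; the sketch below is one way such a Formik-style field validator could look after dropping the i18n argument, and should be read as an assumption rather than the actual util/validators code.

// --- illustrative sketch; not the actual util/validators implementation ---
import { t } from '@lingui/macro';

// Returns a Formik field-level validator. A null message falls back to a generic one.
function required(message) {
  return value => {
    const isEmpty =
      value === undefined ||
      value === null ||
      (typeof value === 'string' && value.trim() === '');
    if (isEmpty) {
      return message || t`This field must not be blank`;
    }
    return undefined; // no error
  };
}

// Usage, as in the HostForm diff:
// <FormField name="name" validate={required(null)} isRequired />
// useField({ name: 'inventory', validate: required(t`Select a value for this field`) })
// --- end sketch ---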
@@ -1,6 +1,6 @@
|
||||
import 'styled-components/macro';
|
||||
import React, { Fragment, useState, useEffect, useCallback } from 'react';
|
||||
import { withI18n } from '@lingui/react';
|
||||
|
||||
import { t } from '@lingui/macro';
|
||||
import { Switch, Tooltip } from '@patternfly/react-core';
|
||||
import AlertModal from '../AlertModal';
|
||||
@@ -9,16 +9,13 @@ import useRequest from '../../util/useRequest';
|
||||
import { HostsAPI } from '../../api';
|
||||
|
||||
function HostToggle({
|
||||
i18n,
|
||||
className,
|
||||
host,
|
||||
isDisabled = false,
|
||||
onToggle,
|
||||
tooltip = i18n._(
|
||||
t`Indicates if a host is available and should be included in running
|
||||
tooltip = t`Indicates if a host is available and should be included in running
|
||||
jobs. For hosts that are part of an external inventory, this may be
|
||||
reset by the inventory sync process.`
|
||||
),
|
||||
reset by the inventory sync process.`,
|
||||
}) {
|
||||
const [isEnabled, setIsEnabled] = useState(host.enabled);
|
||||
const [showError, setShowError] = useState(false);
|
||||
@@ -55,8 +52,8 @@ function HostToggle({
|
||||
className={className}
|
||||
css="display: inline-flex;"
|
||||
id={`host-${host.id}-toggle`}
|
||||
label={i18n._(t`On`)}
|
||||
labelOff={i18n._(t`Off`)}
|
||||
label={t`On`}
|
||||
labelOff={t`Off`}
|
||||
isChecked={isEnabled}
|
||||
isDisabled={
|
||||
isLoading ||
|
||||
@@ -64,17 +61,17 @@ function HostToggle({
|
||||
!host.summary_fields.user_capabilities.edit
|
||||
}
|
||||
onChange={toggleHost}
|
||||
aria-label={i18n._(t`Toggle host`)}
|
||||
aria-label={t`Toggle host`}
|
||||
/>
|
||||
</Tooltip>
|
||||
{showError && error && !isLoading && (
|
||||
<AlertModal
|
||||
variant="error"
|
||||
title={i18n._(t`Error!`)}
|
||||
title={t`Error!`}
|
||||
isOpen={error && !isLoading}
|
||||
onClose={() => setShowError(false)}
|
||||
>
|
||||
{i18n._(t`Failed to toggle host.`)}
|
||||
{t`Failed to toggle host.`}
|
||||
<ErrorDetail error={error} />
|
||||
</AlertModal>
|
||||
)}
|
||||
@@ -82,4 +79,4 @@ function HostToggle({
|
||||
);
|
||||
}
|
||||
|
||||
export default withI18n()(HostToggle);
|
||||
export default HostToggle;
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import React, { useState, useEffect, useCallback } from 'react';
|
||||
import { withI18n } from '@lingui/react';
|
||||
|
||||
import { t } from '@lingui/macro';
|
||||
import { Switch, Tooltip } from '@patternfly/react-core';
|
||||
import AlertModal from '../AlertModal';
|
||||
@@ -8,13 +8,7 @@ import useRequest from '../../util/useRequest';
|
||||
import { InstancesAPI } from '../../api';
|
||||
import { useConfig } from '../../contexts/Config';
|
||||
|
||||
function InstanceToggle({
|
||||
className,
|
||||
fetchInstances,
|
||||
instance,
|
||||
onToggle,
|
||||
i18n,
|
||||
}) {
|
||||
function InstanceToggle({ className, fetchInstances, instance, onToggle }) {
|
||||
const { me = {} } = useConfig();
|
||||
const [isEnabled, setIsEnabled] = useState(instance.enabled);
|
||||
const [showError, setShowError] = useState(false);
|
||||
@@ -46,31 +40,29 @@ function InstanceToggle({
|
||||
return (
|
||||
<>
|
||||
<Tooltip
|
||||
content={i18n._(
|
||||
t`Set the instance online or offline. If offline, jobs will not be assigned to this instance.`
|
||||
)}
|
||||
content={t`Set the instance online or offline. If offline, jobs will not be assigned to this instance.`}
|
||||
position="top"
|
||||
>
|
||||
<Switch
|
||||
className={className}
|
||||
css="display: inline-flex;"
|
||||
id={`host-${instance.id}-toggle`}
|
||||
label={i18n._(t`On`)}
|
||||
labelOff={i18n._(t`Off`)}
|
||||
label={t`On`}
|
||||
labelOff={t`Off`}
|
||||
isChecked={isEnabled}
|
||||
isDisabled={isLoading || !me?.is_superuser}
|
||||
onChange={toggleInstance}
|
||||
aria-label={i18n._(t`Toggle instance`)}
|
||||
aria-label={t`Toggle instance`}
|
||||
/>
|
||||
</Tooltip>
|
||||
{showError && error && !isLoading && (
|
||||
<AlertModal
|
||||
variant="error"
|
||||
title={i18n._(t`Error!`)}
|
||||
title={t`Error!`}
|
||||
isOpen={error && !isLoading}
|
||||
onClose={() => setShowError(false)}
|
||||
>
|
||||
{i18n._(t`Failed to toggle instance.`)}
|
||||
{t`Failed to toggle instance.`}
|
||||
<ErrorDetail error={error} />
|
||||
</AlertModal>
|
||||
)}
|
||||
@@ -78,4 +70,4 @@ function InstanceToggle({
|
||||
);
|
||||
}
|
||||
|
||||
export default withI18n()(InstanceToggle);
|
||||
export default InstanceToggle;
|
||||
|
||||
awx/ui_next/src/components/JobCancelButton/JobCancelButton.jsx (new file, 102 lines)
@@ -0,0 +1,102 @@
|
||||
import React, { useCallback, useState } from 'react';
|
||||
import { t } from '@lingui/macro';
|
||||
import { MinusCircleIcon } from '@patternfly/react-icons';
|
||||
import { Button, Tooltip } from '@patternfly/react-core';
|
||||
import { getJobModel } from '../../util/jobs';
|
||||
import useRequest, { useDismissableError } from '../../util/useRequest';
|
||||
import AlertModal from '../AlertModal';
|
||||
import ErrorDetail from '../ErrorDetail';
|
||||
|
||||
function JobCancelButton({
|
||||
job = {},
|
||||
errorTitle,
|
||||
title,
|
||||
showIconButton,
|
||||
errorMessage,
|
||||
buttonText,
|
||||
}) {
|
||||
const [isOpen, setIsOpen] = useState(false);
|
||||
const { error: cancelError, request: cancelJob } = useRequest(
|
||||
useCallback(async () => {
|
||||
setIsOpen(false);
|
||||
await getJobModel(job.type).cancel(job.id);
|
||||
}, [job.id, job.type]),
|
||||
{}
|
||||
);
|
||||
const { error, dismissError: dismissCancelError } = useDismissableError(
|
||||
cancelError
|
||||
);
|
||||
|
||||
return (
|
||||
<>
|
||||
<Tooltip content={title}>
|
||||
{showIconButton ? (
|
||||
<Button
|
||||
aria-label={title}
|
||||
ouiaId="cancel-job-button"
|
||||
onClick={() => setIsOpen(true)}
|
||||
variant="plain"
|
||||
>
|
||||
<MinusCircleIcon />
|
||||
</Button>
|
||||
) : (
|
||||
<Button
|
||||
aria-label={title}
|
||||
variant="secondary"
|
||||
ouiaId="cancel-job-button"
|
||||
onClick={() => setIsOpen(true)}
|
||||
>
|
||||
{buttonText || t`Cancel Job`}
|
||||
</Button>
|
||||
)}
|
||||
</Tooltip>
|
||||
{isOpen && (
|
||||
<AlertModal
|
||||
isOpen={isOpen}
|
||||
variant="danger"
|
||||
onClose={() => setIsOpen(false)}
|
||||
title={title}
|
||||
label={title}
|
||||
actions={[
|
||||
<Button
|
||||
id="cancel-job-confirm-button"
|
||||
key="delete"
|
||||
variant="danger"
|
||||
aria-label={t`Confirm cancel job`}
|
||||
ouiaId="cancel-job-confirm-button"
|
||||
onClick={cancelJob}
|
||||
>
|
||||
{t`Confirm cancellation`}
|
||||
</Button>,
|
||||
<Button
|
||||
id="cancel-job-return-button"
|
||||
key="cancel"
|
||||
ouiaId="return"
|
||||
aria-label={t`Return`}
|
||||
variant="secondary"
|
||||
onClick={() => setIsOpen(false)}
|
||||
>
|
||||
{t`Return`}
|
||||
</Button>,
|
||||
]}
|
||||
>
|
||||
{t`Are you sure you want to cancel this job?`}
|
||||
</AlertModal>
|
||||
)}
|
||||
{error && (
|
||||
<AlertModal
|
||||
isOpen={error}
|
||||
variant="danger"
|
||||
onClose={dismissCancelError}
|
||||
title={errorTitle}
|
||||
label={errorTitle}
|
||||
>
|
||||
{errorMessage}
|
||||
<ErrorDetail error={error} />
|
||||
</AlertModal>
|
||||
)}
|
||||
</>
|
||||
);
|
||||
}
|
||||
|
||||
export default JobCancelButton;
|
||||
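JobCancelButton is new in this diff. A hedged usage sketch showing the props it reads (job, title, errorTitle, errorMessage, buttonText, showIconButton); the wrapper component, the import path, and the job object are made up.

// --- illustrative usage sketch; wrapper, path, and job object are assumptions ---
import React from 'react';
import { t } from '@lingui/macro';
import JobCancelButton from './components/JobCancelButton/JobCancelButton';

function RunningJobActions({ job }) {
  return (
    <JobCancelButton
      job={job} // e.g. { id: 123, type: 'job' }; type picks the API module via getJobModel()
      title={t`Cancel Job`}
      buttonText={t`Cancel Job`}
      showIconButton={false}
      errorTitle={t`Job Cancel Error`}
      errorMessage={t`Failed to cancel the job.`}
    />
  );
}
export default RunningJobActions;
// --- end sketch ---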
@@ -0,0 +1,180 @@
|
||||
import React from 'react';
|
||||
import { act } from 'react-dom/test-utils';
|
||||
import {
|
||||
ProjectUpdatesAPI,
|
||||
AdHocCommandsAPI,
|
||||
SystemJobsAPI,
|
||||
WorkflowJobsAPI,
|
||||
JobsAPI,
|
||||
} from '../../api';
|
||||
import { mountWithContexts } from '../../../testUtils/enzymeHelpers';
|
||||
import JobCancelButton from './JobCancelButton';
|
||||
|
||||
jest.mock('../../api');
|
||||
|
||||
describe('<JobCancelButton/>', () => {
|
||||
let wrapper;
|
||||
|
||||
test('should render properly', () => {
|
||||
act(() => {
|
||||
wrapper = mountWithContexts(
|
||||
<JobCancelButton
|
||||
job={{ id: 1, type: 'project_update' }}
|
||||
errorTitle="Error"
|
||||
title="Title"
|
||||
/>
|
||||
);
|
||||
});
|
||||
expect(wrapper.length).toBe(1);
|
||||
expect(wrapper.find('MinusCircleIcon').length).toBe(0);
|
||||
});
|
||||
test('should render icon button', () => {
|
||||
act(() => {
|
||||
wrapper = mountWithContexts(
|
||||
<JobCancelButton
|
||||
job={{ id: 1, type: 'project_update' }}
|
||||
errorTitle="Error"
|
||||
title="Title"
|
||||
showIconButton
|
||||
/>
|
||||
);
|
||||
});
|
||||
expect(wrapper.find('MinusCircleIcon').length).toBe(1);
|
||||
});
|
||||
test('should call api', async () => {
|
||||
act(() => {
|
||||
wrapper = mountWithContexts(
|
||||
<JobCancelButton
|
||||
job={{ id: 1, type: 'project_update' }}
|
||||
errorTitle="Error"
|
||||
title="Title"
|
||||
showIconButton
|
||||
/>
|
||||
);
|
||||
});
|
||||
await act(async () => wrapper.find('Button').prop('onClick')(true));
|
||||
wrapper.update();
|
||||
|
||||
expect(wrapper.find('AlertModal').length).toBe(1);
|
||||
await act(() =>
|
||||
wrapper.find('Button#cancel-job-confirm-button').prop('onClick')()
|
||||
);
|
||||
expect(ProjectUpdatesAPI.cancel).toBeCalledWith(1);
|
||||
});
|
||||
test('should throw error', async () => {
|
||||
ProjectUpdatesAPI.cancel.mockRejectedValue(
|
||||
new Error({
|
||||
response: {
|
||||
config: {
|
||||
method: 'post',
|
||||
url: '/api/v2/projectupdates',
|
||||
},
|
||||
data: 'An error occurred',
|
||||
status: 403,
|
||||
},
|
||||
})
|
||||
);
|
||||
act(() => {
|
||||
wrapper = mountWithContexts(
|
||||
<JobCancelButton
|
||||
job={{ id: 'a', type: 'project_update' }}
|
||||
errorTitle="Error"
|
||||
title="Title"
|
||||
showIconButton
|
||||
/>
|
||||
);
|
||||
});
|
||||
await act(async () => wrapper.find('Button').prop('onClick')(true));
|
||||
wrapper.update();
|
||||
expect(wrapper.find('AlertModal').length).toBe(1);
|
||||
await act(() =>
|
||||
wrapper.find('Button#cancel-job-confirm-button').prop('onClick')()
|
||||
);
|
||||
wrapper.update();
|
||||
expect(wrapper.find('ErrorDetail').length).toBe(1);
|
||||
expect(wrapper.find('AlertModal[title="Title"]').length).toBe(0);
|
||||
});
|
||||
|
||||
test('should cancel Ad Hoc Command job', async () => {
|
||||
act(() => {
|
||||
wrapper = mountWithContexts(
|
||||
<JobCancelButton
|
||||
job={{ id: 1, type: 'ad_hoc_command' }}
|
||||
errorTitle="Error"
|
||||
title="Title"
|
||||
showIconButton
|
||||
/>
|
||||
);
|
||||
});
|
||||
await act(async () => wrapper.find('Button').prop('onClick')(true));
|
||||
wrapper.update();
|
||||
|
||||
expect(wrapper.find('AlertModal').length).toBe(1);
|
||||
await act(() =>
|
||||
wrapper.find('Button#cancel-job-confirm-button').prop('onClick')()
|
||||
);
|
||||
expect(AdHocCommandsAPI.cancel).toBeCalledWith(1);
|
||||
});
|
||||
|
||||
test('should cancel system job', async () => {
|
||||
act(() => {
|
||||
wrapper = mountWithContexts(
|
||||
<JobCancelButton
|
||||
job={{ id: 1, type: 'system_job' }}
|
||||
errorTitle="Error"
|
||||
title="Title"
|
||||
showIconButton
|
||||
/>
|
||||
);
|
||||
});
|
||||
await act(async () => wrapper.find('Button').prop('onClick')(true));
|
||||
wrapper.update();
|
||||
|
||||
expect(wrapper.find('AlertModal').length).toBe(1);
|
||||
await act(() =>
|
||||
wrapper.find('Button#cancel-job-confirm-button').prop('onClick')()
|
||||
);
|
||||
expect(SystemJobsAPI.cancel).toBeCalledWith(1);
|
||||
});
|
||||
|
||||
test('should cancel workflow job', async () => {
|
||||
act(() => {
|
||||
wrapper = mountWithContexts(
|
||||
<JobCancelButton
|
||||
job={{ id: 1, type: 'workflow_job' }}
|
||||
errorTitle="Error"
|
||||
title="Title"
|
||||
showIconButton
|
||||
/>
|
||||
);
|
||||
});
|
||||
await act(async () => wrapper.find('Button').prop('onClick')(true));
|
||||
wrapper.update();
|
||||
|
||||
expect(wrapper.find('AlertModal').length).toBe(1);
|
||||
await act(() =>
|
||||
wrapper.find('Button#cancel-job-confirm-button').prop('onClick')()
|
||||
);
|
||||
expect(WorkflowJobsAPI.cancel).toBeCalledWith(1);
|
||||
});
|
||||
test('should fall back to JobsAPI for unknown job types', async () => {
|
||||
act(() => {
|
||||
wrapper = mountWithContexts(
|
||||
<JobCancelButton
|
||||
job={{ id: 1, type: 'hakunah_matata' }}
|
||||
errorTitle="Error"
|
||||
title="Title"
|
||||
showIconButton
|
||||
/>
|
||||
);
|
||||
});
|
||||
await act(async () => wrapper.find('Button').prop('onClick')(true));
|
||||
wrapper.update();
|
||||
|
||||
expect(wrapper.find('AlertModal').length).toBe(1);
|
||||
await act(() =>
|
||||
wrapper.find('Button#cancel-job-confirm-button').prop('onClick')()
|
||||
);
|
||||
expect(JobsAPI.cancel).toBeCalledWith(1);
|
||||
});
|
||||
});
|
||||
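The tests above pin down what getJobModel is expected to do: map a job's type string to the API module whose cancel() endpoint should be called, falling back to JobsAPI for anything unrecognised. Its real implementation lives in util/jobs and is not shown in this diff; the sketch below is inferred from the assertions only and is not the actual AWX code.

// --- illustrative sketch inferred from the tests; not the actual util/jobs code ---
import {
  AdHocCommandsAPI,
  JobsAPI,
  ProjectUpdatesAPI,
  SystemJobsAPI,
  WorkflowJobsAPI,
} from '../api';

function getJobModel(type) {
  switch (type) {
    case 'project_update':
      return ProjectUpdatesAPI;
    case 'ad_hoc_command':
      return AdHocCommandsAPI;
    case 'system_job':
      return SystemJobsAPI;
    case 'workflow_job':
      return WorkflowJobsAPI;
    default:
      // Unknown types (the 'hakunah_matata' case in the last test) fall back to JobsAPI.
      return JobsAPI;
  }
}
// --- end sketch ---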