Compare commits

775 Commits
9.1.1 ... 9.3.0

Author SHA1 Message Date
softwarefactory-project-zuul[bot]
beb8021580 Merge pull request #6263 from ryanpetrello/930-release-branch
bump version to 9.3.0

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-03-11 22:57:45 +00:00
Ryan Petrello
36078651d3 bump version to 9.3.0 2020-03-11 14:34:27 -04:00
softwarefactory-project-zuul[bot]
8b768bcb01 Merge pull request #6174 from AlanCoding/collection_single_loop
Make demo data in tests, further refine templating loop

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-03-11 18:19:31 +00:00
softwarefactory-project-zuul[bot]
16d9e1cfc7 Merge pull request #5968 from mabashian/jobs-list-sockets
Changes ui-side behavior in response to websocket job status updates on several lists

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-03-11 18:13:06 +00:00
softwarefactory-project-zuul[bot]
0fd9153cf7 Merge pull request #6254 from ryanpetrello/redact-faster
optimize the awx.main.redact SCM URL sanitizer regex

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-03-11 14:56:02 +00:00
Ryan Petrello
c95624e27f optimize the SCM URL sanitizer regex
\w+ is too greedy for large strings that don't contain URLs
2020-03-11 10:10:35 -04:00
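The performance point in the commit above can be reproduced with a toy benchmark; the patterns below are hypothetical stand-ins, not the actual awx.main.redact regex.

```python
# Hypothetical patterns for illustration only; not the real sanitizer regex.
import re
import timeit

blob = "x" * 3000  # a large-ish payload that contains no SCM URLs at all

# An unbounded \w+ before "://" backtracks at nearly every position, so a
# non-matching string costs roughly O(n^2) character comparisons.
greedy = re.compile(r"\w+://\S+")

# Bounding the scheme length keeps the cost of a failed scan close to linear.
bounded = re.compile(r"\w{1,20}://\S+")

print("greedy :", timeit.timeit(lambda: greedy.search(blob), number=20))
print("bounded:", timeit.timeit(lambda: bounded.search(blob), number=20))
```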
softwarefactory-project-zuul[bot]
5cf33f57a4 Merge pull request #6253 from AlanCoding/smart_error
Fix server error creating smart inventories

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-03-11 13:08:00 +00:00
AlanCoding
5c331934e2 Fix server error creating smart inventories 2020-03-10 22:00:31 -04:00
softwarefactory-project-zuul[bot]
7ac21b4922 Merge pull request #6252 from gamuniz/fix_typo_meta
removed extra quotes in example

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-03-11 00:14:46 +00:00
softwarefactory-project-zuul[bot]
04fe18d840 Merge pull request #6239 from marshmalien/check-host-inventory-id
Check for top-level inventory and host inventory match 

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-03-10 23:13:16 +00:00
Gabe Muniz
b9829e2bde removed extra quotes in example 2020-03-10 23:08:38 +00:00
softwarefactory-project-zuul[bot]
a99d4a8419 Merge pull request #6248 from marshmalien/side-nav-dark-theme
Use dark theme in side navigation 

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-03-10 21:53:22 +00:00
Marliana Lara
676b29346c Switch side nav to dark theme 2020-03-10 16:40:34 -04:00
softwarefactory-project-zuul[bot]
208dbc1f92 Merge pull request #6237 from beeankha/fix_user_module
Fix Username Idempotency in tower_user Module

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-03-10 20:06:05 +00:00
Marliana Lara
2cb5046ec6 Throw an error when host inventory doesn't match parent inventory 2020-03-10 15:33:39 -04:00
softwarefactory-project-zuul[bot]
356b674a49 Merge pull request #6241 from mabashian/4085-cred-edit
Fixes bug where users without read access on a creds org cannot edit the cred

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-03-10 19:19:28 +00:00
beeankha
185c581007 Remove len() check
Remove comment

Remove quote marks from default values
2020-03-10 15:18:34 -04:00
softwarefactory-project-zuul[bot]
789397d56c Merge pull request #6240 from ryanpetrello/schedule-rbac-tests
add RBAC tests for schedules on inventory and project updates

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-03-10 18:44:18 +00:00
Marliana Lara
e816f73ecf Show content error when the top-level inventory and host inventory do not match 2020-03-10 14:24:59 -04:00
softwarefactory-project-zuul[bot]
bbe5789e70 Merge pull request #6235 from marshmalien/6142-inv-group-add-host-form
Add Inventory Group Host Add form

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-03-10 17:48:27 +00:00
Ryan Petrello
ad1a7fc9c9 add RBAC tests for schedules on inventory and project updates 2020-03-10 13:45:57 -04:00
mabashian
dd5f25186b Fixes bug where user with edit permissions on a credential but no permissions on the credentials org would be denied access to the form in the UI. 2020-03-10 13:31:55 -04:00
softwarefactory-project-zuul[bot]
ecb7147614 Merge pull request #6229 from AlexSCorey/5895-SurveyListToolbar
Adds SurveyList tool bar

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-03-10 17:13:46 +00:00
Marliana Lara
87396f968c Use variables for inventory breadcrumb path segments 2020-03-10 13:02:14 -04:00
softwarefactory-project-zuul[bot]
87bfb82907 Merge pull request #6238 from jakemcdermott/awaiting-for-godot
Remove some rogue awaits

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-03-10 16:53:30 +00:00
Alex Corey
65e988b44c Improves test and removes unnecessary condition 2020-03-10 12:20:48 -04:00
Jake McDermott
4381a7d75c Remove some rogue awaits 2020-03-10 11:59:37 -04:00
Alex Corey
3a6528dc0d Adds toolbar 2020-03-10 10:08:40 -04:00
beeankha
c113c2db52 Fix username/name issue in tower_user module, update test playbook 2020-03-10 10:06:38 -04:00
Marliana Lara
b7d2d6ad89 Add Inventory Group Host Add form 2020-03-09 19:58:31 -04:00
softwarefactory-project-zuul[bot]
01d77d5407 Merge pull request #6221 from marshmalien/6150-disassociate-modal
Add disassociate inventory group host button 

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-03-09 23:47:16 +00:00
softwarefactory-project-zuul[bot]
87c6ed52cd Merge pull request #6234 from wenottingham/found-a-mole--lets-whack-it
Remove SCL python from awx-python

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-03-09 20:59:30 +00:00
softwarefactory-project-zuul[bot]
0d5a46a6e1 Merge pull request #6233 from marshmalien/6210-user-teams-form-btn-layout
Fix form action button layout

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-03-09 20:41:22 +00:00
softwarefactory-project-zuul[bot]
f3ab3de1be Merge pull request #6228 from jakemcdermott/6191-fix-unnecessary-panel-reload-02
Fix unnecessary panel reload and refactor top-level host and inventory routing components

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-03-09 20:03:44 +00:00
Bill Nottingham
871695ea5e Remove SCL python from awx-python
This really should be created at build time.
2020-03-09 15:51:56 -04:00
Marliana Lara
0f5b694514 Fix form action button layout 2020-03-09 14:56:39 -04:00
softwarefactory-project-zuul[bot]
9567dc9e17 Merge pull request #6231 from jakemcdermott/6230-fix-double-schedules
Remove duplicate schedule tab

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-03-09 18:46:12 +00:00
Jake McDermott
24d35e9593 Remove duplicate schedule tab 2020-03-09 13:39:40 -04:00
softwarefactory-project-zuul[bot]
b6be8ca445 Merge pull request #6212 from marshmalien/6210-fix-cred-form-button-layout
Fix credential form action button layout

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-03-09 17:32:34 +00:00
Jake McDermott
beb10feb0c Refactor top-level host and inv routing components
* Remove route render props
* Remove unneeded load checks
* Make PageSection component placement consistent with other screens
* Fix issues causing complete panel reload on tab change
2020-03-09 12:12:49 -04:00
Jake McDermott
6ec9c45341 Fix screen reload on host tab change 2020-03-09 11:35:26 -04:00
softwarefactory-project-zuul[bot]
9a394a5726 Merge pull request #6219 from dsesami/host-detail-labels
Host detail labels

Reviewed-by: Daniel Sami
             https://github.com/dsesami
2020-03-09 13:37:50 +00:00
Marliana Lara
25f4aa19b7 Add disassociate inventory group host modal 2020-03-06 17:48:12 -05:00
softwarefactory-project-zuul[bot]
7ff5bacce5 Merge pull request #6185 from jlmitch5/addWFJTNotAccTabs
add notification and access tabs to wfjt

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-03-06 22:05:05 +00:00
softwarefactory-project-zuul[bot]
3e820a88e1 Merge pull request #6216 from pilou-/doc_extract_credentials
[doc] Explain how to extract credentials or encrypted settings

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-03-06 21:54:51 +00:00
John Mitchell
c1ab118481 remove unnecessary org rest calls for notification tabs on proj wfjt 2020-03-06 16:04:16 -05:00
John Mitchell
3952be9429 add notification and access tabs to wfjt 2020-03-06 16:04:16 -05:00
Ryan Petrello
35f414ccf2 clarify how AWX stores credentials 2020-03-06 16:00:41 -05:00
softwarefactory-project-zuul[bot]
304bf6805b Merge pull request #6217 from ryanpetrello/workflow-approval
record a start time for WorkflowApproval jobs

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-03-06 20:54:39 +00:00
Daniel Sami
e733506477 Merge branch 'devel' into host-detail-labels 2020-03-06 15:50:58 -05:00
softwarefactory-project-zuul[bot]
f4366be419 Merge pull request #6170 from mabashian/5859-jt-schedule-details
Adds generic schedule detail component and applies it to JT/WFJT/Proj schedules

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-03-06 20:44:11 +00:00
Daniel Sami
64018a71bb used lint 2020-03-06 15:41:10 -05:00
softwarefactory-project-zuul[bot]
0c9c349fb9 Merge pull request #6211 from marshmalien/6141-inv-host-components
Remove screen -> screen imports in Inventories and Hosts

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-03-06 20:02:36 +00:00
mabashian
6dd4d04bf5 Move label out to styled component. Removes dependency on internal pf classes. 2020-03-06 14:56:17 -05:00
mabashian
21b4455ee6 Change nestedResource to schedule in setBreadcrumbConfig. As it currently stands the only nested resource that could be passed here is a schedule. 2020-03-06 14:36:35 -05:00
mabashian
314e345825 Tweak Schedules props to include the options request callback needed to load the list after rebasing. 2020-03-06 14:36:35 -05:00
mabashian
90e047821d Makes entire hierarchy of schedule components generic 2020-03-06 14:36:34 -05:00
mabashian
01fe89e43c Moves request to generate preview down into the ScheduleDetail component 2020-03-06 14:32:09 -05:00
mabashian
1f2edd81ab Adds generic schedule detail component and applies it to JT schedules 2020-03-06 14:32:09 -05:00
Ryan Petrello
862de0b6f3 record a start time for WorkflowApproval jobs
see: https://github.com/ansible/awx/issues/6202
2020-03-06 14:16:27 -05:00
Pierre-Louis Bonicoli
d75c2d9b44 Explain how to extract credentials/settings 2020-03-06 18:18:20 +01:00
softwarefactory-project-zuul[bot]
1b8ff1f846 Merge pull request #6194 from jakemcdermott/6191-fix-unnecessary-panel-reload
Fix panel loading and clean up unneeded code

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-03-06 17:14:27 +00:00
softwarefactory-project-zuul[bot]
a93b1aa339 Merge pull request #6169 from AlexSCorey/5895-SurveyList
Adds the list of the survey questions.

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-03-06 16:57:41 +00:00
Marliana Lara
4c6191041c Fix credential form action button layout 2020-03-06 11:12:09 -05:00
Alex Corey
edb3f6df55 Adds the list of the survey questions.
TODO:  Add delete functionality.  Add sort functionality.
Add preview functionality.
Toolbar will be built out with the other functionalities.
2020-03-06 11:10:18 -05:00
softwarefactory-project-zuul[bot]
7a3ece7fd2 Merge pull request #6197 from jbradberry/choice-field-metadata
Show API choice field metadata more correctly for null vs ''

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-03-06 15:30:21 +00:00
softwarefactory-project-zuul[bot]
73e867b6f5 Merge pull request #6205 from ryanpetrello/proj-inv-sched
fix global schedule creation for project and inventory updates too

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-03-06 14:36:22 +00:00
Ryan Petrello
acc34c1393 fix global schedule creation for project and inventory updates too
related: https://github.com/ansible/awx/pull/6193
2020-03-06 08:55:13 -05:00
Marliana Lara
3d5a002676 Remove all inventory route logic from Host screens 2020-03-06 01:36:38 -05:00
Marliana Lara
bb6d9af90b Create nested inventory host route files and components 2020-03-06 01:35:58 -05:00
Marliana Lara
da94b2dc9e Add InventoryHostAdd route file 2020-03-06 01:25:07 -05:00
softwarefactory-project-zuul[bot]
a1c2db3db5 Merge pull request #6190 from mabashian/5864-jt-schedule-add
Adds Add button to schedules list along with rbac restrictions

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-03-05 23:26:49 +00:00
Jeff Bradberry
d849e81891 Show API choice field metadata more correctly for null vs '' 2020-03-05 17:10:59 -05:00
Jake McDermott
a5afac62ca Fix panel reloading and clean up unneeded code 2020-03-05 15:36:25 -05:00
softwarefactory-project-zuul[bot]
66c98ca9bc Merge pull request #6193 from ryanpetrello/awx-cli-schedules-options
fix a bug in OPTIONS /api/v2/schedules/

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-03-05 20:25:47 +00:00
softwarefactory-project-zuul[bot]
a00e7c7050 Merge pull request #6192 from squidboylan/fix_typo
Fix a typo in the collection README

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-03-05 20:22:51 +00:00
Ryan Petrello
cd1ff6b16a fix a bug in OPTIONS /api/v2/schedules/
a side effect of this bug is that `awx schedules create` doesn't work
properly for non-admin users (i.e., users who have execute access for
a JT)

see: https://github.com/ansible/awx/issues/5717
2020-03-05 14:43:54 -05:00
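Since `awx schedules create` is driven by the OPTIONS metadata on this endpoint, the shape of that response is what matters; a minimal sketch of inspecting it, with the host and credentials as placeholder assumptions:

```python
# Placeholder host/credentials; the "actions" -> "POST" layout is the standard
# DRF-style metadata a client reads to discover creatable fields.
import requests

resp = requests.options(
    "https://awx.example.org/api/v2/schedules/",
    auth=("admin", "password"),
)
post_fields = resp.json().get("actions", {}).get("POST", {})
for name, meta in post_fields.items():
    print(name, meta.get("type"), "required" if meta.get("required") else "optional")
```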
softwarefactory-project-zuul[bot]
b560a21ca3 Merge pull request #6187 from marshmalien/fix-inv-host-add-save
Fix bug where Inventory Host Add form doesn't save

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-03-05 19:02:32 +00:00
Caleb Boylan
63fa367e9d Fix a typo in the collection README 2020-03-05 10:23:39 -08:00
mabashian
d33daeee91 Adds Add button to schedules list along with rbac restrictions 2020-03-05 12:34:29 -05:00
Marliana Lara
9d449c419b Fix bug where Inv Host Add form doesn't save due to form error 2020-03-05 10:58:06 -05:00
softwarefactory-project-zuul[bot]
e34e88549f Merge pull request #6152 from ryanpetrello/venv-realpath
respect home directory symlinks for BASE_VENV_PATH

Reviewed-by: Bill Nottingham
             https://github.com/wenottingham
2020-03-05 15:23:38 +00:00
AlanCoding
c073e39c69 Simplify build loop, make demo data in tests 2020-03-05 08:48:48 -05:00
softwarefactory-project-zuul[bot]
4fcd2c594c Merge pull request #6161 from jakemcdermott/6151-missing-cred-types
Fix potentially missing credential type options in multicredential select

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-03-04 23:16:33 +00:00
Rebeccah
457dc956f1 added a check that the finished field is not None, since a None value was breaking the DateTime function 2020-03-04 15:56:46 -05:00
softwarefactory-project-zuul[bot]
3e5428131c Merge pull request #6156 from AlanCoding/collection_install
Unify collection targets for install and ansible-test

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-03-04 19:10:47 +00:00
softwarefactory-project-zuul[bot]
d08f59272e Merge pull request #6038 from gamuniz/honor_thy_precedence
fixed precedence on ansible.cfg

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-03-04 18:58:12 +00:00
softwarefactory-project-zuul[bot]
8b95d7be94 Merge pull request #6154 from keithjgrant/6107-column-alignment
fix column widths when action buttons hidden due to permissions

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-03-04 17:54:14 +00:00
softwarefactory-project-zuul[bot]
6c22ddf608 Merge pull request #5841 from AlexSCorey/5813-WorkflowJTForm
Adds WorkflowJobTemplate Add and Edit form and functionality.

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-03-04 16:24:49 +00:00
Alex Corey
8227054d11 Adds WorkflowJobTemplate Add and Edit form and functionality.
Fields work and forms save

Adds Webhook form fields and tooltip to VariablesField component

Alpha order

Fixes contentLoading issue in PR and enables Launch on JT List

Adds page section to fix render issue

Adds subform restore functionality and addresses PR issues

Adds pageSection to jt add form and fixes other PR issues

-Fixes spelling error on WFJTDetail
-Adds page section to JT Add Form to fix styling issue
-Adds spacing between functions
-Fixes form submission error by allowing state to handle the lookups while formik
only handles their ids.

Fixes styling issues, navigation, props, and adds useRequest hook
-Add functionality to remove chips from lookup fields
-Removes unnecessary custom styling
-Removes unnecessary Form Group wrappers
-Adds internationalization to webhook key string.
-Adds field level error handling
-updates tests
-Adds initial null value to form submit error

Adds error handling to submit labels, prevents unnecessary API call

The unnecessary API call is for the webhook credential id. If there is no
webhook service we do not want to make an API call to get the webhook credential
type id.

Adds error handling test to add and edit form. Updates Form component

Updates tests to a real error.

Fixes extra vars bug
2020-03-04 10:36:30 -05:00
softwarefactory-project-zuul[bot]
73b33e1435 Merge pull request #6116 from AlanCoding/early_error
Fail on launch for scenario where job cannot run

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-03-04 15:19:22 +00:00
Jake McDermott
deaf2028ad Request all types for multicred lookup 2020-03-04 09:08:46 -05:00
Jake McDermott
d812972d3c Wrap credential help text at 110 chars 2020-03-04 09:08:01 -05:00
AlanCoding
54b553b78c Simplify syntax for loop labels 2020-03-04 08:07:10 -05:00
AlanCoding
3e08bbeb93 Make wording on running less prescriptive and more general 2020-03-04 07:42:52 -05:00
AlanCoding
22524589e3 Employ the collection_package var in a few missing places 2020-03-03 22:19:38 -05:00
AlanCoding
85ec73bf4b Unify collection targets for install and ansible-test 2020-03-03 20:19:41 -05:00
softwarefactory-project-zuul[bot]
ccd36b9560 Merge pull request #6155 from jakemcdermott/job-inventory-links
Fix job->inventory links

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-03-04 00:27:01 +00:00
softwarefactory-project-zuul[bot]
61755e2838 Merge pull request #6149 from jlmitch5/fixMultiSelectCred
update multi select credential logic vault credential logic, add notice, and update multicred tests

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-03-03 23:51:30 +00:00
Jake McDermott
be56a1d3df Fix job->inventory links 2020-03-03 18:42:49 -05:00
softwarefactory-project-zuul[bot]
46c86ea6c0 Merge pull request #6144 from marshmalien/6049-inv-group-nested-hosts
Add Inventory Group Host list

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-03-03 22:31:51 +00:00
softwarefactory-project-zuul[bot]
401c7c3da2 Merge pull request #6148 from ryanpetrello/fact-data-error
make fact saving code more robust to unexpected fact data

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-03-03 22:20:42 +00:00
Keith Grant
f1120d39db fix column widths when action buttons hidden due to permissions 2020-03-03 14:19:49 -08:00
Michael Abashian
80617df22d Merge branch 'devel' into 5813-WorkflowJTForm 2020-03-03 17:10:48 -05:00
Ryan Petrello
b5e5fea117 respect home directory symlinks for BASE_VENV_PATH 2020-03-03 16:47:38 -05:00
softwarefactory-project-zuul[bot]
e3ec63e8e5 Merge pull request #6147 from ryanpetrello/job-host-summary-notification
properly support job host summary data in custom notification templates (and remove a few fields that don't work)

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-03-03 21:47:24 +00:00
Ryan Petrello
e232cd392c make fact saving code more robust to unexpected fact data
see: https://github.com/ansible/awx/issues/5935
2020-03-03 16:38:33 -05:00
Daniel Sami
8301254f57 further tags 2020-03-03 16:03:16 -05:00
softwarefactory-project-zuul[bot]
9cdfc19215 Merge pull request #6146 from mabashian/upgrade-react-3-2
Upgrade react and react-dom to latest

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-03-03 20:59:58 +00:00
Daniel Sami
c50705a2dc initial labels for host tests 2020-03-03 15:39:38 -05:00
softwarefactory-project-zuul[bot]
9f948e90d9 Merge pull request #6129 from squidboylan/collection_integration_tests
Add collection integration tests

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-03-03 20:23:22 +00:00
Ryan Petrello
e7f36eb2ea remove a few custom notification fields that don't work
these aren't top-level serializer fields; they're summary fields
if we want to support these properly, we should treat them as
enhancements, and write support, tests, and documentation
2020-03-03 15:20:41 -05:00
John Mitchell
c261d6acf0 update multi select credential logic vault credential logic, add notice, and update multicred tests 2020-03-03 15:16:57 -05:00
Ryan Petrello
32ef805e23 properly support job host summary data in custom notification templates
see: https://github.com/ansible/tower/issues/4148
2020-03-03 15:00:41 -05:00
softwarefactory-project-zuul[bot]
d009ce49f5 Merge pull request #6145 from ryanpetrello/fix-grafana
fix broken grafana notifications

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-03-03 19:28:22 +00:00
Caleb Boylan
d14bf00f6c Add collection integration test instructions to the README 2020-03-03 10:59:33 -08:00
softwarefactory-project-zuul[bot]
5dc4e30820 Merge pull request #6130 from mabashian/general-toggle-component
Refactors YamlJsonToggle component into a generic Toggle component

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-03-03 18:46:05 +00:00
Caleb Boylan
afbeacf499 Fix up the collection integration tests 2020-03-03 10:44:48 -08:00
Caleb Boylan
fc80cf5241 Replace randstr with password plugin in collection tests 2020-03-03 10:44:48 -08:00
Caleb Boylan
4a6db13daa Copy collection integration tests in 2020-03-03 10:44:48 -08:00
mabashian
d5372dae36 Upgrade react and react-dom to latest 2020-03-03 13:07:50 -05:00
softwarefactory-project-zuul[bot]
0b702ede4e Merge pull request #6133 from mabashian/upgrade-pf-deps-3-2
Updates our four patternfly deps to latest

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-03-03 18:04:59 +00:00
mabashian
3c7f596288 Change currentValue prop to simply value. Adds basic unit test coverage to MultiButtonToggle component. 2020-03-03 12:57:12 -05:00
Ryan Petrello
6207dad226 fix broken grafana notifications
since the custom notification template refactor, grafana notification
support has been broken; this is largely because grafana functions more
like the webhooks, and needs to send JSON in its notification body

see: https://github.com/ansible/awx/issues/6137
2020-03-03 12:44:28 -05:00
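The fix above hinges on Grafana expecting a JSON request body rather than form-encoded data; a rough sketch of that kind of payload, with the URL, API key, and message contents as placeholders:

```python
import json
import time

import requests

payload = {
    "time": int(time.time() * 1000),      # Grafana annotations use epoch milliseconds
    "tags": ["awx", "job-finished"],
    "text": "Job #42 finished: successful",
}
resp = requests.post(
    "https://grafana.example.org/api/annotations",   # placeholder Grafana host
    headers={
        "Authorization": "Bearer <api-key>",
        "Content-Type": "application/json",
    },
    data=json.dumps(payload),             # JSON body, not form data
)
resp.raise_for_status()
```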
Marliana Lara
2b48e43946 Add Inventory Group Host list 2020-03-03 12:01:25 -05:00
Alex Corey
4709f57f46 Updates tests to a real error. 2020-03-03 11:57:36 -05:00
Alex Corey
b055aad641 Adds error handling test to add and edit form. Updates Form component 2020-03-03 11:57:36 -05:00
Alex Corey
acfa6d056f Adds error handling to submit labels, prevents unnecessary API call
The unnecessary API call is for the webhook credential id. If there is no
webhook service we do not want to make an API call to get the webhook credential
type id.
2020-03-03 11:57:36 -05:00
Alex Corey
51a069fcc4 Fixes styling issues, navigation, props, and adds useRequest hook
-Add functionality to remove chips from lookup fields
-Removes unnecessary custom styling
-Removes unnecessary Form Group wrappers
-Adds internationalization to webhook key string.
-Adds field level error handling
-updates tests
-Adds initial null value to form submit error
2020-03-03 11:57:36 -05:00
Alex Corey
fc89b627d7 Adds pageSection to jt add form and fixes other PR issues
-Fixes spelling error on WFJTDetail
-Adds page section to JT Add Form to fix styling issue
-Adds spacing between functions
-Fixes form submission error by allowing state to handle the lookups while formik
only handles their ids.
2020-03-03 11:57:36 -05:00
Alex Corey
e90ee5113d Adds subform restore functionality and addresses PR issues 2020-03-03 11:57:36 -05:00
Alex Corey
4ccca08cda Adds page section to fix render issue 2020-03-03 11:57:36 -05:00
Alex Corey
b757fdebf8 Fixes contentLoading issue in PR and enables Launch on JT List 2020-03-03 11:57:36 -05:00
Alex Corey
3ee6f1f3c7 Alpha order 2020-03-03 11:57:36 -05:00
Alex Corey
d4ba32d0c5 Adds Webhook form fields and tooltip to VariablesField component 2020-03-03 11:57:36 -05:00
Alex Corey
d97f516c3a Fields work and forms save 2020-03-03 11:57:36 -05:00
Alex Corey
52a8935b20 Adds WorkflowJobTemplate Add and Edit form and functionality. 2020-03-03 11:57:36 -05:00
Rebeccah
07752f48f6 formatted finished time to match microsecond expected output as is in the API. 2020-03-03 11:05:28 -05:00
mabashian
10b5a10728 Jobs that error should trigger us to update the Recent Jobs list along with successful/failed/canceled 2020-03-03 11:05:28 -05:00
Rebeccah
e11ff69f3e Added a check that the unified_job_template_id attribute is present and populated on the object.
For anyone reading this later, know that AdHocCommands still have unified_job_template and unified_job_template_id fields; they are just None because they don't get used by the AdHocCommand objects. Which means you have to actually get the object, not just check that it's there, to use it the way I am in this change.
2020-03-03 11:05:28 -05:00
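In other words, an attribute-presence check is not enough here; a minimal illustration of the rule described above (the job object is a stand-in, not the real model code):

```python
# Illustration only: every unified job type exposes the attribute, but on
# AdHocCommand it is always None, so the value itself has to be checked.
def unified_job_template_id_for(job):
    ujt_id = getattr(job, "unified_job_template_id", None)
    if ujt_id is None:   # e.g. AdHocCommand: field exists but is never populated
        return None
    return ujt_id
```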
mabashian
d3fa34c665 Remove tooltip update when job finishes. This will be handled later down the line by buildTooltips(). 2020-03-03 11:05:28 -05:00
mabashian
48a615231b Fix jshint errors 2020-03-03 11:05:28 -05:00
mabashian
b09ac71647 Trims down GET requests made on the dashboard in response to websocket messages 2020-03-03 11:05:28 -05:00
mabashian
d5dd3c521f Consume finished timestamp on org templates list when available via websocket message 2020-03-03 11:05:28 -05:00
mabashian
db43341f96 Consume finished timestamp from websocket message and update the relevant job row. Also adds logic to attempt to re-order the list when the sort order is -finished since we have enough information client-side to do that. 2020-03-03 11:05:28 -05:00
mabashian
3234f246db Consume finished timestamp from websocket message when it's available to update the relevant row in the templates list. 2020-03-03 11:05:28 -05:00
Rebeccah
6d6d99bcf8 fixed the spelling of cancelled to be canceled, note to us later -> we need to stick with a single spelling of the word 2020-03-03 11:05:28 -05:00
mabashian
a6cd32522f Removes GET requests in response to websocket messages on the org job templates list. List is solely updated based on data from the websocket messages. 2020-03-03 11:05:28 -05:00
Rebeccah
1fe28463da added finished job timestamp to websocket emit 2020-03-03 11:05:28 -05:00
mabashian
51a6194b8d Removes logic performing GET requests on api/v2/templates whenever a job status update message comes across the websocket. We now use data exclusively from the websocket to update the UI. 2020-03-03 11:05:28 -05:00
mabashian
e75f7b0beb Stop making rest calls for inventory source sync updates on inventory sources list 2020-03-03 11:05:28 -05:00
mabashian
179c62e2f3 Stop making rest calls for project sync updates on projects list 2020-03-03 11:05:28 -05:00
Rebeccah
98f5525d28 added unified job template ID to websocket emits 2020-03-03 11:05:28 -05:00
mabashian
60a137225a Changes how the jobs list reacts to socket messages. We now only make targeted GET requests for new rows. We use the available information in the socket message to update the relevant row (if visible in the list). 2020-03-03 11:05:28 -05:00
softwarefactory-project-zuul[bot]
c1bfcd73fb Merge pull request #5972 from jainnikhil30/fix_smartinv_duplicate_hosts
fix smart inventory duplicate hosts

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-03-03 16:03:13 +00:00
mabashian
322b4ee1e4 Updates our four patternfly deps to latest 2020-03-02 13:43:15 -05:00
softwarefactory-project-zuul[bot]
98dc6179f5 Merge pull request #5919 from AlanCoding/good_projects
Fail early processing project factory

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-03-02 18:41:47 +00:00
AlanCoding
07807c2dec Fail on launch for scenario where job cannot run 2020-03-02 13:14:03 -05:00
softwarefactory-project-zuul[bot]
16ecf17c69 Merge pull request #6115 from jlmitch5/scheduleListInDetailViews
add schedule list to detail views

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-03-02 16:31:03 +00:00
mabashian
1f0acef844 Changes Toggle to MultiButtonToggle in an attempt to differentiate it from an upstream PF component. Altered props to be a bit more concise as well as support more than two buttons. 2020-03-02 11:22:18 -05:00
softwarefactory-project-zuul[bot]
5a164cae15 Merge pull request #6093 from ryanpetrello/remove-beat-thread
switch the periodic scheduler to a background process (instead of a thread) to avoid a cpython bug

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-03-02 15:46:27 +00:00
John Mitchell
b57405b696 assorted schedule list fixes:
- remove pagesection and card from component...move to where called for root list
- remove unnecessary placeholder schedule tab on job template detail
2020-03-02 10:00:27 -05:00
John Mitchell
5fdf6cf60f simplify conditional checking if schedules tab should be shown on proj detail 2020-03-02 09:35:34 -05:00
John Mitchell
c1c382a941 update ScheduleList api read to single prop 2020-03-02 09:22:32 -05:00
mabashian
a997b40852 Refactors YamlJsonToggle component into something a little more generic so that it can be used to toggle between local and utc times in the schedule details view. 2020-02-28 17:25:31 -05:00
softwarefactory-project-zuul[bot]
99cd2e601d Merge pull request #6127 from benthomasson/bump_config_json
Bumps the version of config.json to 1.1

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-28 21:09:48 +00:00
Ben Thomasson
fc402aff29 Bumps the version of config.json to 1.1 2020-02-28 15:28:31 -05:00
softwarefactory-project-zuul[bot]
2ec035f918 Merge pull request #6124 from ryanpetrello/awx-login-token-description
cli: add the ability to specify a token description w/ `awx login`

Reviewed-by: Jeff Bradberry
             https://github.com/jbradberry
2020-02-28 20:02:19 +00:00
softwarefactory-project-zuul[bot]
fe046b47b5 Merge pull request #6110 from keithjgrant/qs-empty-strings
Remove "Manual" option from search filters

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-28 17:19:30 +00:00
Ryan Petrello
3e0e4b6c8f cli: add the ability to specify a token description w/ awx login
see: https://github.com/ansible/awx/issues/6122
2020-02-28 11:12:26 -05:00
softwarefactory-project-zuul[bot]
7fe57268f6 Merge pull request #6121 from rooftopcellist/add_total_instances
Add total licensed instances to analytics

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-28 13:56:19 +00:00
Nikhil Jain
9eecb24c32 fix smart inventory duplicate hosts 2020-02-28 09:46:44 +05:30
softwarefactory-project-zuul[bot]
a8a45fca84 Merge pull request #6117 from keithjgrant/6095-redirect-after-host-delete
Fix redirect after host delete

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-28 00:45:05 +00:00
softwarefactory-project-zuul[bot]
33df6f8ad2 Merge pull request #6109 from marshmalien/inventory-host-toggle
Use HostToggle component in InventoryHostList

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-27 23:53:50 +00:00
softwarefactory-project-zuul[bot]
44223003aa Merge pull request #6112 from AlanCoding/the_canceled_dead
Cancel jobs if they were deleted in the database

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-27 22:17:16 +00:00
Keith Grant
a60e7a7855 fix redirect url after host delete 2020-02-27 13:39:56 -08:00
softwarefactory-project-zuul[bot]
e971ec993b Merge pull request #6114 from squidboylan/fix_collection
Fix job_list all_pages next value

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-27 20:58:43 +00:00
John Mitchell
989ef3538e add test for conditional show/hide of schedules tab for project detail view 2020-02-27 15:20:50 -05:00
John Mitchell
4db3e823bf add schedule list to proj, jt and wfjt 2020-02-27 15:05:46 -05:00
Caleb Boylan
c374316648 Collection: Fix job_list all_pages next value 2020-02-27 11:42:33 -08:00
AlanCoding
5dba49a7bc Lower level of log about skipped project update 2020-02-27 14:20:36 -05:00
AlanCoding
7b880c6552 Cancel jobs if they were deleted in the database 2020-02-27 14:12:47 -05:00
Keith Grant
5574cf0595 remove Manual option from project, inventory filters 2020-02-27 11:01:36 -08:00
John Mitchell
e706e0a2e2 update directory structure of schedule list files 2020-02-27 12:37:37 -05:00
Ryan Petrello
5364e78397 switch the periodic scheduler to a child process (instead of a thread)
I have a hunch that our usage of a daemon thread is causing import lock
contention related to https://github.com/ansible/awx/issues/5617
We've encountered similar issues before with threads across dispatcher
processes at fork time, and cpython has had bugs like this in recent
history:

https://bugs.python.org/issue38884

My gut tells me this might be related.

The prior implementation - based on celerybeat - ran its code in
a process (not a thread), and the timing of that merge matches the
period of time we started noticing issues.

Currently testing it to see if it resolves some of the issues we're
seeing.
2020-02-27 12:15:15 -05:00
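A minimal sketch of the shape of that change, assuming a simple beat loop (not the dispatcher's actual code): run the periodic loop in a child process rather than a daemon thread, so it no longer shares the parent's thread and import-lock state across forks.

```python
import multiprocessing
import time

def beat(interval=20):
    """Periodic 'beat' loop: enqueue scheduled work every `interval` seconds."""
    while True:
        print("tick")          # stand-in for dispatching periodic tasks
        time.sleep(interval)

if __name__ == "__main__":
    # A child process instead of threading.Thread(daemon=True)
    scheduler = multiprocessing.Process(target=beat, args=(5,), daemon=True)
    scheduler.start()
    time.sleep(20)             # stand-in for the dispatcher's real main loop
    scheduler.terminate()
    scheduler.join()
```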
Christian Adams
f93ca814ac Add total licensed instances to analytics 2020-02-27 10:42:25 -05:00
Marliana Lara
3bf1ad3028 Move HostToggle into shared components directory 2020-02-27 09:56:21 -05:00
softwarefactory-project-zuul[bot]
e096ad18cb Merge pull request #6001 from mabashian/4967-jt-prompt-on-launch
Adds prompt on launch support to the rest of the relevant jt fields

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-27 09:13:29 +00:00
softwarefactory-project-zuul[bot]
5ca73f1101 Merge pull request #6058 from marshmalien/5890-jt-completed-jobs-list
Add Completed Job list tab to multiple resources

Reviewed-by: Alex Corey <Alex.swansboro@gmail.com>
             https://github.com/AlexSCorey
2020-02-27 09:13:22 +00:00
Keith Grant
7e8fb29658 update qs utils to allow empty string params 2020-02-26 16:38:52 -08:00
softwarefactory-project-zuul[bot]
258689a9ed Merge pull request #6090 from marshmalien/6085-hostListItem-remove-duplicates
HostListItem - Remove duplicate action items

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-26 22:39:43 +00:00
mabashian
e80e3f7410 Reapply prompt on launch for job template fields after rebasing. 2020-02-26 16:59:45 -05:00
softwarefactory-project-zuul[bot]
154b9c36ac Merge pull request #5747 from john-westcott-iv/collections
Porting Collections Off of Tower CLI

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-26 20:35:00 +00:00
Marliana Lara
deced917cf Refactor HostListItem into functional component and add test for host toggle 2020-02-26 15:27:19 -05:00
softwarefactory-project-zuul[bot]
88b7256e96 Merge pull request #6087 from jakemcdermott/fix-6054
Fix several ui_next bugs related to unexpected data types

Reviewed-by: Jake McDermott <yo@jakemcdermott.me>
             https://github.com/jakemcdermott
2020-02-26 20:21:06 +00:00
Marliana Lara
033848a605 Refactor Hosts into functional component 2020-02-26 15:10:17 -05:00
mabashian
0e663921d6 Removes rogue comment and marks JT inventory validation string for translation. 2020-02-26 14:47:28 -05:00
mabashian
0582079606 Adds prompt on launch support to the rest of the relevant fields in the Job template form. Adds extra variables field to the job template form. Removes the advanced section in favor of a straight form. 2020-02-26 14:47:28 -05:00
Jake McDermott
6536f5a453 Delete falsey project creds before POST|PATCH
Depending on the permissions of the user submitting the form, the API
might throw an unexpected error if our creation request has a
zero-length string as its credential field. As a work-around,
normalize falsey credential fields by deleting them.
2020-02-26 14:22:58 -05:00
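The actual change lives in the UI form code; purely as an illustration of the work-around described above, the payload normalization boils down to this (the `credential` field name is assumed for the sketch):

```python
# Sketch only: drop a falsey credential field so the API never receives "".
def normalize_project_payload(payload):
    cleaned = dict(payload)
    if not cleaned.get("credential"):
        cleaned.pop("credential", None)
    return cleaned

assert "credential" not in normalize_project_payload({"name": "proj", "credential": ""})
assert normalize_project_payload({"name": "proj", "credential": 7})["credential"] == 7
```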
AlanCoding
c5079607aa Fail early processing project factory 2020-02-26 14:21:38 -05:00
softwarefactory-project-zuul[bot]
26dcb000f6 Merge pull request #6081 from ryanpetrello/launch-monitor-rc
cli: make launch with --monitor return code respect the final job status

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-26 19:18:10 +00:00
softwarefactory-project-zuul[bot]
8ba4f728c0 Merge pull request #6083 from dsesami/fix-wf-title-id
Fixed workflow viz title ID

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-26 19:08:29 +00:00
softwarefactory-project-zuul[bot]
ee090d34fa Merge pull request #6071 from ryanpetrello/task-manager-hang-detection
add code for detecting (and killing) a hung task manager task

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-26 19:05:36 +00:00
Jake McDermott
bd30951a4f Pass empty array instead of null to rbac list
The DataListToolbar component expects an array for additional controls
in all cases. When there's no controls to pass into the toolbar,
provide an empty array to avoid type-related errors.
2020-02-26 13:22:22 -05:00
Jake McDermott
43cce83ba1 Handle api error 'detail' key as generic form error 2020-02-26 12:44:20 -05:00
Ryan Petrello
946d643795 cli: make launch with --monitor return code respect the final job status
see: https://github.com/ansible/awx/issues/5920
see: https://github.com/ansible/awx/issues/6079
2020-02-26 12:27:06 -05:00
Daniel Sami
1a6ea99d37 fixed workflow viz title id
prettier fix
2020-02-26 11:50:36 -05:00
gamuniz
350046d495 changed syntax as insert isn't required 2020-02-26 14:28:22 +00:00
beeankha
b532012748 Make non-required params actually optional, fix idempotency issues 2020-02-26 09:26:55 -05:00
beeankha
1c4042340c Update documentation for modules, add deprecation warning for role, send and receive modules
Update variables in tower_inventory to be in dict format
2020-02-26 09:27:02 -05:00
beeankha
787c4af222 Change default values for dict parameter
Removing default of empty dict from variables param on group and host modules

Make modules comply with updated sanity tests
2020-02-26 09:26:58 -05:00
Alan Rominger
768280c9ba [last PR stuff] + Add warning if configs specified in 2 params (#5)
* Lean on API validation for tower_inventory_source arg errors

used for
 - validating needed credential is given
 - missing source_project for scm sources

* Add warning when config is specified in 2 places

Fix up unit tests, address multiple comments re: backwards compatibility, redundant methods, etc.

Update new_name and variables parameters, update unit tests
2020-02-26 09:26:55 -05:00
Alan Rominger
2e4e687d69 Optional tower cli (#3)
* Allow running tests without tower_cli

* patch up test mutability

* Fix test import error, warning mock

* flake8 error

Update documentation for non-converted modules
2020-02-26 09:26:22 -05:00
John Westcott IV
d8513a4e86 Making variables work for hosts
Clear up sanity test and remove redundant import statement
2020-02-26 09:15:04 -05:00
John Westcott IV
badd667efa Removing manual and file source types and correcting default for custom_virtualenv 2020-02-26 09:15:04 -05:00
John Westcott IV
7908f25747 Remove reference to default check mode 2020-02-26 09:15:04 -05:00
John Westcott IV
0eef67713f Only try an ID lookup if we can convert the field name_or_id to an integer
Fix linting issues, update tower_project unit test
2020-02-26 09:15:04 -05:00
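A short sketch of that lookup rule (the resolver helpers here are hypothetical, not the module_utils API): only attempt a primary-key lookup when the value actually parses as an integer, otherwise fall back to a lookup by name.

```python
def resolve(name_or_id, lookup_by_id, lookup_by_name):
    # lookup_by_id / lookup_by_name are hypothetical callables returning an
    # object or None; integer-looking values get tried as a primary key first.
    try:
        pk = int(name_or_id)
    except (TypeError, ValueError):
        return lookup_by_name(name_or_id)
    return lookup_by_id(pk) or lookup_by_name(name_or_id)
```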
John Westcott IV
6591efc160 Fixed issue that caused warning message to always display
Because scm_update_cache_timeout has a default and thus will always be != None
2020-02-26 09:15:04 -05:00
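The reasoning above translates to comparing against the option's default instead of None; a hypothetical sketch (the default value and warning text are assumptions, not the module's exact behavior):

```python
DEFAULT_SCM_UPDATE_CACHE_TIMEOUT = 0  # assumed default for illustration

def maybe_warn(warn, params):
    # `is not None` would always be true because the option has a default,
    # so the check has to compare against that default instead.
    if (
        params.get("scm_update_cache_timeout", DEFAULT_SCM_UPDATE_CACHE_TIMEOUT)
        != DEFAULT_SCM_UPDATE_CACHE_TIMEOUT
        and not params.get("scm_update_on_launch")
    ):
        warn("scm_update_cache_timeout is ignored unless scm_update_on_launch is enabled")
```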
beeankha
fcc679489e Update inventory_source module source_script parameter to be optional
Unitied comment

Fix up inventory_source example, misc comment edits
2020-02-26 09:15:04 -05:00
Caleb Boylan
94df58a55b Fix strtobool casting 2020-02-26 09:15:04 -05:00
John Westcott IV
0685b2fa35 Updates to config file loading
Now supports json or yaml

Deprecated multiple k=v on a single line

Remove assert statement and unused import from module_util
2020-02-26 09:15:04 -05:00
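A minimal sketch of accepting either format for the config file (the fallback order is an assumption, not the collection's exact loader):

```python
import json

import yaml  # PyYAML


def load_config(path):
    """Parse the config file as JSON first, then fall back to YAML."""
    with open(path) as f:
        text = f.read()
    try:
        return json.loads(text)
    except ValueError:
        return yaml.safe_load(text)
```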
beeankha
232ea1e50c Properly cast verify_ssl type to a bool 2020-02-26 09:15:04 -05:00
beeankha
3423db6ed0 Attempt to make validate_certs work in Python2 2020-02-26 09:15:04 -05:00
beeankha
c32452d6b6 Fix Python2 config incompatibility issue 2020-02-26 09:15:04 -05:00
John Westcott IV
018dd4c1c3 Fixing config loading issues when the config has no [general] section
Fixed typo in help documentation

Fix up sanity errors and update converted modules

Remove unnecessary test playbook file
2020-02-26 09:15:04 -05:00
John Westcott IV
4fc2c58ae7 Converted tower_job_cancel 2020-02-26 09:15:04 -05:00
John Westcott IV
b4014ebabf Converted tower_job_launch.py 2020-02-26 09:15:04 -05:00
John Westcott IV
9955ee6548 Converting tower_inventory_source
Fix up inventory_source module changes, fix import yaml sanity error, change inventory_source unit tests to comply with new structure.
2020-02-26 09:15:04 -05:00
John Westcott IV
c08d402e66 Adding mutually exclusive if functionality to support tower_inventory_source 2020-02-26 09:15:04 -05:00
John Westcott IV
1c505beba6 Converted tower_group
Splitting out tower_inventory_source from tower_group

Copy/Paste typo fix and README update for breaking backwards compatibility

Update credential_type module and unit tests
2020-02-26 09:15:04 -05:00
beeankha
8a0432efb7 Change config file loading function, add py2 and py3 compatibility 2020-02-26 09:14:01 -05:00
beeankha
320276f8ca Remove JSONDecodeError exception, fix tower_host variable issue 2020-02-26 09:14:01 -05:00
John Westcott IV
f89061da41 Updating tower_org to use the new tower_api format
Pass sanity and unit tests, update tests

Remove placeholder test function, convert tower_host module, fix misc typos
2020-02-26 09:14:01 -05:00
John Westcott IV
c23d605a7a Modified modules to use new tower_api format
Fixed variable name typo
2020-02-26 09:14:01 -05:00
John Westcott IV
6d90cac3f9 Bug fixes for username and delete data 2020-02-26 09:14:01 -05:00
John Westcott IV
89e92bd337 Updating call to create_or_update_if_needed 2020-02-26 09:14:00 -05:00
John Westcott IV
9271127c53 Standardizing CRUD methods
Fixing linting and conversion issues
2020-02-26 09:14:00 -05:00
beeankha
9fa5942791 Fix superclass syntax for < Python3 2020-02-26 09:14:00 -05:00
John Westcott IV
e028ed878e More tower-cli-ish parsing of config files
Clear up test failures/linting errors, update unit test

Update module_utils for linter, add wait time to project module
2020-02-26 09:14:00 -05:00
John Westcott IV
838b2b7d1e Converted tower_project 2020-02-26 09:14:00 -05:00
beeankha
7c0ad461a5 Further module conversion changes, unit test changes
Multiple module changes

Added on_change callback

Added head_endpoint

Added additional error returns

Respond with a "try an ID" message if multiple assets are found by name, via the return_none_on_404 kwarg

Differentiated between login and logout token errors

Added is_job_done method
2020-02-26 09:14:00 -05:00
John Westcott IV
68926dad27 Adding team_fields
Convert job_list and inventory modules, other changes to make sanity tests pass
2020-02-26 09:14:00 -05:00
John Westcott IV
ceb6f6c47d Converted tower_settings
Changed comments
2020-02-26 09:14:00 -05:00
John Westcott IV
167e99fce9 Converted tower_user 2020-02-26 09:14:00 -05:00
John Westcott IV
c930011616 Removing default_check_mode 2020-02-26 09:14:00 -05:00
John Westcott IV
aaaca63f83 Converted tower_organization 2020-02-26 09:14:00 -05:00
John Westcott IV
d8a9f663b1 Converted tower_credential_type.py 2020-02-26 09:14:00 -05:00
John Westcott IV
b0d0ccf44f Fix fail_json and remove redundant handle check mode 2020-02-26 09:14:00 -05:00
John Westcott IV
c57754a29b Logout is now handled by exit_json 2020-02-26 09:14:00 -05:00
John Westcott IV
65057c1fb7 Auto-handle check_mode on post, patch, delete and update_if_needed methods 2020-02-26 09:14:00 -05:00
John Westcott IV
d8be6490c2 Only warn if we can't release a tower token 2020-02-26 09:14:00 -05:00
John Westcott IV
b34208d1b6 Attempting to fix logout loop 2020-02-26 09:14:00 -05:00
John Westcott IV
0d5a9e9c8c Initial implementation of Pull #5337 2020-02-26 09:14:00 -05:00
softwarefactory-project-zuul[bot]
22d4e60028 Merge pull request #6080 from ryanpetrello/django-upgrade
update Django to address a few open CVEs

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-26 14:09:12 +00:00
Ryan Petrello
eaa766df77 update Django to address a few open CVEs
https://github.com/advisories/GHSA-hmr4-m2h5-33qx
https://github.com/advisories/GHSA-vfq6-hq5r-27r6
2020-02-26 08:18:14 -05:00
softwarefactory-project-zuul[bot]
7e5776c66f Merge pull request #6077 from Spredzy/fix_linting
awx-api-lint: Fix setup.cfg syntax for linter test

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-26 13:10:21 +00:00
Ryan Petrello
8b1806d4ca add code for detecting (and killing) a hung task manager task 2020-02-26 07:53:04 -05:00
Yanis Guenane
07232f3694 awx-api-lint: Fix setup.cfg syntax for linter test
Signed-off-by: Yanis Guenane <yguenane@redhat.com>
2020-02-26 11:07:46 +01:00
Marliana Lara
37a33f931a Add completed jobs subtab to all resources
* Resources include: Host, InventoryHost, Inventory, SmartInventory, Template, and
WFTemplate
* Move JobList into top-level shared component directory
2020-02-25 21:50:31 -05:00
softwarefactory-project-zuul[bot]
4912cbd2da Merge pull request #6046 from rooftopcellist/update_translations_awx
add translations from memsource

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-25 23:19:07 +00:00
softwarefactory-project-zuul[bot]
4c40819791 Merge pull request #5965 from benthomasson/improve_awx_kit_error_message
Works around bad error message when authentication fails

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-25 23:16:58 +00:00
softwarefactory-project-zuul[bot]
a65fd497c6 Merge pull request #6060 from wenottingham/todo-or-todont
Remove the rax support specified in the linked TODO

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-25 23:11:23 +00:00
softwarefactory-project-zuul[bot]
d824209485 Merge pull request #6063 from philipsd6/setuid-brwap-dev
Set setuid bit on bwrap in development Dockerfile

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-25 21:07:44 +00:00
Philip Douglass
7ae1c7c3d2 Set setuid bit on bwrap in development Dockerfile
Related: #5224

Signed-off-by: Philip Douglass <philip.douglass@amadeus.com>
2020-02-25 15:20:01 -05:00
Bill Nottingham
341c6ae767 Also remove some dangling rax from awxkit and collection modules. 2020-02-25 15:18:26 -05:00
softwarefactory-project-zuul[bot]
e6a94ed0cf Merge pull request #6053 from marshmalien/5866-left-align-modal-buttons
Left align modal buttons

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-25 20:13:07 +00:00
Bill Nottingham
3e6b6c05a6 Remove the rax support specified in the linked TODO 2020-02-25 15:03:05 -05:00
Ryan Petrello
544d4cd3b0 add translations from memsource 2020-02-25 12:24:24 -05:00
softwarefactory-project-zuul[bot]
e0df2f511e Merge pull request #5536 from jbradberry/null-options
Adjust the logic that renders fields with choices on an API Options call

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-25 16:47:02 +00:00
Marliana Lara
255fd0a9cb Align modal buttons to the left 2020-02-25 10:12:58 -05:00
softwarefactory-project-zuul[bot]
f31adf8a85 Merge pull request #6015 from AlexSCorey/5777-JTTabOnProjectsAndTemplateListRefactor
Fixes navigation bug

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-25 14:48:36 +00:00
softwarefactory-project-zuul[bot]
a2b169626a Merge pull request #6035 from keithjgrant/lists-to-hooks
Refactor remaining lists to hooks

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-25 14:05:28 +00:00
Keith Grant
6ffc78bcb0 add missing i18n; fix test names 2020-02-24 14:09:56 -08:00
Keith Grant
8e9fc550f6 convert InventoryList to hooks 2020-02-24 14:09:55 -08:00
Keith Grant
779d190855 convert ProjectList to hooks 2020-02-24 14:09:28 -08:00
Keith Grant
89a4b03d45 convert TeamList to hooks 2020-02-24 14:08:51 -08:00
softwarefactory-project-zuul[bot]
ccd4cdd71a Merge pull request #6044 from fosterseth/fix-4418-failing_successful_updates
Do not fail a successful project update if inventory update fails

Reviewed-by: Seth Foster
             https://github.com/fosterseth
2020-02-24 18:12:52 +00:00
Seth Foster
31dbf38a35 Prevent failing a successful project update if inventory update fails
Scenario - job is launched and spawns inventory update and project update.
If the inventory update fails, then it will fail the job and the project update.
It will fail the project update even if that update already ran and was successful.

This code change will not fail the project update if it has already ran successfully.

In cases where other jobs depend on that project update (but not the failed inventory
update), then we don't want those jobs to fail.
2020-02-24 11:35:57 -05:00
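Reduced to its core, the rule described above is a single guard; a sketch, not the task manager's real code:

```python
def should_fail_project_update(project_update_status, inventory_update_failed):
    # A failed inventory update only propagates to the project update when
    # that project update has not already finished successfully.
    return inventory_update_failed and project_update_status != "successful"
```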
Jeff Bradberry
d0bec97bbb Adjust the logic that renders fields with choices on an API Options call
accounting for the case where blank or null is allowed.

Refs #5099
2020-02-24 10:17:38 -05:00
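As a rough illustration of the distinction (not AWX's actual metadata class): when a choice field allows blank or null, those should appear as separate valid values instead of both collapsing into the empty string.

```python
# Hypothetical helper: build OPTIONS-style choice metadata.
def choices_metadata(choices, allow_blank=False, allow_null=False):
    items = [{"value": value, "display_name": label} for value, label in choices]
    if allow_blank:
        items.insert(0, {"value": "", "display_name": "(blank)"})
    if allow_null:
        items.insert(0, {"value": None, "display_name": "None"})
    return items

print(choices_metadata([("run", "Run"), ("check", "Check")], allow_null=True))
```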
softwarefactory-project-zuul[bot]
22307bba97 Merge pull request #6026 from marshmalien/5866-remove-custom-tabs-buttons
Remove action button and tab style overrides

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-24 15:17:35 +00:00
softwarefactory-project-zuul[bot]
b4f5d44f65 Merge pull request #6040 from ryanpetrello/i18n-cancel-license
translate a missing string in the license UI

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-24 15:02:34 +00:00
softwarefactory-project-zuul[bot]
d469870686 Merge pull request #5937 from mabashian/5857-all-schedules
Adds the All Schedules list

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-24 14:40:06 +00:00
Alex Corey
f561bf5754 prettier 2020-02-24 09:19:02 -05:00
Ryan Petrello
2e3547d5cf translate a missing string in the license UI
see: https://github.com/ansible/tower/issues/3949
2020-02-24 09:00:07 -05:00
Alex Corey
ce8897d3e8 Fixes navigation bug by creating ProjectTemplateList and ProjectTemplateListItem
Adds tests for those new files and removes unnecessary test from TemplateListItem
2020-02-23 11:16:44 -05:00
Alex Corey
df77147d65 WIP 2020-02-23 10:23:43 -05:00
Alex Corey
9b11df04b3 Fixes navigation bug 2020-02-23 10:23:43 -05:00
root
58c06d5aea fixed precedence on ansible.cfg 2020-02-23 14:58:47 +00:00
softwarefactory-project-zuul[bot]
1d3bb97b07 Merge pull request #6018 from wenottingham/your-workflow-is-running--you-should-catch-it
Tweak labels for workflow pending approval message

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-22 12:57:07 +00:00
softwarefactory-project-zuul[bot]
ba3253e2e2 Merge pull request #6033 from AlanCoding/job_batch
Make job batch size configurable, fix _by fields

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-21 21:22:58 +00:00
mabashian
e6f0c01aa6 Schedule list now uses useRequest hooks for fetching and deleting. Also rolled a component for schedule toggles that can be used throughout the tree. 2020-02-21 16:14:22 -05:00
Marliana Lara
9310d59e0a Use PF color variables in HostStatus bar 2020-02-21 15:33:13 -05:00
Marliana Lara
f2e1e71302 Remove tab and button style overrides 2020-02-21 15:33:11 -05:00
mabashian
e6e31a9fc6 Updates after removing PF overrides to list components. 2020-02-21 15:30:09 -05:00
mabashian
801aaf9323 Adds the All Schedules list 2020-02-21 15:30:09 -05:00
softwarefactory-project-zuul[bot]
2a8679234a Merge pull request #6014 from keithjgrant/host-list-hooks
Host list hooks

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-21 20:28:41 +00:00
softwarefactory-project-zuul[bot]
54ab671512 Merge pull request #5933 from jlmitch5/credForm
Update form layout, Formik Field use to useField, and add credential form

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-21 20:05:31 +00:00
AlanCoding
866dd6b259 Make job batch size configurable, fix _by fields 2020-02-21 14:49:45 -05:00
Keith Grant
eba893c99b syntax fix after merge 2020-02-21 11:05:59 -08:00
John Mitchell
fd3f410cc6 fix conflict 2020-02-21 13:43:09 -05:00
Keith Grant
03aaf93cef update HostList tests for hooks 2020-02-21 10:36:17 -08:00
Keith Grant
9aef57003a use HostToggle in HostDetail; update tests 2020-02-21 10:36:17 -08:00
Keith Grant
6065eb0e65 Convert HostList to hooks
use useRequest and useDeleteItems
add HostToggle component
2020-02-21 10:31:49 -08:00
softwarefactory-project-zuul[bot]
7e4634c81f Merge pull request #6027 from AlanCoding/jt_shakeup
Randomize JT for each job batch

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-21 17:32:55 +00:00
softwarefactory-project-zuul[bot]
a03d73776f Merge pull request #6016 from AlanCoding/collection_metadata_update
Update metadata for the AWX collection

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-21 17:16:58 +00:00
John Mitchell
f14eb4327d remove unnecessary imports 2020-02-21 10:50:13 -05:00
John Mitchell
4ebd721cc5 remove card padding overrides to project add form 2020-02-21 10:50:13 -05:00
John Mitchell
21a92176b9 Update NodeTypeStep form to new FormLayout components 2020-02-21 10:50:13 -05:00
John Mitchell
ad04b02e24 update project edit test to make sure there is a local_path value set 2020-02-21 10:50:13 -05:00
John Mitchell
bc0511fe66 fix user form non submission error 2020-02-21 10:50:13 -05:00
John Mitchell
1accb9f939 fix warnings by correctly clearing out cred input values 2020-02-21 10:50:13 -05:00
John Mitchell
9253f16e36 update prettier 2020-02-21 10:50:13 -05:00
John Mitchell
42387166bf update encrypted value display on detail and form views 2020-02-21 10:50:13 -05:00
John Mitchell
0b5f892193 update FormLayout styling 2020-02-21 10:50:13 -05:00
John Mitchell
1a0d36a6fd migrate FormFullWidthLayout from var field to calling forms 2020-02-21 10:50:13 -05:00
John Mitchell
cf3ed0dc88 update prettier 2020-02-21 10:50:13 -05:00
John Mitchell
8d26d7861e add credential form and add edit routes 2020-02-21 10:50:13 -05:00
John Mitchell
8e0ad2ef6e add cred access tab and update credentials routing 2020-02-21 10:50:13 -05:00
John Mitchell
0aba4c36af move FormField label proptype check to FormField component 2020-02-21 10:50:13 -05:00
John Mitchell
44cd199078 add text area option to FormField component 2020-02-21 10:50:13 -05:00
John Mitchell
ce909093c0 update expanding container to trigger height check when any child changes 2020-02-21 10:50:13 -05:00
John Mitchell
df13a8fea9 update forms from FormRow to using FormLayout components 2020-02-21 10:50:13 -05:00
John Mitchell
ff823c9fdb update forms to useField fomik hook 2020-02-21 10:49:19 -05:00
softwarefactory-project-zuul[bot]
a42ff9865b Merge pull request #6022 from wenottingham/get-a-handle
Tweak workflow error message for clarity

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-21 15:37:57 +00:00
softwarefactory-project-zuul[bot]
7e13f78567 Merge pull request #6017 from ansible/no-screen-screen-imports2
Add section for patterns

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-21 15:21:49 +00:00
Bill Nottingham
e2fb83db98 Tweak workflow error message 2020-02-21 02:37:03 -05:00
AlanCoding
06eb1b6683 Randomize JT for each job batch
Populate some more fields just to have them populated

Include some missing ForeignKey links for data integrity

Add some more to fields from JT set
2020-02-20 22:49:11 -05:00
AlanCoding
d62994ec02 Update metadata for the AWX collection 2020-02-20 22:36:55 -05:00
Bill Nottingham
f20859c85f Tweak labels for workflow pending approval message 2020-02-20 22:19:34 -05:00
softwarefactory-project-zuul[bot]
b5b8adb451 Merge pull request #6025 from rooftopcellist/update_translations
UI translation strings for AWX

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-20 23:23:18 +00:00
softwarefactory-project-zuul[bot]
70b287490b Merge pull request #5982 from marshmalien/5866-remove-datalist-alert-modal
Remove DataList component overrides

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-20 23:17:05 +00:00
Christian Adams
0976e9e569 UI translation strings for AWX 2020-02-20 17:33:35 -05:00
Jake McDermott
83a96757db Add section for patterns 2020-02-20 15:57:08 -05:00
Marliana Lara
9013dcfea7 Move status icon into separate directory
* Remove ButtonGroup export
2020-02-20 15:29:47 -05:00
Marliana Lara
4ebc2573a3 Remove DataList component overrides 2020-02-20 15:21:34 -05:00
Marliana Lara
fe9b03a189 Move ButtonGroup into CodeMirrorInput directory 2020-02-20 15:19:10 -05:00
Marliana Lara
d2f6c367f0 Remove alert modal styles 2020-02-20 15:19:08 -05:00
Marliana Lara
34b717d00c Remove vertical separator 2020-02-20 15:16:14 -05:00
softwarefactory-project-zuul[bot]
0d31b05f98 Merge pull request #6012 from mabashian/workflow-action-ids-2
Expose id's on workflow action items

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-20 19:28:07 +00:00
softwarefactory-project-zuul[bot]
87a0e40331 Merge pull request #5938 from keithjgrant/4239-pagination-on-delete-followup
Pagination on delete (followup)

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-20 19:28:02 +00:00
mabashian
764c0b2e15 Fix unit test failures where we were using workflow action tooltip item id's 2020-02-20 12:57:38 -05:00
mabashian
23677b4963 Explicitly pass id through to the action item 2020-02-20 10:38:01 -05:00
mabashian
96d9d41f19 Expose id's on workflow action items 2020-02-20 10:24:37 -05:00
softwarefactory-project-zuul[bot]
a737f35653 Merge pull request #6007 from donomur/sa-annotations
Add functionality for kubernetes Service Account annotations

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-20 13:56:05 +00:00
Donovan Murphy
ed8133be2d add Service Account annotation
Signed-off-by: Donovan Murphy <dono@dono.email>
2020-02-19 20:29:42 -06:00
softwarefactory-project-zuul[bot]
7c8c6b5333 Merge pull request #6003 from ryanpetrello/more-firehoseyness
scope counter/start/end line updates to the current job for firehose.py

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-19 21:58:35 +00:00
Ryan Petrello
46fceb03a5 scope counter/start/end line updates to the current job for firehose.py 2020-02-19 16:15:33 -05:00
softwarefactory-project-zuul[bot]
4dee5eddeb Merge pull request #5989 from egmar/support-for-imagePullSecrets
Added support for K8S imagePullSecrets

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-19 19:45:12 +00:00
softwarefactory-project-zuul[bot]
709482bdac Merge pull request #5980 from shanemcd/downstream_oc_changes
properly configure AWX loggers for openshift installs

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-19 19:35:12 +00:00
softwarefactory-project-zuul[bot]
62ef1baace Merge pull request #5927 from AlanCoding/null_inventory
Handle case of deleted inventory

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-19 19:30:07 +00:00
softwarefactory-project-zuul[bot]
1fc3d2e914 Merge pull request #5986 from donomur/fix-indent-5949
Fixing kubernetes deployment indentation

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-19 19:12:26 +00:00
softwarefactory-project-zuul[bot]
d271a8c9fa Merge pull request #5978 from ryanpetrello/firehose-start-end-fix
fix start/end line incrementing behavior

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-19 18:54:41 +00:00
Shane McDonald
3bd7b3b0f8 Merge pull request #5999 from wenottingham/reinventing-the-whl
Remove requirements_setup_requires and related handling.
2020-02-19 12:11:31 -05:00
softwarefactory-project-zuul[bot]
8075cda34c Merge pull request #5932 from nixocio/add_test_awxkit
Add test method to Credential and CredentialType - awxkit 

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-19 16:22:56 +00:00
softwarefactory-project-zuul[bot]
09d6da117a Merge pull request #5979 from AlexSCorey/5814-WFJTDetailsView
Adds WFJT Details view

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-19 16:22:51 +00:00
Alex Corey
8f6b679c85 Fixes navigation issue and addresses ps issues. 2020-02-19 09:40:40 -05:00
Alex Corey
32e017bd03 Adds WFJT Details view
Adds Launch Functionality for WFJT.
2020-02-19 09:36:21 -05:00
Egor Margineanu
74a31224e0 Moved imagePullSecrets into ServiceAccount definition 2020-02-19 10:45:07 +02:00
Egor Margineanu
667b27fe78 Added support for K8S imagePullSecrets 2020-02-19 10:11:34 +02:00
Bill Nottingham
4c8a4013cc Remove requirements_setup_requires and related handling.
Adjust requirements to keep docutils in awx requirements; these
are downstream packaging adjustments. Override azure wheel
dependency to a version that can install bcrypt properly.
2020-02-18 22:34:59 -05:00
Donovan Murphy
5e4d73b6a3 fix indent 2020-02-18 20:27:57 -06:00
nixocio
da486d7788 Add test method to Credential and CredentialType - awxkit
Add test method to Credential and CredentialType - awxkit.

The need for this was discovered when testing the following
issue: https://github.com/ansible/awx/issues/5141
2020-02-18 14:44:16 -05:00
Keith Grant
30d97e2fa8 delete commented code 2020-02-18 10:10:18 -08:00
Ryan Petrello
3a95114c3a properly configure AWX loggers for openshift installs
see: https://github.com/ansible/tower/issues/3793
2020-02-18 12:50:46 -05:00
Keith Grant
1f3ad85403 de-lint 2020-02-18 09:25:15 -08:00
Ryan Petrello
90cb02e0bf fix start/end line incrementing behavior 2020-02-18 11:31:05 -05:00
softwarefactory-project-zuul[bot]
6e2bd828a1 Merge pull request #5973 from ryanpetrello/job-firehose
add the ability to load lots of jobs with firehose.py

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-18 16:30:12 +00:00
softwarefactory-project-zuul[bot]
fbbf5046ac Merge pull request #5949 from jdorel/patch-2
Add openshift label `app`

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-18 16:22:09 +00:00
softwarefactory-project-zuul[bot]
47abb6f85f Merge pull request #5975 from shanemcd/sans-virtualenv
Fix downstream builds

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-18 16:03:52 +00:00
Ryan Petrello
717698b659 properly inherit JT fields when creating many jobs with firehose.py 2020-02-18 10:34:51 -05:00
Shane McDonald
6a29a0898a Pin shellingham
Was seeing:

Collecting shellingham<2.0,>=1.1 (from poetry==0.12.17->-r tower/requirements/requirements_setup_requires.txt (line 12))
  File was already downloaded /ansible-tower/tower/requirements/vendor/shellingham-1.3.2.tar.gz
    Complete output from command python setup.py egg_info:
    Traceback (most recent call last):
      File "<string>", line 1, in <module>
      File "/usr/lib64/python3.6/tokenize.py", line 452, in open
        buffer = _builtin_open(filename, 'rb')
    FileNotFoundError: [Errno 2] No such file or directory: '/tmp/pip-build-jixdv7cx/shellingham/setup.py'
2020-02-18 09:32:24 -05:00
Shane McDonald
1833872be9 Drop virtualenv from requirements_setup_requires.txt 2020-02-18 09:17:53 -05:00
Ryan Petrello
4d06c812e6 add the ability to load lots of jobs with firehose.py
$ awx-python tools/scripts/firehose.py --jobs 5000000 --events 100000000
2020-02-18 08:55:06 -05:00
Keith Grant
3b71d2a37b convert JobList to function comp w/ hooks 2020-02-17 16:11:48 -08:00
Ben Thomasson
0c0cacb0d6 Works around bad error message when authentication fails
related #5964
2020-02-17 15:59:05 -05:00
softwarefactory-project-zuul[bot]
f57fff732e Merge pull request #5712 from mabashian/ui-next-workflows-4
UI Next workflow graph edit/results

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-17 19:03:21 +00:00
Keith Grant
54ddeaf046 Flesh out useDeleteItems hook
refactor TemplateList to use useRequest, useDeleteItems hooks
refactor CredentialList and OrganizationList to use useDeleteItems hook
2020-02-17 10:56:50 -08:00
Keith Grant
69a1a02c70 build useDeleteItems hook 2020-02-17 10:56:50 -08:00
Keith Grant
c824f0d590 create updateUrlAfterDelete util 2020-02-17 10:56:50 -08:00
Keith Grant
c336c989e7 convert CredentialList to useRequest 2020-02-17 10:56:50 -08:00
mabashian
f6523ab5a0 Adds id's to the node name and the node type letter 2020-02-17 12:41:24 -05:00
mabashian
47c783da37 Remove Workflow Visualizer text from visualizer toolbar 2020-02-17 12:41:24 -05:00
mabashian
74afc7b424 Run prettier to fix failures 2020-02-17 12:41:24 -05:00
John Hill
4ac5a1e15a Add IDs for nodes badge and template name 2020-02-17 12:41:24 -05:00
mabashian
48eeeea7f3 Disable tools/legend buttons when the workflow has no nodes and is displaying the start screen. 2020-02-17 12:41:24 -05:00
mabashian
aa6857fd38 Hide message stating that the resource has been deleted if the output node is an approval node 2020-02-17 12:41:24 -05:00
mabashian
25fe2a2ce6 Adds tests for the node type step components in the node modal 2020-02-17 12:41:24 -05:00
mabashian
3d1e3741cd Run prettier 2020-02-17 12:41:24 -05:00
mabashian
2ef57e0221 Adds id to workflow visualizer start screen button 2020-02-17 12:41:24 -05:00
mabashian
bc08c02b89 Adds ID's to toolbar(s) and action buttons within visualizer/output toolbars. 2020-02-17 12:41:24 -05:00
mabashian
50c74a2ec8 Adds test coverage for node modal components 2020-02-17 12:41:24 -05:00
mabashian
887469d73e Adds test for link add/edit/delete modals 2020-02-17 12:41:23 -05:00
mabashian
f9debb8f94 Adds tests for delete all nodes and unsaved changes modals 2020-02-17 12:41:23 -05:00
mabashian
b3929d1177 Remove HorizontalSeparator component. Patternfly now has a Divider component that we can use if we need something like this. 2020-02-17 12:41:23 -05:00
mabashian
e3cfdb74ba Adds basic unit test coverage to visualizer components (not including modals). 2020-02-17 12:41:23 -05:00
mabashian
1d0e752989 Adds unit test coverage for workflow reducer 2020-02-17 12:41:23 -05:00
mabashian
05a3bb0622 Removes unnecessary 'type' field on nodes and links 2020-02-17 12:41:23 -05:00
mabashian
bc7fd26af6 Fixes bug where navigating from one output route to another was breaking 2020-02-17 12:41:23 -05:00
mabashian
048d4dbd95 Fixes bug where viewing the workflow output, clicking a node to view the details of that job and then hitting the back button would result in an error and no output graph. 2020-02-17 12:41:23 -05:00
mabashian
c70e5357d3 Mark min/sec labels for translation 2020-02-17 12:41:23 -05:00
mabashian
7576ba2ade Swap history.push for history.replace to alleviate console warning dealing with pushing to the same path. 2020-02-17 12:41:23 -05:00
mabashian
877e630a90 Replace withRouter with useHistory hook 2020-02-17 12:41:23 -05:00
mabashian
ef854aabb7 Adds test coverage to the workflow output and workflow output graph components 2020-02-17 12:41:23 -05:00
mabashian
fc3f19bd2b Fix some styling discrepancies between Chrome and Firefox in the workflow viz/output graphs.
Cleans up deleted job/job template use cases. Shows a message indicating that the ujt associated with a node has been deleted.
2020-02-17 12:41:23 -05:00
mabashian
2bbcd2d663 Move visualizer/workflow output state logic out to reducer and refactor some of the larger functions. Introduces contexts for state/dispatch that can be used by descendent components of both the visualizer and the workflow output components. 2020-02-17 12:41:23 -05:00
mabashian
a786118415 Removes reference to the node unified job template name in the view modal for now. This component is really just a placeholder and this change fixes an error that is thrown when the node's unified job template is deleted. 2020-02-17 12:41:23 -05:00
mabashian
65429e581a Properly bookend @constants alias so that it doesn't inadvertently match something we don't want it to match 2020-02-17 12:41:23 -05:00
mabashian
eb6f4dca55 Run prettier 2020-02-17 12:41:22 -05:00
mabashian
ce09c4b3cd Changes "Key" references to "Legend" 2020-02-17 12:41:22 -05:00
mabashian
c971e9d61c Turns all the workflow tool buttons in to pf button components. 2020-02-17 12:41:22 -05:00
mabashian
e34bf90ca7 Vertically center the pause icon in the node type letter component 2020-02-17 12:41:22 -05:00
mabashian
700296a558 Change Exit button text to Exit Without Saving in the unsaved changes modal. 2020-02-17 12:41:22 -05:00
mabashian
492ea0616e Moves util/workflow.jsx to components/Workflow/WorkflowUtils.jsx and updates imports 2020-02-17 12:41:22 -05:00
mabashian
eddb6e1faf Combines the two start node components into one. Removes use of document.getElementById in workflow components in favor of refs. 2020-02-17 12:41:22 -05:00
mabashian
f98b274177 Adds default search and sort columns to the four lists in the workflow node wizard 2020-02-17 12:41:22 -05:00
mabashian
662ff41fe9 Moves inline css to styled component in WorkflowOutputToolbar 2020-02-17 12:41:22 -05:00
mabashian
fd146dde30 Adds unit test coverage to some of the workflow output components. Also adds support for hovering on workflow results links to see the edge type (success/fail/always). 2020-02-17 12:41:22 -05:00
mabashian
e394d0a6f6 Add close button to workflow tools/key 2020-02-17 12:41:22 -05:00
mabashian
5a1a47b7aa Default '---' not needed due to changes to how the prop is passed to CodeMirrorInput 2020-02-17 12:41:22 -05:00
mabashian
3d5c32c354 Update function/callback props passed into workflow components to start with "on" 2020-02-17 12:41:22 -05:00
mabashian
01cc0ac8f1 Adds unit test coverage for shared workflow components 2020-02-17 12:41:22 -05:00
mabashian
5a9248e619 Prettier 2020-02-17 12:41:22 -05:00
mabashian
1d84d03566 Stabilized workflow visualizer and output point. Workflow jobs can be viewed and workflows can be built (without jt prompting). 2020-02-17 12:41:22 -05:00
mabashian
50ba4f9759 Halfway implemented node details. Still need to handle cases where the user has edited the node and cases where the node is brand new. 2020-02-17 12:41:21 -05:00
mabashian
de55af6ae6 Fully functioning workflow editor without read-only view modal and without prompting. 2020-02-17 12:41:21 -05:00
mabashian
ca478ac880 Render workflow results. Extensive refactors of workflow components in general. 2020-02-17 12:41:21 -05:00
softwarefactory-project-zuul[bot]
78ea643460 Merge pull request #5956 from chrismeyersfsu/fix-new_postgres
allow external pg connections

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-17 15:56:25 +00:00
chris meyers
0db0f81e53 allow external pg connections
* Postgres containers now, by default, do not allow passwordless users
to connect remotely. This change explicitly allows that case.
2020-02-17 10:16:20 -05:00
Jonas DOREL
c94680eaba Add openshift label app 2020-02-15 14:49:05 +01:00
softwarefactory-project-zuul[bot]
5b4ed6dd59 Merge pull request #5923 from mabashian/4967-prompt-on-launch-checkboxes
[POC] Adds FieldWithPrompt component to handle fields that are also promptable

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-14 18:51:07 +00:00
mabashian
4e811c744a Use FieldTooltip instead of Tooltip component. Remove promptValidate prop from FieldWithPrompt. This checkbox shouldn't ever need a custom validator function. 2020-02-14 10:56:11 -05:00
softwarefactory-project-zuul[bot]
cd6d2299a9 Merge pull request #5914 from marshmalien/5866-remove-custom-list-btns
Remove Switch, ListActionButton, and ActionButtonCell components

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-13 21:14:31 +00:00
Marliana Lara
590199baff Remove ListActionButton component 2020-02-13 15:36:17 -05:00
Marliana Lara
3b9dd3ba8c Remove ActionButtonCell component 2020-02-13 15:25:25 -05:00
Marliana Lara
446021cf22 Remove Switch component 2020-02-13 15:25:22 -05:00
softwarefactory-project-zuul[bot]
ef3ab29649 Merge pull request #5897 from marshmalien/remove-card-style-overrides
Remove PF style overrides Pt. 1

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-13 20:03:36 +00:00
softwarefactory-project-zuul[bot]
f4e09eee80 Merge pull request #5918 from ryanpetrello/real-event-firehose-values
set actual counter/start/end values in the event generation script

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-13 19:47:42 +00:00
softwarefactory-project-zuul[bot]
af4e4b4064 Merge pull request #5922 from jakemcdermott/fix-4095
Use pod uid as instance uuid

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-13 16:11:35 +00:00
mabashian
10c6297706 Adds basic unit test coverage for the FieldWithPrompt component 2020-02-13 09:42:09 -05:00
softwarefactory-project-zuul[bot]
73a9541e39 Merge pull request #5928 from anxstj/cleanup_docs
Remove docker_remove_local_images from documentation

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-13 14:15:18 +00:00
mabashian
3a2a61af82 Adds tooltip to run type that was previously removed. Fixes unit test failures by adding ask_job_type_on_launch to mock data. 2020-02-13 09:06:35 -05:00
Stefan Jakobs
774e7fb248 Remove docker_remove_local_images from documentation
docker_remove_local_images was removed with commit 28994d4b0b (diff-c12c21a2e99296acf472dc226bc19da8)
(version 9.0.0). This PR removes it from INSTALL and inventory documentation.

Signed-off-by: Stefan Jakobs <sjakobs@anexia-it.com>
2020-02-13 14:34:45 +01:00
AlanCoding
a5e3d9558f Handle case of deleted inventory 2020-02-13 08:29:52 -05:00
softwarefactory-project-zuul[bot]
1ae86ae752 Merge pull request #5912 from keithjgrant/4239-pagination-on-delete
Adjust pagination when deleting final page of items

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-12 21:48:16 +00:00
softwarefactory-project-zuul[bot]
1a30a0e397 Merge pull request #5921 from beeankha/fix_flake8_errors
Fix Misc. flake8 Errors

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-12 21:36:28 +00:00
Keith Grant
490b76b141 fix TemplateList name in tests 2020-02-12 13:10:14 -08:00
Jake McDermott
3831efb3be Use pod uid as instance uuid
Inject the pod uid as an environment variable and use it for the
SYSTEM_UUID in the settings file defined by the configmap.
2020-02-12 13:52:09 -05:00
mabashian
a8fa816165 Adds FieldWithPrompt component to handle fields that are also promptable 2020-02-12 13:50:54 -05:00
beeankha
11ccfd8449 Fix misc. linting errors 2020-02-12 12:34:15 -05:00
Keith Grant
c33cc82d53 go back one page when deleting all items off last page (orgs/creds) 2020-02-12 08:53:34 -08:00
softwarefactory-project-zuul[bot]
c7516ec50e Merge pull request #5917 from ansible/jakemcdermott-order-by-start-line
Order job_event requests by `start_line`

Reviewed-by: Ryan Petrello
             https://github.com/ryanpetrello
2020-02-12 16:07:06 +00:00
Ryan Petrello
92cc597e84 set actual counter/start/end values in the event generation script 2020-02-12 10:22:31 -05:00
Jake McDermott
7402ac29a8 Order job_event requests by start_line
Ordering job_event requests by `start_line` accomplishes the same thing
as ordering by `counter`. The `start_line` field is more performant
because we have indexed it in the database.
2020-02-12 10:11:28 -05:00
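For illustration of the commit above only (the request shape is an assumption based on the commit message and AWX's usual order_by query parameter, not code taken from the change itself), fetching a job's events ordered by the indexed start_line field might look like this in Python:

    # Hedged sketch: request job events ordered by the indexed `start_line`
    # field instead of `counter`. Host, job id, and credentials are placeholders.
    import requests

    AWX_HOST = "https://awx.example.com"   # hypothetical host
    JOB_ID = 42                            # hypothetical job id

    resp = requests.get(
        f"{AWX_HOST}/api/v2/jobs/{JOB_ID}/job_events/",
        params={"order_by": "start_line", "page_size": 200},
        auth=("admin", "password"),        # example credentials
    )
    resp.raise_for_status()
    for event in resp.json()["results"]:
        print(event["start_line"], event.get("stdout", "")[:80])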
softwarefactory-project-zuul[bot]
4a455c7bf7 Merge pull request #5877 from AlanCoding/control_log
Add wording for control message log

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-11 21:41:45 +00:00
softwarefactory-project-zuul[bot]
10167eea8d Merge pull request #5894 from ryanpetrello/version-920
bump version to 9.2.0

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-11 21:32:24 +00:00
softwarefactory-project-zuul[bot]
46ddc84d2a Merge pull request #5898 from AlanCoding/exception_detail
By default, give status code in exception representation

Reviewed-by: Alan Rominger <arominge@redhat.com>
             https://github.com/AlanCoding
2020-02-11 21:18:15 +00:00
Keith Grant
5c3fe51982 fix credential list page number after deleting 2020-02-11 12:09:13 -08:00
Ryan Petrello
b8ec3104a9 bump version to 9.2.0 2020-02-11 14:07:02 -05:00
softwarefactory-project-zuul[bot]
b098127961 Merge pull request #5876 from AlanCoding/public_400
Fix bug where setting toggle did not raise error it needed to

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-11 18:55:16 +00:00
Keith Grant
f61af39f08 fix Template(s)List naming discrepancies 2020-02-11 10:24:08 -08:00
softwarefactory-project-zuul[bot]
1f0294d389 Merge pull request #5847 from marshmalien/4221-job-output-header
Add job event summary toolbar

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-11 18:16:48 +00:00
Keith Grant
1ad7e663a1 fix org list page on delete 2020-02-11 10:11:27 -08:00
AlanCoding
3172176940 By default, give status code in exception representation 2020-02-11 13:08:20 -05:00
Marliana Lara
ca85020b26 Use PF Chip and ChipGroup components 2020-02-11 12:50:25 -05:00
Marliana Lara
5d2912605f Remove PF style overrides from card, breadcrumb, sidenav, tooltip, and modal components 2020-02-11 12:50:12 -05:00
softwarefactory-project-zuul[bot]
b38ec3599b Merge pull request #5887 from ryanpetrello/webhook-cred-summary-field
include credential type details in the webhook credential summary field

Reviewed-by: Jeff Bradberry
             https://github.com/jbradberry
2020-02-11 16:37:57 +00:00
Ryan Petrello
487343a022 include credential type details in the webhook credential summary field
see: https://github.com/ansible/awx/issues/5882
2020-02-11 10:51:07 -05:00
Marliana Lara
69049a4427 Convert elapsed days into hours and add unit test 2020-02-11 10:19:23 -05:00
softwarefactory-project-zuul[bot]
be6b42561f Merge pull request #5848 from marshmalien/5785-form-field-tooltip
Max host form field tooltip should not enable field

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-11 15:12:41 +00:00
AlanCoding
e59cb07064 Add wording for control message log 2020-02-11 10:01:25 -05:00
AlanCoding
0234df055d Raise errors with public galaxy setting early 2020-02-11 09:46:07 -05:00
softwarefactory-project-zuul[bot]
b54c036398 Merge pull request #5875 from AlanCoding/computed_fields_param
Remove argument no longer accepted by computed fields task

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-11 14:32:27 +00:00
softwarefactory-project-zuul[bot]
eafd40291e Merge pull request #5874 from chrismeyersfsu/fix-pin_virtualenv_awx
pin virtualenv < 20 for awx_web builds

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-11 14:16:14 +00:00
AlanCoding
519956f779 Remove argument no longer accepted by computed fields task 2020-02-11 08:55:56 -05:00
chris meyers
0b3e2cc7e3 pin virtualenv < 20 for awx_web builds 2020-02-11 08:43:26 -05:00
softwarefactory-project-zuul[bot]
efa9c84806 Merge pull request #5870 from mabashian/4227-jobs-list-sort
Adds missing job list search fields

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-11 03:06:13 +00:00
softwarefactory-project-zuul[bot]
5ed623d682 Merge pull request #5842 from keithjgrant/4240-form-error-handling
Form error handling

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-11 01:16:47 +00:00
Keith Grant
8f77d15a31 fix errors being logged during tests; de-lint 2020-02-10 16:16:38 -08:00
Keith Grant
d06d4d5a8c update tests for form submit errors 2020-02-10 16:16:38 -08:00
Keith Grant
352c8c3cb1 add FormSubmitError to existing forms 2020-02-10 16:16:38 -08:00
Keith Grant
94f21a3464 add submit error support to Project form 2020-02-10 16:16:38 -08:00
Keith Grant
ac376f9c87 handle __all__ error message from server in FormSubmitError 2020-02-10 16:16:38 -08:00
Keith Grant
44e4263bee add FormActionGroup error message test 2020-02-10 16:16:38 -08:00
Keith Grant
b7f3852ef9 move FormSubmitError to inline beside form buttons; add tests 2020-02-10 16:16:38 -08:00
Keith Grant
a934e146ee add FormSubmitError component 2020-02-10 16:16:38 -08:00
Keith Grant
cab25656eb add JT form error feedback from API errors 2020-02-10 16:16:38 -08:00
Keith Grant
0f9c906a22 add more robust handling of errors thrown by api 2020-02-10 16:16:38 -08:00
Keith Grant
b8226109a7 use optional chaining 2020-02-10 16:16:38 -08:00
Keith Grant
b26de8b922 pass Host form API errors back into Formik for display 2020-02-10 16:16:38 -08:00
Keith Grant
67d8c1a4b5 delete unused/redundant component 2020-02-10 16:16:38 -08:00
softwarefactory-project-zuul[bot]
0ef7ef22eb Merge pull request #5838 from jainnikhil30/fix_slicejob_relaunch
fix the sliced job relaunch

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-10 23:40:06 +00:00
softwarefactory-project-zuul[bot]
47383e05d6 Merge pull request #5837 from ryanpetrello/celery-tastes-gross
get rid of celerybeat (and celery + billiard dependency)

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-10 23:26:20 +00:00
softwarefactory-project-zuul[bot]
3dd97feaa6 Merge pull request #5869 from mabashian/4220-templates-list-sort
Add missing template list sort fields

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-10 23:06:06 +00:00
Nikhil Jain
e530adde67 fix the sliced job relaunch 2020-02-10 17:35:50 -05:00
Ryan Petrello
38a08d163c get rid of celery/celerybeat
alternative to https://github.com/ansible/awx/pull/2530 which makes use
of https://pypi.org/project/schedule/

this doesn't have support for any persistence (like how celery beat uses
a shelve file), because all of our periodic jobs run at most every few
minutes
2020-02-10 17:32:02 -05:00
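For context on the commit above, the schedule library it references exposes a small in-process scheduling API; the sketch below is illustrative only (the task functions are placeholders, not AWX's actual periodic jobs):

    # Minimal sketch of the `schedule` library (https://pypi.org/project/schedule/).
    import time
    import schedule

    def gather_analytics():
        print("gathering analytics...")

    def cleanup_tokens():
        print("cleaning up expired tokens...")

    schedule.every(20).seconds.do(cleanup_tokens)
    schedule.every(4).hours.do(gather_analytics)

    while True:
        schedule.run_pending()   # run any jobs whose interval has elapsed
        time.sleep(1)

Because nothing is persisted between restarts, jobs are simply re-registered on startup, which is acceptable when every periodic task runs at most every few minutes.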
softwarefactory-project-zuul[bot]
7b4adfcc15 Merge pull request #5868 from ryanpetrello/python-packaging-is-a-disaster
pin virtualenv < 20 for ansible venv builds

Reviewed-by: Ryan Petrello
             https://github.com/ryanpetrello
2020-02-10 22:31:26 +00:00
mabashian
5d6e1284e3 Adds missing job list search fields 2020-02-10 17:00:44 -05:00
mabashian
a0ba125ea9 Brings template list sort fields into parity with the existing UI 2020-02-10 16:34:37 -05:00
Ryan Petrello
ad5d0b92db pin virtualenv < 20 for ansible venv builds
virtualenv version 20 just got released and broke a bunch of stuff
(like the --system-site-packages flag)
2020-02-10 16:31:34 -05:00
Marliana Lara
debbac5c78 Use date object to format elapsed time 2020-02-10 14:25:34 -05:00
softwarefactory-project-zuul[bot]
f4f4a7caec Merge pull request #5851 from xelgand/devel
k8s installer: fix warning when applying deployment

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-10 18:20:36 +00:00
Marliana Lara
b00249b515 Add job event summary toolbar 2020-02-10 12:47:23 -05:00
softwarefactory-project-zuul[bot]
cd49213924 Merge pull request #5846 from wenottingham/proxy-env-y
Use AWX_TASK_ENV when connecting to Red Hat services

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-10 16:31:19 +00:00
softwarefactory-project-zuul[bot]
9a47a28b80 Merge pull request #5850 from ryanpetrello/changelogly
update the changelog in anticipation of a forthcoming AWX release

Reviewed-by: Seth Foster
             https://github.com/fosterseth
2020-02-10 15:31:12 +00:00
xelgand
7b9ad1d69a k8s installer: fix warning when applying deployment 2020-02-09 16:35:10 +01:00
softwarefactory-project-zuul[bot]
6df00e1e4c Merge pull request #5776 from bhundven/5371-Upgrade_to_helm_3_x
Make AWX compatible with Helm 3.x

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-09 14:13:11 +00:00
Bryan Hundven
7d2ed7b763 Bump stable/postgresql to 8.3.0
https://hub.helm.sh/charts/stable/postgresql/8.3.0

Signed-off-by: Bryan Hundven <bryanhundven@gmail.com>
2020-02-07 15:07:44 -08:00
Ryan Petrello
b08e5db267 update the changelog in anticipation of a forthcoming AWX release 2020-02-07 17:12:38 -05:00
softwarefactory-project-zuul[bot]
8991396d23 Merge pull request #5828 from AlanCoding/openstack_bump
Bump openstacksdk version

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-07 20:09:59 +00:00
Bryan Hundven
76a6f84c70 Remove tempfile after running helm
Signed-off-by: Bryan Hundven <bryanhundven@gmail.com>
2020-02-07 12:04:11 -08:00
Bryan Hundven
a984e5df7a Have helm stable repo before running helm repo update
It would be nice if the `helm` ansible module allowed you to just manage
helm repos, or maybe a `helm_repo` module... but shell with it ;)

Signed-off-by: Bryan Hundven <bryanhundven@gmail.com>
2020-02-07 11:58:18 -08:00
Bryan Hundven
282d705c43 Remove tiller_namespace from default inventory
Signed-off-by: Bryan Hundven <bryanhundven@gmail.com>
2020-02-07 11:57:34 -08:00
Marliana Lara
43e1b4a7db Max host form field tooltip should not enable field 2020-02-07 14:30:33 -05:00
Bill Nottingham
71ef7cdec1 Use AWX_TASK_ENV when connecting to Red Hat services 2020-02-07 14:29:42 -05:00
softwarefactory-project-zuul[bot]
5decde3f70 Merge pull request #5845 from shanemcd/missing-base-image-deps
Add packages missing from base images

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-07 18:54:46 +00:00
Shane McDonald
3f57061509 Add packages missing from base images
Related:

- https://github.com/ansible/awx/issues/5770
- https://github.com/ansible/awx/issues/5724
2020-02-07 13:06:42 -05:00
softwarefactory-project-zuul[bot]
6395d64681 Merge pull request #5843 from wenottingham/intentionally-adding-database-queries
Bypass memcached to get last gather time to avoid reading cached values.

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-07 13:57:54 +00:00
Bill Nottingham
f3e2caeaa7 Bypass memcached to get last gather time to avoid reading cached values. 2020-02-06 21:41:41 -05:00
softwarefactory-project-zuul[bot]
ce5c4359ee Merge pull request #5787 from fosterseth/tm_processed_field
Improve task manager performance for task dependencies

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-06 21:30:48 +00:00
softwarefactory-project-zuul[bot]
c4ddf50cad Merge pull request #5832 from marshmalien/output-status-bar
Add host status bar

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-06 21:22:23 +00:00
Marliana Lara
d250dd0cd6 Adjust ansi colors to complement the host status bar 2020-02-06 14:12:49 -05:00
softwarefactory-project-zuul[bot]
96bbbdd5c9 Merge pull request #5836 from ryanpetrello/migrate-error
fix scary error message on initial (install time) awx-manage migrate

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-06 17:31:40 +00:00
Seth Foster
9b4b2167b3 TaskManager process dependencies only once
This adds a boolean "dependencies_processed" field to UnifiedJob
model. The default value is False. Once the task manager generates
dependencies for this task, it will not generate them again on
subsequent runs.

The changes also remove .process_dependencies(), as this method repeats
the same code as .process_pending_tasks(), and is not needed. Once
dependencies are generated, they are handled at .process_pending_tasks().

Adds a unit test that should catch regressions for this fix.
2020-02-06 11:47:33 -05:00
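As a rough sketch of the pattern described above (not the literal AWX code; the model and function names here are illustrative apart from dependencies_processed):

    from django.db import models

    class UnifiedJobSketch(models.Model):
        # Flag flipped once dependencies have been generated for this task.
        dependencies_processed = models.BooleanField(default=False)

        class Meta:
            abstract = True

    def generate_dependencies(tasks):
        """Build dependencies only for tasks that have not been processed yet."""
        for task in tasks:
            if task.dependencies_processed:
                continue
            # ... create project update / inventory update dependencies here ...
            task.dependencies_processed = True
            task.save(update_fields=["dependencies_processed"])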
Marliana Lara
028a0a9279 Adjust host status colors 2020-02-06 11:21:14 -05:00
softwarefactory-project-zuul[bot]
30354dbcd0 Merge pull request #5621 from rebeccahhh/workflow-convergence
Any/All boolean added in workflow convergence

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-06 15:30:40 +00:00
Ryan Petrello
543a87ac88 fix error message on initial awx-manage migrate 2020-02-06 06:37:05 -05:00
Marliana Lara
4be7cf66ec Add host status bar 2020-02-05 19:57:55 -05:00
softwarefactory-project-zuul[bot]
fd027f87a9 Merge pull request #5810 from keithjgrant/use-endpoint
Add useRequest hook

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-05 23:26:42 +00:00
softwarefactory-project-zuul[bot]
dac6e115c1 Merge pull request #5829 from marshmalien/job-output-navigation
Style job output pagination control bar

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-05 23:17:16 +00:00
Rebeccah
eca516f8ce removed 'self' from positional argument of workflow attribute 'all_parents_must_converge' per Alan's suggestion, since Django takes it to be verbose_name, which is not needed 2020-02-05 17:38:36 -05:00
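For readers unfamiliar with the Django detail mentioned in that commit: the first positional argument of a model field is interpreted as verbose_name, so an accidental positional value becomes the field's verbose name. A minimal sketch (the field name comes from the commit; the default and model name are assumptions):

    from django.db import models

    class WorkflowJobTemplateNodeSketch(models.Model):
        # BooleanField(<something>, default=False) would treat <something> as
        # verbose_name; keyword-only arguments avoid that entirely.
        all_parents_must_converge = models.BooleanField(default=False)

        class Meta:
            abstract = True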
Jake McDermott
b06645e125 Show any/all convergence option on root node 2020-02-05 15:51:38 -05:00
Rebeccah
fd60cd1a35 fixed copy functionality to include convergence as is instead of resetting ALL nodes to be default ANY nodes 2020-02-05 15:34:54 -05:00
Rebeccah
ad8bcd0de2 moved migration dependency from 0106 to 0107 2020-02-05 14:28:35 -05:00
Rebeccah
fdc29eebb7 expanded unit test to include 3 root nodes meeting on a convergence node and successfully marking that to node. 2020-02-05 14:28:35 -05:00
Rebeccah
63ae2cac38 Jake McDermott found some behavior that revealed a logical bug that would have caused issues later with ALL convergence nodes in sequential order via the API (although not the UI), and was causing existing issues with root nodes spawning repeatedly. To fix this, I refactored the code for marking DNR nodes into its own function that checks the parents' convergence criteria, and leveraged that in bfs_nodes_to_run so that root nodes and convergence nodes can be differentiated but both can be correctly processed, and so that children of convergence nodes can be properly traversed by the function 2020-02-05 14:28:35 -05:00
Rebeccah
4e787cc079 made marking nodes as DNR more 'eager', added more unit tests, and added convergence check to bfs_nodes_to_run with new changes to the eagerness of DNR marking since it needs it to prevent convergence nodes from running too quickly 2020-02-05 14:28:35 -05:00
Apurva Bakshi
2de37ce5df add convergence attribute to awxkit 2020-02-05 14:28:35 -05:00
Rebeccah
a419547731 redid some formatting and syntax per personal preferences, comments on PR, and suggestions from @jrb 2020-02-05 14:28:35 -05:00
Jake McDermott
04844aa44f Add 'ALL' indicator to 'ALL' nodes 2020-02-05 14:28:35 -05:00
Rebeccah
1b3fbee38d workflow convergence toggle migration 2020-02-05 14:28:35 -05:00
Rebeccah
6d2a2ab714 drastically improved performance by removing unnecessary iteration over children of parent nodes; additionally added an extra check that the node didn't already have a job, so that it wasn't cycling over nodes that had already run when running through all_nodes 2020-02-05 14:28:35 -05:00
Rebeccah
82dd4a3884 remove node_object comparison and use the full dict to eliminate issues comparing obj; instead compare the whole node object with the node objects in the list 2020-02-05 14:28:35 -05:00
Jake McDermott
4fe9e5da14 Use select for any/all convergence choice 2020-02-05 14:28:35 -05:00
Jake McDermott
bbb4701fa9 Don't show any/all convergence option on root node 2020-02-05 14:28:35 -05:00
Rebeccah
86a39938fe fixed issue where successful convergence wasn't being met due to the not quite correct leveraging of get_children 2020-02-05 14:28:35 -05:00
Jake McDermott
987fc26537 Add any/all option for workflow node convergence 2020-02-05 14:28:34 -05:00
Rebeccah
70cf4cf5d4 added in handling for a parent being DNR so status is only checked if the parent isn't a DNR parent (in which case the parent has no status, which was breaking the logic). Also edited a comment and added in a DNR check that @alancoding suggested to cut out duplicates in the DAG list 2020-02-05 14:28:34 -05:00
Rebeccah
2d3172f648 added in support for existing workflow unit tests 2020-02-05 14:28:34 -05:00
Rebeccah
b2c33e3204 redid migration dependency 2020-02-05 14:28:34 -05:00
Rebeccah
f7f648b956 included all_parents_must_converge in get_workflow_job_fieldnames so that the true/false is copied into the job node and not just the template node. Also added the DB migration, and relocated logic from bfs_nodes_to_run down into mark_dnr_nodes to prevent nodes from being neither marked as DNR nor marked to run, which caused them to run anyway 2020-02-05 14:28:34 -05:00
Rebeccah
780f104ab2 shifted from dependants/dependencies to children/parents for clarity in function names, also added in toggle logic 2020-02-05 14:28:34 -05:00
Rebeccah
4c35adad6c added logic to include workflow convergence nodes to nodes to run or not run based on their parents successful statuses 2020-02-05 14:28:34 -05:00
Rebeccah
cf24c81b3e updated syntax from python2 to 3 2020-02-05 14:28:34 -05:00
Marliana Lara
6d792a8234 Style job output navigation control bar 2020-02-05 12:33:30 -05:00
AlanCoding
1558c6f942 Bump openstacksdk version 2020-02-05 11:31:24 -05:00
softwarefactory-project-zuul[bot]
2f75b48c63 Merge pull request #5423 from AlanCoding/pycurl
Add pycurl to container images

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-05 16:21:25 +00:00
softwarefactory-project-zuul[bot]
979418620c Merge pull request #5823 from chrismeyersfsu/fix-instance_migration2
fix instance migration is_isolated() issue

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-05 14:52:18 +00:00
softwarefactory-project-zuul[bot]
482e0ac311 Merge pull request #5768 from AlanCoding/fewer_computed_fields
Remove computed fields artifacts no longer valid

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-05 14:25:10 +00:00
chris meyers
a36bf4af64 fix instance migration is_isolated() issue
* Older versions of Instance model code may not contain the
is_isolated() method. This change accounts for that fact.
2020-02-05 09:16:31 -05:00
AlanCoding
3bbce18173 Remove computed fields artifacts no longer used
Remove deleted field from notification payload
2020-02-04 20:23:37 -05:00
softwarefactory-project-zuul[bot]
e54fd19bca Merge pull request #5817 from chrismeyersfsu/fix-instance_migration
use existing version of Instance

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-05 00:00:06 +00:00
AlanCoding
d2289fe9c6 add pycurl to container images 2020-02-04 14:41:51 -05:00
Bryan Hundven
1c50b8427a Put postgresql values in a tempfile, to be loaded by helm cli
Helm 3.x does not support passing values via stdin:
https://github.com/helm/helm/issues/7002

So set up a tempfile and write the template to the tempfile to be loaded
by helm ... --values <tempfile>

Signed-off-by: Bryan Hundven <bryanhundven@gmail.com>
2020-02-04 09:26:49 -08:00
Bryan Hundven
34d01f02cc Upgrade stable/postgresql helm chart to 8.1.5
This updated chart supports Helm 3.x

Signed-off-by: Bryan Hundven <bryanhundven@gmail.com>
2020-02-04 09:26:49 -08:00
Bryan Hundven
d182c96c2e Make AWX compatible with Helm 3.x
In issue #5371, AWX has issues with using Helm 3.x.
This commit removes the usage of tiller.

Signed-off-by: Bryan Hundven <bryanhundven@gmail.com>
2020-02-04 09:26:49 -08:00
softwarefactory-project-zuul[bot]
e59f3982ae Merge pull request #5796 from rascasoft/devel
Make it possible to not start containers on compose

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-04 13:18:30 +00:00
softwarefactory-project-zuul[bot]
5435c6ec73 Merge pull request #5806 from AlexSCorey/5777-JTTabOnProjectsAndTemplateListRefactor
5777 Projects JobTemplateList and template list refactor

Reviewed-by: Alex Corey <Alex.swansboro@gmail.com>
             https://github.com/AlexSCorey
2020-02-04 03:11:42 +00:00
Alex Corey
5f96aee871 Fixes spelling error 2020-02-03 20:09:49 -05:00
Keith Grant
eceeeea22d remove unneeded default value 2020-02-03 12:55:53 -08:00
softwarefactory-project-zuul[bot]
a1a864b27b Merge pull request #5804 from ryanpetrello/iso-healthcheck-failure-capacity
properly handle import errors in the isolated capacity healthcheck

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-03 20:37:18 +00:00
chris meyers
0291c476d4 use existing version of Instance
* Without this change, future modifications to the Instance object may
result in migration failures (e.g. adding a field to the Instance model)
2020-02-03 14:25:06 -05:00
Keith Grant
638e8c7add delete dead code/comments & add useRequest docstring 2020-02-03 09:43:06 -08:00
softwarefactory-project-zuul[bot]
6389ec50a1 Merge pull request #5812 from ryanpetrello/busted-project-update-events
fix broken project update secret filtering for external logging

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-03 17:37:01 +00:00
softwarefactory-project-zuul[bot]
ad53f4f5f6 Merge pull request #5815 from ryanpetrello/fix-cli-settings-py2
fix a py2/py3 compat bug in the settings CLI

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-03 17:24:33 +00:00
softwarefactory-project-zuul[bot]
9718aa711f Merge pull request #5808 from marshmalien/pf-upgrade-react-core-130
Upgrade PatternFly dependencies

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-03 17:24:29 +00:00
softwarefactory-project-zuul[bot]
cacd2c3392 Merge pull request #5805 from fantashley/support-proxy-venv
Add proxy support in custom venv container fixes #5756

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-03 16:40:41 +00:00
Ryan Petrello
1800b49822 fix a py2/py3 compat bug in the settings CLI 2020-02-03 11:37:39 -05:00
Raoul Scarazzini
1e97bb71db Make it possible to not start containers on compose
When upgrading between releases it can happen that you need to do some
manual steps (e.g. upgrading from postgres 9.6 to 10). In these cases
you'd want to check the docker-compose.yml and then launch it by
yourself.
Today we don't have any method to get just the files that will be used
while installing via compose, without starting the containers. This
commit adds a variable named "compose_start_containers" (true by
default) that, if false, will make the playbook just generate the files
in the compose directory and not start the containers.
2020-02-03 16:46:52 +01:00
Ryan Petrello
7055460c4c fix broken project update secret filtering for external logging 2020-02-03 10:27:31 -05:00
softwarefactory-project-zuul[bot]
864767d74a Merge pull request #5809 from AlexSCorey/5799-TeamEditUpdate
Fixes update failure on TeamEdit

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-02-03 14:43:49 +00:00
Alex Corey
5170948241 Fix error naming issue 2020-01-31 15:03:13 -05:00
Keith Grant
370a7f9b25 move useRequest to util folder, add tests 2020-01-31 11:39:23 -08:00
Alex Corey
1368835a29 Fixes update failure on TeamEdit 2020-01-31 14:07:24 -05:00
Marliana Lara
48fa5bb2cd Upgrade PatternFly dependencies 2020-01-31 12:10:06 -05:00
Alex Corey
25105d813d Refactors TemplateList into a functional component 2020-01-31 12:09:38 -05:00
Alex Corey
bbea43b1fe Addresses needed styling changes to Card and Page Section
These changes were necessary to remove an additional page section
to ProjectJobTemplateList.
2020-01-31 11:28:33 -05:00
Alex Corey
5790aa9780 Adds TemplateList of Project 2020-01-31 11:20:53 -05:00
Ashley Nelson
bc97d11270 Add support for no_proxy 2020-01-31 10:05:32 -06:00
Ryan Petrello
326ed22efe properly handle import errors in the isolated capacity healthcheck
if the awx_capacity module runs on an isolated node with missing
libraries (e.g., psutil) or bad permissions, then the runner status will
be "failed"

in this scenario, we *still* want to react by recording a capacity=0
2020-01-31 10:17:20 -05:00
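A hedged sketch of the reaction described above (names other than the "failed" status are hypothetical, and this is not the actual AWX implementation):

    def record_isolated_capacity(instance, runner_status, facts):
        # If the awx_capacity check could not run (missing psutil, bad
        # permissions, ...), record zero capacity instead of erroring out.
        if runner_status == "failed":
            instance.capacity = 0
        else:
            instance.capacity = int(facts.get("capacity_cpu", 0))
        instance.save(update_fields=["capacity"])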
softwarefactory-project-zuul[bot]
b942411dcc Merge pull request #5598 from jainnikhil30/sat6_want_ssh_host
add ability to read ansible_want_ssh_host, rich_params and want_facts from source vars for satellite dynamic inventory

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-01-31 13:51:20 +00:00
Nikhil Jain
374c17751f add ansible_want_ssh_host, rich_params and want_facts to be read from source vars 2020-01-31 08:18:01 -05:00
Keith Grant
ef2fa26126 rename useFetch to useRequest 2020-01-30 16:13:19 -08:00
softwarefactory-project-zuul[bot]
b611164422 Merge pull request #5801 from ryanpetrello/tower-url-base-ctint
when a license is installed, only set TOWER_URL_BASE if necessary

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-01-30 21:55:19 +00:00
Ryan Petrello
c7c899375b when a license is installed, only set TOWER_URL_BASE if necessary
it's possible for users to set this manually in /etc/tower/conf.d
prior to license application
2020-01-30 16:23:25 -05:00
Ashley Nelson
ab3a728032 Add custom venv support for proxies 2020-01-29 18:08:38 -06:00
Keith Grant
aaf371ee23 add useFetch demo 2020-01-29 12:04:52 -08:00
softwarefactory-project-zuul[bot]
d6c70e8d3a Merge pull request #5793 from squidboylan/fix_py3_k_v_config
Open collection config 'r' for py3 compatibility

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-01-29 19:33:09 +00:00
Caleb Boylan
79e65e3e84 Open collection config 'r' for py3 compatibility 2020-01-29 08:17:07 -08:00
softwarefactory-project-zuul[bot]
42c45367a0 Merge pull request #5585 from AlanCoding/blank_galaxy
Do not allow state where no Galaxy servers are enabled

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-01-28 21:53:12 +00:00
AlanCoding
d759aff4e9 Do not allow state where no Galaxy servers are enabled 2020-01-28 16:01:55 -05:00
softwarefactory-project-zuul[bot]
6b63f0ac9e Merge pull request #5788 from marshmalien/5693-inv-host-sparkline
Fix host details and list sparkline links

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-01-28 20:53:26 +00:00
softwarefactory-project-zuul[bot]
2df6eab472 Merge pull request #5786 from marshmalien/4951-org-team-links
Fix organization team links

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-01-28 20:26:04 +00:00
Marliana Lara
1c7afb66f7 Fix host job sparkline links 2020-01-28 15:09:01 -05:00
softwarefactory-project-zuul[bot]
1fbb714cbc Merge pull request #5784 from ryanpetrello/issue/5528
Fix to handle Str and JSON mix-in data correctly with settings API

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-01-28 17:35:40 +00:00
Marliana Lara
de75592f2a Refactor Teams and Team components 2020-01-28 12:26:40 -05:00
Marliana Lara
9cb7b0902a Fix org team link url 2020-01-28 12:26:21 -05:00
Hideki Saito
437d9843d1 Fix to handle Str and JSON mix-in data correctly with settings API
- Fixed issue #5528

Signed-off-by: Hideki Saito <saito@fgrep.org>
2020-01-28 11:51:35 -05:00
softwarefactory-project-zuul[bot]
490492e505 Merge pull request #5782 from Spredzy/top_limit_wheel
requirements_setup_requires: Top limit wheel to less than 42.0.0

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-01-28 15:18:05 +00:00
softwarefactory-project-zuul[bot]
3dd8e490c6 Merge pull request #5781 from AlanCoding/settings_stack
Reduce logging complexity with setting DB errors

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-01-28 13:18:06 +00:00
Yanis Guenane
75c9702caa requirements_setup_requires: Top limit wheel to less than 42.0.0
Signed-off-by: Yanis Guenane <yguenane@redhat.com>
2020-01-28 13:24:31 +01:00
softwarefactory-project-zuul[bot]
accf000bdf Merge pull request #5779 from ryanpetrello/callback-fine-tuning
some more minor callback cleanup and development tweaks

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-01-28 03:57:06 +00:00
AlanCoding
a94b30be9f Reduce logging complexity with setting DB errors 2020-01-27 18:08:09 -05:00
Ryan Petrello
3c31e0ed16 some more minor callback cleanup and development tweaks 2020-01-27 17:18:09 -05:00
softwarefactory-project-zuul[bot]
7d74999851 Merge pull request #5772 from AlexSCorey/4515-MultiSelectGenerateLabels
JT Form Generate Labels

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-01-27 21:19:09 +00:00
softwarefactory-project-zuul[bot]
b7ca369356 Merge pull request #5742 from marshmalien/babel-upgrade
Upgrade babel in ui_next

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-01-27 19:56:18 +00:00
Keith Grant
d15f7b76fa add useEndpoint hook 2020-01-27 10:20:47 -08:00
softwarefactory-project-zuul[bot]
4e4a535178 Merge pull request #5773 from ryanpetrello/callback-profiling
add the ability to enable profiling for the callback receiver workers

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-01-27 17:44:14 +00:00
Ryan Petrello
78b00652bd add the ability to enable profiling for the callback receiver workers 2020-01-27 12:03:53 -05:00
softwarefactory-project-zuul[bot]
473ab7c01c Merge pull request #5754 from marshmalien/delete-btn-user-team-details
Add delete button to User and Team details

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-01-27 15:15:44 +00:00
softwarefactory-project-zuul[bot]
ae82ba53e7 Merge pull request #5752 from marshmalien/details-delete-job-template
Add delete button to Job Template details

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-01-27 15:08:54 +00:00
Alex Corey
d69174b1a6 Removes unnecessary and dead code.
Generate Label was not being called so I removed it in favor of associate
label. Plus: less code in JT Add and JT Edit, and we can remove a promise.
Minus: Now when we generate/associate a label we always send along the orgId.
OrgId is not necessary when associating a label.
2020-01-27 09:50:49 -05:00
Alex Corey
570f549cf4 Allows user to generate a label on the JTForm 2020-01-27 09:27:27 -05:00
softwarefactory-project-zuul[bot]
55e720e25d Merge pull request #5762 from ryanpetrello/even-more-callback-optimization
remove another expensive logging lookup in the parent callback process

Reviewed-by: Ryan Petrello
             https://github.com/ryanpetrello
2020-01-24 22:27:08 +00:00
Ryan Petrello
8f33f1a6c2 remove another expensive logging lookup in the parent callback process 2020-01-24 16:46:32 -05:00
softwarefactory-project-zuul[bot]
7be924d155 Merge pull request #5751 from keithjgrant/5502-inventory-group-refresh
Inventory details refresh

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-01-24 19:24:25 +00:00
softwarefactory-project-zuul[bot]
65f226960f Merge pull request #5750 from gamuniz/license_errors_begone
made licensing a warning and not trigger on periodic scheduler

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-01-24 14:49:55 +00:00
Marliana Lara
84f056294d Fix unit test error in HostAdd 2020-01-23 17:54:06 -05:00
Marliana Lara
b906f8d757 Add delete button to user details 2020-01-23 17:53:34 -05:00
Marliana Lara
2fae523fd4 Add delete button to team details 2020-01-23 17:53:08 -05:00
softwarefactory-project-zuul[bot]
4d519155bc Merge pull request #5753 from wenottingham/a-good-help-message-is-hard-to-find
Fix help for new analytics gather interval

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-01-23 22:20:56 +00:00
Bill Nottingham
ea8a91893a Fix help for new analytics gather interval 2020-01-23 16:26:15 -05:00
softwarefactory-project-zuul[bot]
145476c7d9 Merge pull request #5748 from marshmalien/delete-org-proj-details
Add delete button to Organization and Project Details

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-01-23 20:40:36 +00:00
softwarefactory-project-zuul[bot]
c6595786f5 Merge pull request #5734 from marshmalien/5264-inv-host-edit-form
Add inventory host edit form

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-01-23 20:10:36 +00:00
Keith Grant
c6159a7c3e add more VariablesDetail tests 2020-01-23 11:15:33 -08:00
softwarefactory-project-zuul[bot]
52638c709a Merge pull request #5749 from ryanpetrello/search-distinct
fix a bug that causes __search filters to not properly call .distinct()

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-01-23 19:09:52 +00:00
Gabe Muniz
a264b1db1f made licensing a warning and not trigger on periodic scheduler 2020-01-23 14:08:23 -05:00
Keith Grant
49907e337a prevent inventory updates after unmount 2020-01-23 11:02:29 -08:00
Marliana Lara
afc1f85668 Update job template detail unit tests 2020-01-23 13:59:20 -05:00
Marliana Lara
6efa751157 Add DeleteButton component to job template details 2020-01-23 13:58:49 -05:00
Marliana Lara
10131432b5 Refactor job template detail into functional component 2020-01-23 13:52:29 -05:00
softwarefactory-project-zuul[bot]
0d365068ff Merge pull request #5740 from AlexSCorey/5257-WFJTMissingResource
Fixes InvGroup Form submission error and TemplateList Missing Resource Bug

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-01-23 18:43:44 +00:00
Ryan Petrello
256404ba03 fix a bug that causes __search filters to not properly call .distinct() 2020-01-23 13:40:22 -05:00
Marliana Lara
3b430c8bdf Add delete to project details 2020-01-23 12:13:12 -05:00
Marliana Lara
627dae6580 Add delete button to organization details 2020-01-23 12:12:15 -05:00
Alex Corey
44db9ad033 Moves TemplateListItem to a functional component 2020-01-23 11:22:05 -05:00
Alex Corey
21890efca6 Ensures no missingResourceIcon for WFJT on JTList
Adds a test to ensure that missingResourceIcon is not shown for WFJTs.
2020-01-23 11:21:57 -05:00
Alex Corey
0a8fe4d812 Fixes InvGroupForm submission bug.
The inventory id now comes through useParams instead of through props.
Also updates tests to reflect those changes
2020-01-23 11:14:41 -05:00
Keith Grant
a1d7beca83 update VariablesDetail properly if value prop changes (preserving current mode) 2020-01-22 15:42:28 -08:00
softwarefactory-project-zuul[bot]
c35c80b06c Merge pull request #5732 from ryanpetrello/callback-stats-faster
optimize awx-manage callback_stats for larger datasets

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-01-22 22:34:38 +00:00
softwarefactory-project-zuul[bot]
3c5e9da9a1 Merge pull request #5739 from ryanpetrello/optimize-callback-with-logging
further optimize conf.settings access when logging is enabled

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-01-22 21:53:04 +00:00
Ryan Petrello
f9af5e8959 optimize awx-manage callback_stats for larger datasets
to monitor this historically, we'd probably need to introduce a new
index on the modified column of all our event types
2020-01-22 16:52:38 -05:00
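The "new index on the modified column" idea floated in that commit message would, in Django terms, look roughly like the sketch below; the model is a stand-in, not one of AWX's real event models:

    from django.db import models

    class JobEventSketch(models.Model):
        counter = models.PositiveIntegerField(default=0)
        modified = models.DateTimeField(auto_now=True)

        class Meta:
            indexes = [models.Index(fields=["modified"])]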
softwarefactory-project-zuul[bot]
c983b6a755 Merge pull request #5729 from ryanpetrello/you-get-to-drink-from-the-firehose
add a script for quickly inserting lots of events

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-01-22 21:43:52 +00:00
Ryan Petrello
e18639b26b further optimize conf.settings access when logging is enabled
the callback receiver is still fairly slow when logging is enabled due
to constant setting lookups; this speeds things up considerably

related: https://github.com/ansible/awx/pull/5618
2020-01-22 16:17:33 -05:00
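As a generic illustration of the "constant setting lookups" problem (this is not AWX's actual fix), a hot loop such as an event-processing worker can cache a dynamic setting for a short interval instead of re-reading it for every event:

    import time

    class CachedSetting:
        """Cache an expensive settings lookup for `ttl` seconds."""

        def __init__(self, fetch, ttl=5.0):
            self._fetch = fetch          # callable doing the real (slow) lookup
            self._ttl = ttl
            self._value = None
            self._expires = 0.0

        def get(self):
            now = time.monotonic()
            if now >= self._expires:
                self._value = self._fetch()
                self._expires = now + self._ttl
            return self._value

    # Hypothetical usage:
    # log_enabled = CachedSetting(lambda: settings.LOG_AGGREGATOR_ENABLED)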
Marliana Lara
6d8b843ad0 Upgrade to babel 7.8.0 2020-01-22 15:03:02 -05:00
Marliana Lara
00a9e42001 Wrap floating loading text in a card 2020-01-22 13:36:15 -05:00
Marliana Lara
fc5363a140 Replace loadHost with an IIFE 2020-01-22 12:25:43 -05:00
Ryan Petrello
d8d1ccf810 add a script for quickly inserting lots of events 2020-01-22 11:57:47 -05:00
softwarefactory-project-zuul[bot]
046518ab8f Merge pull request #5699 from keithjgrant/5235-variables-field
Make VariablesField detect correct mode on mount

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-01-22 16:54:52 +00:00
Marliana Lara
d33bbdd4f6 Add inventory host edit form 2020-01-22 11:06:45 -05:00
softwarefactory-project-zuul[bot]
46e530ceeb Merge pull request #5733 from kdelee/fix_my_mistake
Fix overzealous cleanup in awxkit

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-01-22 15:56:42 +00:00
Elijah DeLee
2a77b8b4b9 Fix overzealous cleanup in awxkit
Introduced this problem with 1e796076f5
2020-01-22 10:23:40 -05:00
softwarefactory-project-zuul[bot]
23b2b136d6 Merge pull request #5707 from AlanCoding/bulk_create_logs
Allow CTiT log level to log bulk_create lines

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-01-22 15:04:17 +00:00
softwarefactory-project-zuul[bot]
d83a786c12 Merge pull request #5714 from mabashian/mount-with-contexts-import
Use directory alias for imports instead of relative path in our test files

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-01-22 14:39:18 +00:00
mabashian
5d162b739b Fix import order to address linting error 2020-01-22 09:01:35 -05:00
softwarefactory-project-zuul[bot]
55e37b4eaa Merge pull request #5728 from wenottingham/i-waited-for-days
Fix timedelta comparison to account for large intervals

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-01-21 21:47:13 +00:00
Bill Nottingham
b2a0b3fc29 Fix timedelta comparison to account for large intervals
It would fail if you set the interval to > 1 day.
2020-01-21 16:14:33 -05:00
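
For context, the usual pitfall with interval math of this kind is that `timedelta.seconds` only holds the sub-day remainder; the snippet below illustrates the class of bug (it is not the actual patched code).

```
# Illustration of the class of bug described above (not the actual AWX change):
# `.seconds` is only the remainder after whole days, so a 2-day interval looks
# like zero seconds, while `.total_seconds()` reflects the full span.
from datetime import timedelta

interval = timedelta(days=2)
print(interval.seconds)          # 0 -- misleading for intervals longer than a day
print(interval.total_seconds())  # 172800.0 -- correct basis for comparison
```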
softwarefactory-project-zuul[bot]
d1e1bc7108 Merge pull request #5721 from wenottingham/a-thundering-herd-is-only-good-if-you-are-marshall
Change how analytics is gathered to only gather once per interval

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-01-21 20:31:39 +00:00
softwarefactory-project-zuul[bot]
cb88ea8fd1 Merge pull request #5718 from AlanCoding/pin_pin
Pin zipp to avoid setuptools upgrade

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-01-21 19:13:35 +00:00
softwarefactory-project-zuul[bot]
c2fe3fcf13 Merge pull request #5706 from appuk/apurva-new-ids
Add data-cy attributes for Inventory and Inventory Hosts

Reviewed-by: Jake McDermott <yo@jakemcdermott.me>
             https://github.com/jakemcdermott
2020-01-21 17:57:41 +00:00
softwarefactory-project-zuul[bot]
6654a116d0 Merge pull request #5715 from marshmalien/routed-tabs-remove-history
RoutedTabs component - Replace router HOC with hooks

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-01-21 17:55:00 +00:00
softwarefactory-project-zuul[bot]
b77ab8a6ca Merge pull request #5723 from kdelee/scan_job_cleanup
Scan jobs have been removed, clean up old refs

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-01-21 17:54:14 +00:00
Elijah DeLee
1e796076f5 Scan jobs have been removed, clean up old refs
see https://github.com/ansible/awx/issues/5603
2020-01-21 12:11:53 -05:00
Apurva Bakshi
8fa38d1a2e Add data-cy attributes for Inventory and Inventory Hosts 2020-01-21 12:05:27 -05:00
Bill Nottingham
44e176dde8 Change how analytics is gathered to only gather once per interval 2020-01-21 11:40:51 -05:00
softwarefactory-project-zuul[bot]
1ce197041f Merge pull request #5710 from ryanpetrello/job-event-api-perf
drastically speed up /api/v2/jobs/N/job_events/ with large counts

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-01-21 15:54:02 +00:00
Marliana Lara
0952bae09f Replace withRouter with react-router hooks in RoutedTabs 2020-01-21 10:06:46 -05:00
softwarefactory-project-zuul[bot]
12509cd652 Merge pull request #5713 from wenottingham/devel
Fix some lint

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-01-20 23:50:03 +00:00
mabashian
b094c063ae Use directory alias for imports instead of relative path in our test files 2020-01-20 17:32:05 -05:00
Bill Nottingham
4e46d5d7cd Fix some lint 2020-01-20 17:15:27 -05:00
softwarefactory-project-zuul[bot]
8b10da9589 Merge pull request #5709 from benthomasson/fix_gather_analytics_crontab
Fixes crontab for gather_analytics to run once every 4 hours

Reviewed-by: Christian Adams <rooftopcellist@gmail.com>
             https://github.com/rooftopcellist
2020-01-20 19:21:27 +00:00
softwarefactory-project-zuul[bot]
99ce277b06 Merge pull request #5705 from AlexSCorey/5599-VariableDetailsAbsent
Ensures Variables Details renders even when no value.

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-01-20 18:40:04 +00:00
AlanCoding
5db6906212 pin zipp to avoid setuptools upgrade 2020-01-20 13:39:48 -05:00
Ben Thomasson
652a428438 Fixes crontab for gather_analytics to run once every 4 hours 2020-01-20 13:30:10 -05:00
Ryan Petrello
dfc769b8fe drastically speed up /api/v2/jobs/N/job_events/ with large counts 2020-01-20 13:24:39 -05:00
Alex Corey
c45b1ffca6 Ensures Variables Details renders even when no value.
When there is no value, VariablesDetails will show ---.
2020-01-20 12:58:58 -05:00
AlanCoding
ceed6f8d9b Allow CTiT log level to log bulk_create lines 2020-01-20 12:41:10 -05:00
softwarefactory-project-zuul[bot]
03cfb7bf9a Merge pull request #5669 from AlanCoding/no_parent_or_host
Remove two unused parent relationships from JobEvent model

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-01-20 16:58:46 +00:00
softwarefactory-project-zuul[bot]
49d1fa82d3 Merge pull request #5678 from marshmalien/5657-update-inventory-detail
Fetch new inventory when location changes

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-01-20 13:58:18 +00:00
softwarefactory-project-zuul[bot]
08a195ba08 Merge pull request #5694 from beeankha/tower_job_launch_module_exception
Fail Gracefully on tower_job_launch Module When JT is Not Found

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-01-17 23:46:49 +00:00
softwarefactory-project-zuul[bot]
77d1c711bf Merge pull request #5695 from jlmitch5/redirectToLogin
add redirect to login on 401

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-01-17 23:31:38 +00:00
softwarefactory-project-zuul[bot]
ad73174029 Merge pull request #5696 from jakemcdermott/fix-5138-2
Use the default cloud name if one isn't passed to azure plugin

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-01-17 22:38:55 +00:00
softwarefactory-project-zuul[bot]
a6539d66d4 Merge pull request #5654 from AlexSCorey/5619-BranchFieldMissing
Fixes navigation bug in InventoryAdd Adds SCM Branch field on JTForm

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-01-17 21:46:45 +00:00
Keith Grant
cb3ab67361 make VariablesField detect correct mode on mount 2020-01-17 13:39:18 -08:00
Alex Corey
078dc666c1 Removes code from serializer in favor of an api call to Project.readDetails
Adds necessary tests.
2020-01-17 15:57:14 -05:00
softwarefactory-project-zuul[bot]
e806da25c1 Merge pull request #5697 from ryanpetrello/some-settings-cleanup
remove an unnecessary settings optimization

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-01-17 20:43:28 +00:00
beeankha
ef36b4fffd Reduce number of requests running in the try/except block 2020-01-17 15:08:05 -05:00
Ryan Petrello
cc2ba09d3a remove an unnecessary settings optimization 2020-01-17 14:59:56 -05:00
Alex Corey
790942c0f2 Fixes navigation bug in InventoryAdd Adds SCM Branch field on JTForm 2020-01-17 14:44:56 -05:00
Alex Corey
fd1e574fcb Resets playbook and scm-branch fields when project is changed
The playbook field becomes undefined and the scm-branch
field becomes ''.  This ensures that the user has to assign
a playbook to the template that is associated with the project
and suggests to the user to review their scm-branch.
TODO: when the user updates project with scm-branch override
allow the user to type in playbook in dropdown. Then, check if
playbook is present in list of playbooks.  If no, add it to the
list of playbooks.
2020-01-17 14:44:56 -05:00
Alex Corey
2daefcd94e Removes code from serializer in favor of an api call to Project.readDetails
Adds necessary tests.
2020-01-17 14:44:28 -05:00
Alex Corey
46a7ca4dc3 Fixes navigation bug in InventoryAdd Adds SCM Branch field on JTForm 2020-01-17 14:44:28 -05:00
softwarefactory-project-zuul[bot]
5e4c997c41 Merge pull request #5688 from keithjgrant/5235-variables-field-json
Upgrade to Formik 2.1.2

Reviewed-by: John Mitchell
             https://github.com/jlmitch5
2020-01-17 19:35:27 +00:00
John Mitchell
8d4d718f7d add redirect to login on 401 2020-01-17 13:41:06 -05:00
softwarefactory-project-zuul[bot]
cf34a81af7 Merge pull request #5622 from keithjgrant/4613-pf-select-component
PatternFly select component

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-01-17 17:53:31 +00:00
Jake McDermott
11af21972d Use default cloud name if one isn't passed to azure plugin 2020-01-17 12:36:04 -05:00
Keith Grant
8850687d1b rename usePFSelect to useSyncedSelectValue 2020-01-17 08:47:31 -08:00
beeankha
792f68eaec When Job Template is not found, fail more gracefully 2020-01-17 11:40:41 -05:00
Keith Grant
113aa2e11e fix lint errors 2020-01-17 08:34:42 -08:00
softwarefactory-project-zuul[bot]
1bf0bc8203 Merge pull request #5600 from AlexSCorey/5266-InventoryHostDetails
Adds Toggle, Variables, User Link and Delete to Inventory Host/Host Details

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-01-17 15:03:23 +00:00
softwarefactory-project-zuul[bot]
03cd7472af Merge pull request #5677 from ryanpetrello/less-distinct
do not apply distinct for series of only static fields

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-01-17 13:28:27 +00:00
Marliana Lara
d549c217bb Fetch new inventory on location change 2020-01-16 22:55:47 -05:00
Keith Grant
e7fead0f2c update Formik formatting to remove warnings 2020-01-16 16:46:13 -08:00
Keith Grant
14990f7e98 finish updating tests for Formik 2.x 2020-01-16 16:21:35 -08:00
softwarefactory-project-zuul[bot]
d35eba8afb Merge pull request #5681 from marshmalien/remove-form-close-buttons
Remove form card header close button

Reviewed-by: Marliana Lara <marliana.lara@gmail.com>
             https://github.com/marshmalien
2020-01-16 21:25:00 +00:00
AlanCoding
b0722311e8 Do not apply distinct for series of only static fields 2020-01-16 14:36:38 -05:00
Marliana Lara
946c16916f Remove form card header close button 2020-01-16 13:47:02 -05:00
Keith Grant
8ef5a6b0e1 upgrade Formik to 2.x; fix some tests with upgrade 2020-01-16 10:13:19 -08:00
softwarefactory-project-zuul[bot]
6fa4d6462d Merge pull request #5664 from marshmalien/5276-credential-details
Add Credential Detail view

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-01-16 16:37:51 +00:00
softwarefactory-project-zuul[bot]
525fd889e9 Merge pull request #5584 from AlanCoding/vault_credential_test
[AWX collection] Add test for deprecated vault_credential

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-01-16 14:53:35 +00:00
softwarefactory-project-zuul[bot]
93a4e5ef05 Merge pull request #5661 from beeankha/extra_vars_warn_louder_at_launch
Warn Louder When ask_extra_vars Should be Set to True But is Not

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-01-16 14:19:30 +00:00
Marliana Lara
06ce5a16ce Show credential type link if it's managed by tower 2020-01-16 09:14:37 -05:00
Marliana Lara
15c665ea52 Add Credential Detail view
* Add Credential and CredentialDetail unit tests
* Add credential_type mock data
2020-01-16 09:14:30 -05:00
AlanCoding
9a420820eb Remove event hosts relationship
and parent event-event ForeignKey relationship
2020-01-16 08:44:36 -05:00
beeankha
fa043100bd Add info to changelog 2020-01-16 08:35:56 -05:00
beeankha
db0d748302 Also check for survey_enabled parameter 2020-01-16 08:35:56 -05:00
beeankha
e8a95a1dac Fail the task if extra_vars is set on launch but ask_extra_vars is not set to True on the Job Template 2020-01-16 08:35:56 -05:00
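
A hedged sketch of that kind of guard, as it might appear inside an Ansible module's launch logic, is shown below; the field and helper names are assumptions, not the tower_job_launch module's exact code.

```
# Hypothetical sketch of the guard described above; field and helper names are
# assumed and do not necessarily match the tower_job_launch module's code.
def fail_if_extra_vars_ignored(module, job_template, extra_vars):
    if extra_vars and not job_template.get('ask_variables_on_launch'):
        module.fail_json(
            msg='extra_vars were supplied at launch, but the Job Template does not '
                'have ask_variables_on_launch enabled, so they would be ignored.'
        )
```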
softwarefactory-project-zuul[bot]
f911fb2046 Merge pull request #5674 from jakemcdermott/fix-5142-2
Set reasonable default for MAX_FORKS

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-01-16 00:12:43 +00:00
softwarefactory-project-zuul[bot]
a0304eeb16 Merge pull request #5597 from shanemcd/kube-oc-specific-server-version-logic
Add kubectl / oc-specific API server version logic

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-01-15 22:49:07 +00:00
Jake McDermott
a6f063b199 Set default MAX_FORKS to 200 2020-01-15 17:26:15 -05:00
Shane McDonald
3977ec42e1 Add kubectl / oc-specific API server version logic 2020-01-15 17:12:53 -05:00
softwarefactory-project-zuul[bot]
b7a064b05d Merge pull request #5671 from ryanpetrello/bye-bye-scan
remove JobTemplate.job_type = 'scan'

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-01-15 21:06:20 +00:00
softwarefactory-project-zuul[bot]
aa5532f7b5 Merge pull request #5665 from wenottingham/warn-only
Only warn when license is exceeded non-fatally

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-01-15 20:16:13 +00:00
Ryan Petrello
f79b6d3708 remove JobTemplate.job_type = 'scan'
see: https://github.com/ansible/awx/issues/5603
2020-01-15 15:05:01 -05:00
softwarefactory-project-zuul[bot]
6d075b8874 Merge pull request #5448 from ryanpetrello/remove-computed-group-and-host-fields
remove computed inventory fields from Host and Group

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-01-15 19:53:30 +00:00
softwarefactory-project-zuul[bot]
3040a25932 Merge pull request #5604 from jakemcdermott/fix-5142
Add configurable MAX_FORKS for jobs

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-01-15 19:35:10 +00:00
Ryan Petrello
0f0d9ba00d send inv computed tasks *after* commit to avoid a race condition 2020-01-15 14:14:26 -05:00
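
A minimal sketch of the pattern this commit message describes uses Django's `transaction.on_commit` to defer the computed-fields task until the transaction lands; the task name mirrors calls visible in the views diff below, and the `.delay()` dispatch style is an assumption.

```
# Sketch of deferring a background task until the surrounding DB transaction
# commits, avoiding the race described above. `update_inventory_computed_fields`
# is assumed to be a dispatchable task as seen elsewhere on this page.
from django.db import transaction


def schedule_computed_fields_update(inventory_id):
    transaction.on_commit(
        lambda: update_inventory_computed_fields.delay(inventory_id)
    )
```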
softwarefactory-project-zuul[bot]
053897042f Merge pull request #5668 from ryanpetrello/vmware-inv-script-update
update to latest vmware_inventory.py

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-01-15 19:09:38 +00:00
softwarefactory-project-zuul[bot]
64186e881e Merge pull request #5602 from jakemcdermott/fix-5138
Support AzureCloud choices for azure keyvault

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-01-15 19:07:19 +00:00
Jake McDermott
0d98a1980e Add a configurable limit for job forks 2020-01-15 13:51:59 -05:00
Jake McDermott
2b02b1affd Support AzureCloud choices for azure keyvault 2020-01-15 13:13:10 -05:00
softwarefactory-project-zuul[bot]
bf3042e85a Merge pull request #5666 from ryanpetrello/inv-sync-wf
fix a bug that breaks inv sync stdout within workflows

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-01-15 17:03:01 +00:00
softwarefactory-project-zuul[bot]
bdc25c14f6 Merge pull request #5663 from ryanpetrello/how-to-upgrade
update data migration instructions

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-01-15 16:48:21 +00:00
Ryan Petrello
6e5028587a update to latest vmware_inventory.py
63737ec0f8/contrib/inventory/vmware_inventory.py

see: https://github.com/ansible/awx/issues/5648
2020-01-15 10:40:01 -05:00
Ryan Petrello
8c8713885b fix a bug that breaks inv sync stdout within workflows
see: https://github.com/ansible/tower/issues/4068
2020-01-15 10:12:27 -05:00
Bill Nottingham
bc5ef7f1c8 Only warn when license is exceeded non-fatally 2020-01-15 10:05:20 -05:00
AlanCoding
b9b6dad0b3 Add test for deprecated vault_credential 2020-01-15 09:44:28 -05:00
Ryan Petrello
829e9054d6 update data migration instructions 2020-01-15 09:03:36 -05:00
Ryan Petrello
be68a199ec reorder migrations after a rebase 2020-01-14 16:58:33 -05:00
Jake McDermott
44c0eb867b Remove group status cols
Remove the status col from any group list that used the now-removed
computed fields.
2020-01-14 16:37:20 -05:00
Jake McDermott
773b976f8a Remove dead code 2020-01-14 16:37:20 -05:00
Jake McDermott
1220847c27 Remove unused host status service 2020-01-14 16:37:20 -05:00
Jake McDermott
ec1c2a8391 Remove status icons from group nested groups 2020-01-14 16:37:19 -05:00
Jake McDermott
2bc6521eee Use related hosts and group counts for delete modal 2020-01-14 16:37:19 -05:00
Jake McDermott
107d2da845 Remove status icons from host nested groups 2020-01-14 16:37:19 -05:00
Ryan Petrello
568606d2c8 remove computed inventory fields from Host and Group 2020-01-14 16:37:16 -05:00
softwarefactory-project-zuul[bot]
78e2cd7084 Merge pull request #5610 from thedoubl3j/canceled_jobs
Added canceled_on field to unified_jobs model

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-01-14 21:35:11 +00:00
Ryan Petrello
79b8e6b6f0 renumber migrations correctly 2020-01-14 16:00:33 -05:00
Jake Jackson
d72896f9a6 Added canceled_on field to unified_jobs model
- When a job is canceled, the canceled_on field will populate with date/time
2020-01-14 15:56:30 -05:00
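
The serializer diff below adds `canceled_on` to several summary field sets; a hedged sketch of what the underlying model field might look like follows (the exact options are not shown in this compare view, so they are assumptions).

```
# Hypothetical sketch of the new field described above; the real model options
# are not visible on this page, so null/editable/help_text are assumed.
from django.db import models

canceled_on = models.DateTimeField(
    null=True,
    default=None,
    editable=False,
    help_text='The date and time when the cancel request was sent.',
)
```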
softwarefactory-project-zuul[bot]
7b3d36ba53 Merge pull request #5639 from jlmitch5/searchLabelImprovement
update select-based search items to utilize labels, not just the api value

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul
2020-01-14 20:39:09 +00:00
John Mitchell
9ecb704e10 fix prettier issues 2020-01-14 13:28:53 -05:00
John Mitchell
1b726a1b2f fix malformed search key column array on project lookup 2020-01-14 13:28:44 -05:00
Keith Grant
1cc4e302f9 update tests to check for PF Select 2020-01-13 14:41:43 -08:00
Keith Grant
1289ca9103 update TagMultiSelect to use PF <Select> 2020-01-13 14:41:43 -08:00
Keith Grant
b18ca5ac1f begin converting TagMultiSelect to usePFSelect 2020-01-13 14:41:43 -08:00
Keith Grant
193a041ef9 finish usePFSelect hook 2020-01-13 14:41:43 -08:00
Keith Grant
7219c17d30 start usePFSelect hook 2020-01-13 14:41:43 -08:00
Keith Grant
79f0f1940f update LabelSelect to use PF Select component 2020-01-13 14:41:43 -08:00
John Mitchell
edc65cdc36 update breakpoint for mobile-ifying search 2020-01-13 16:05:01 -05:00
John Mitchell
3684975ef9 remove todo label-ify search dropdown note 2020-01-13 16:04:15 -05:00
Alex Corey
8bfcef01df Fixes Breadcrumb 2020-01-13 12:24:15 -05:00
John Mitchell
bbf9c13952 update select-based items to utilize labels 2020-01-13 12:10:36 -05:00
Alex Corey
dfa578fcde Utilizes React Router Hooks and removes No-op function 2020-01-13 11:28:31 -05:00
Alex Corey
33bc9e63c4 Addresses Console Errors related to functions and test data
Also adds specificity to link URLs by adding /details for urls that should
redirect to details pages instead of ending in /:id
2020-01-13 11:28:31 -05:00
Alex Corey
919475a4c7 Improves NestedTabs, Refactors PR, Adds Delete/DeleteError Functionality to HostDetail 2020-01-13 11:28:31 -05:00
Alex Corey
1db88fe4f6 Adds Toggle, Variables, user Link and Delete to Inventory Host and Host Details
If the user comes to Host details through Inventory Host they will get a
Return To Host tab in addition to the others.  This PR allows Inventory Host
to share many of the same components with Host but does add some complexity
to the routing files in Host.jsx
2020-01-13 11:28:31 -05:00
773 changed files with 80796 additions and 36596 deletions

View File

@@ -2,6 +2,46 @@
This is a list of high-level changes for each release of AWX. A full list of commits can be found at `https://github.com/ansible/awx/releases/tag/<version>`.
## 9.3.0 (Mar 12, 2020)
- Added the ability to specify an OAuth2 token description in the AWX CLI (https://github.com/ansible/awx/issues/6122)
- Added support for K8S service account annotations to the installer (https://github.com/ansible/awx/pull/6007)
- Added support for K8S imagePullSecrets to the installer (https://github.com/ansible/awx/pull/5989)
- Launching jobs (and workflows) using the --monitor flag in the AWX CLI now returns a non-zero exit code on job failure (https://github.com/ansible/awx/issues/5920)
- Improved UI performance for various job views when many simultaneous users are logged into AWX (https://github.com/ansible/awx/issues/5883)
- Updated to the latest version of Django to address a few open CVEs (https://github.com/ansible/awx/pull/6080)
- Fixed a critical bug which can cause AWX to hang and stop launching playbooks after a period of time (https://github.com/ansible/awx/issues/5617)
- Fixed a bug which caused delays in project update stdout for certain large SCM clones (as of Ansible 2.9+) (https://github.com/ansible/awx/pull/6254)
- Fixed a bug which caused certain smart inventory filters to mistakenly return duplicate hosts (https://github.com/ansible/awx/pull/5972)
- Fixed an unclear server error when creating smart inventories with the AWX collection (https://github.com/ansible/awx/issues/6250)
- Fixed a bug that broke Grafana notification support (https://github.com/ansible/awx/issues/6137)
- Fixed a UI bug which prevented users with read access to an organization from editing credentials for that organization (https://github.com/ansible/awx/pull/6241)
- Fixed a bug which prevented workflow approval records from recording a `started` and `elapsed` date (https://github.com/ansible/awx/issues/6202)
- Fixed a bug which caused workflow nodes to have a confusing option for `verbosity` (https://github.com/ansible/awx/issues/6196)
- Fixed an RBAC bug which prevented projects and inventory schedules from being created by certain users in certain contexts (https://github.com/ansible/awx/issues/5717)
- Fixed a bug that caused `role_path` in a project's config to not be respected due to an error processing `/etc/ansible/ansible.cfg` (https://github.com/ansible/awx/pull/6038)
- Fixed a bug that broke inventory updates for installs with custom home directories for the awx user (https://github.com/ansible/awx/pull/6152)
- Fixed a bug that broke fact data collection when AWX encounters invalid/unexpected fact data (https://github.com/ansible/awx/issues/5935)
## 9.2.0 (Feb 12, 2020)
- Added the ability to configure the convergence behavior of workflow nodes https://github.com/ansible/awx/issues/3054
- AWX now allows for a configurable global limit for fork count (per-job run). The default maximum is 200. https://github.com/ansible/awx/pull/5604
- Added the ability to specify AZURE_PUBLIC_CLOUD (e.g., for Azure Government KeyVault support) for the Azure credential plugin https://github.com/ansible/awx/issues/5138
- Added support for several additional parameters for Satellite dynamic inventory https://github.com/ansible/awx/pull/5598
- Added a new field to jobs for tracking the date/time a job is cancelled https://github.com/ansible/awx/pull/5610
- Made a series of additional optimizations to the callback receiver to further improve stdout write speed for running playbooks https://github.com/ansible/awx/pull/5677 https://github.com/ansible/awx/pull/5739
- Updated AWX to be compatible with Helm 3.x (https://github.com/ansible/awx/pull/5776)
- Optimized AWX's job dependency/scheduling code to drastically improve processing time in scenarios where there are many pending jobs scheduled simultaneously https://github.com/ansible/awx/issues/5154
- Fixed a bug which could cause SCM authentication details (basic auth passwords) to be reported to external loggers in certain failure scenarios (e.g., when a git clone fails and ansible itself prints an error message to stdout) https://github.com/ansible/awx/pull/5812
- Fixed a k8s installer bug that caused installs to fail in certain situations https://github.com/ansible/awx/issues/5574
- Fixed a number of issues that caused analytics gathering and reporting to run more often than necessary https://github.com/ansible/awx/pull/5721
- Fixed a bug in the AWX CLI that prevented JSON-type settings from saving properly https://github.com/ansible/awx/issues/5528
- Improved support for fetching custom virtualenv dependencies when AWX is installed behind a proxy https://github.com/ansible/awx/pull/5805
- Updated the bundled version of openstacksdk to address a known issue https://github.com/ansible/awx/issues/5821
- Updated the bundled vmware_inventory plugin to the latest version to address a bug https://github.com/ansible/awx/pull/5668
- Fixed a bug that can cause inventory updates to fail to properly save their output when run within a workflow https://github.com/ansible/awx/pull/5666
- Removed a number of pre-computed fields from the Host and Group models to improve AWX performance. As part of this change, inventory group UIs throughout the interface no longer display status icons https://github.com/ansible/awx/pull/5448
## 9.1.1 (Jan 14, 2020)
- Fixed a bug that caused database migrations on Kubernetes installs to hang https://github.com/ansible/awx/pull/5579

View File

@@ -2,96 +2,8 @@
## Introduction
Upgrades using Django migrations are not expected to work in AWX. As a result, to upgrade to a new version, it is necessary to export resources from the old AWX node and import them into a freshly-installed node with the new version. The recommended way to do this is to use the tower-cli send/receive feature.
Early versions of AWX did not support seamless upgrades between major versions and required the use of a backup and restore tool to perform upgrades.
This tool does __not__ support export/import of the following:
* Logs/history
* Credential passwords
* LDAP/AWX config
Users who wish to upgrade modern AWX installations should follow the instructions at:
### Install & Configure Tower-CLI
In terminal, pip install tower-cli (if you do not have pip already, install [here](https://pip.pypa.io/en/stable/installing/)):
```
$ pip install --upgrade ansible-tower-cli
```
The AWX host URL, user, and password must be set for the AWX instance to be exported:
```
$ tower-cli config host http://<old-awx-host.example.com>
$ tower-cli config username <user>
$ tower-cli config password <pass>
```
For more information on installing tower-cli look [here](http://tower-cli.readthedocs.io/en/latest/quickstart.html).
### Export Resources
Export all objects
```$ tower-cli receive --all > assets.json```
### Teardown Old AWX
Clean up remnants of the old AWX install:
```docker rm -f $(docker ps -aq)``` # remove all old awx containers
```make clean-ui``` # clean up ui artifacts
### Install New AWX version
If you are installing AWX as a dev container, pull down the latest code or version you want from GitHub, build
the image locally, then start the container
```
git pull # retrieve latest AWX changes from repository
make docker-compose-build # build AWX image
make docker-compose # run container
```
For other install methods, refer to the [Install.md](https://github.com/ansible/awx/blob/devel/INSTALL.md).
### Import Resources
Configure tower-cli for your new AWX host as shown earlier. Import from a JSON file named assets.json
```
$ tower-cli config host http://<new-awx-host.example.com>
$ tower-cli config username <user>
$ tower-cli config password <pass>
$ tower-cli send assets.json
```
--------------------------------------------------------------------------------
## Additional Info
If you have two running AWX hosts, it is possible to copy all assets from one instance to another
```$ tower-cli receive --tower-host old-awx-host.example.com --all | tower-cli send --tower-host new-awx-host.example.com```
#### More Granular Exports:
Export all credentials
```$ tower-cli receive --credential all > credentials.json```
> Note: This exports the credentials with blank strings for passwords and secrets
Export a credential named "My Credential"
```$ tower-cli receive --credential "My Credential"```
#### More Granular Imports:
You could import anything except an organization defined in a JSON file named assets.json
```$ tower-cli send --prevent organization assets.json```
https://github.com/ansible/awx/blob/devel/INSTALL.md#upgrading-from-previous-versions

View File

@@ -506,10 +506,6 @@ If you wish to tag and push built images to a Docker registry, set the following
> Username of the user that will push images to the registry. Defaults to *developer*.
*docker_remove_local_images*
> Due to the way that the docker_image module behaves, images will not be pushed to a remote repository if they are present locally. Set this to delete local versions of the images that will be pushed to the remote. This will fail if containers are currently running from those images.
**Note**
> These settings are ignored if using official images

View File

@@ -122,7 +122,7 @@ clean-api:
rm -rf awx/projects
clean-awxkit:
rm -rf awxkit/*.egg-info awxkit/.tox
rm -rf awxkit/*.egg-info awxkit/.tox awxkit/build/*
# convenience target to assert environment variables are defined
guard-%:
@@ -167,8 +167,7 @@ virtualenv_awx:
fi; \
if [ ! -d "$(VENV_BASE)/awx" ]; then \
virtualenv -p $(PYTHON) $(VENV_BASE)/awx; \
$(VENV_BASE)/awx/bin/pip install $(PIP_OPTIONS) $(VENV_BOOTSTRAP) && \
$(VENV_BASE)/awx/bin/pip install $(PIP_OPTIONS) flit; \
$(VENV_BASE)/awx/bin/pip install $(PIP_OPTIONS) $(VENV_BOOTSTRAP); \
fi; \
fi
@@ -403,6 +402,7 @@ prepare_collection_venv:
COLLECTION_TEST_DIRS ?= awx_collection/test/awx
COLLECTION_PACKAGE ?= awx
COLLECTION_NAMESPACE ?= awx
COLLECTION_INSTALL = ~/.ansible/collections/ansible_collections/$(COLLECTION_NAMESPACE)/$(COLLECTION_PACKAGE)
test_collection:
@if [ "$(VENV_BASE)" ]; then \
@@ -415,20 +415,26 @@ flake8_collection:
test_collection_all: prepare_collection_venv test_collection flake8_collection
test_collection_sanity:
rm -rf sanity
mkdir -p sanity/ansible_collections/awx
cp -Ra awx_collection sanity/ansible_collections/awx/awx # symlinks do not work
cd sanity/ansible_collections/awx/awx && git init && git add . # requires both this file structure and a git repo, so there you go
cd sanity/ansible_collections/awx/awx && ansible-test sanity
# WARNING: symlinking a collection is fundamentally unstable
# this is for rapid development iteration with playbooks, do not use with other test targets
symlink_collection:
rm -rf $(COLLECTION_INSTALL)
mkdir -p ~/.ansible/collections/ansible_collections/$(COLLECTION_NAMESPACE) # in case it does not exist
ln -s $(shell pwd)/awx_collection $(COLLECTION_INSTALL)
build_collection:
ansible-playbook -i localhost, awx_collection/template_galaxy.yml -e collection_package=$(COLLECTION_PACKAGE) -e collection_namespace=$(COLLECTION_NAMESPACE) -e collection_version=$(VERSION)
ansible-galaxy collection build awx_collection --force --output-path=awx_collection
install_collection: build_collection
rm -rf ~/.ansible/collections/ansible_collections/awx/awx
ansible-galaxy collection install awx_collection/awx-awx-$(VERSION).tar.gz
rm -rf $(COLLECTION_INSTALL)
ansible-galaxy collection install awx_collection/$(COLLECTION_NAMESPACE)-$(COLLECTION_PACKAGE)-$(VERSION).tar.gz
test_collection_sanity: install_collection
cd $(COLLECTION_INSTALL) && ansible-test sanity
test_collection_integration: install_collection
cd $(COLLECTION_INSTALL) && ansible-test integration
test_unit:
@if [ "$(VENV_BASE)" ]; then \

View File

@@ -1 +1 @@
9.1.1
9.3.0

View File

@@ -9,7 +9,7 @@ from functools import reduce
# Django
from django.core.exceptions import FieldError, ValidationError
from django.db import models
from django.db.models import Q
from django.db.models import Q, CharField, IntegerField, BooleanField
from django.db.models.fields import FieldDoesNotExist
from django.db.models.fields.related import ForeignObjectRel, ManyToManyField, ForeignKey
from django.contrib.contenttypes.models import ContentType
@@ -63,19 +63,19 @@ class TypeFilterBackend(BaseFilterBackend):
raise ParseError(*e.args)
def get_field_from_path(model, path):
def get_fields_from_path(model, path):
'''
Given a Django ORM lookup path (possibly over multiple models)
Returns the last field in the line, and also the revised lookup path
Returns the fields in the line, and also the revised lookup path
ex., given
model=Organization
path='project__timeout'
returns tuple of field at the end of the line as well as a corrected
path, for special cases we do substitutions
(<IntegerField for timeout>, 'project__timeout')
returns tuple of fields traversed as well and a corrected path,
for special cases we do substitutions
([<IntegerField for timeout>], 'project__timeout')
'''
# Store of all the fields used to detect repeats
field_set = set([])
field_list = []
new_parts = []
for name in path.split('__'):
if model is None:
@@ -111,13 +111,24 @@ def get_field_from_path(model, path):
raise PermissionDenied(_('Filtering on %s is not allowed.' % name))
elif getattr(field, '__prevent_search__', False):
raise PermissionDenied(_('Filtering on %s is not allowed.' % name))
if field in field_set:
if field in field_list:
# Field traversed twice, could create infinite JOINs, DoSing Tower
raise ParseError(_('Loops not allowed in filters, detected on field {}.').format(field.name))
field_set.add(field)
field_list.append(field)
model = getattr(field, 'related_model', None)
return field, '__'.join(new_parts)
return field_list, '__'.join(new_parts)
def get_field_from_path(model, path):
'''
Given a Django ORM lookup path (possibly over multiple models)
Returns the last field in the line, and the revised lookup path
ex.
(<IntegerField for timeout>, 'project__timeout')
'''
field_list, new_path = get_fields_from_path(model, path)
return (field_list[-1], new_path)
class FieldLookupBackend(BaseFilterBackend):
@@ -133,7 +144,11 @@ class FieldLookupBackend(BaseFilterBackend):
'regex', 'iregex', 'gt', 'gte', 'lt', 'lte', 'in',
'isnull', 'search')
def get_field_from_lookup(self, model, lookup):
# A list of fields that we know can be filtered on without the possibility
# of introducing duplicates
NO_DUPLICATES_WHITELIST = (CharField, IntegerField, BooleanField)
def get_fields_from_lookup(self, model, lookup):
if '__' in lookup and lookup.rsplit('__', 1)[-1] in self.SUPPORTED_LOOKUPS:
path, suffix = lookup.rsplit('__', 1)
@@ -147,11 +162,16 @@ class FieldLookupBackend(BaseFilterBackend):
# FIXME: Could build up a list of models used across relationships, use
# those lookups combined with request.user.get_queryset(Model) to make
# sure user cannot query using objects he could not view.
field, new_path = get_field_from_path(model, path)
field_list, new_path = get_fields_from_path(model, path)
new_lookup = new_path
new_lookup = '__'.join([new_path, suffix])
return field, new_lookup
return field_list, new_lookup
def get_field_from_lookup(self, model, lookup):
'''Method to match return type of single field, if needed.'''
field_list, new_lookup = self.get_fields_from_lookup(model, lookup)
return (field_list[-1], new_lookup)
def to_python_related(self, value):
value = force_text(value)
@@ -182,7 +202,10 @@ class FieldLookupBackend(BaseFilterBackend):
except UnicodeEncodeError:
raise ValueError("%r is not an allowed field name. Must be ascii encodable." % lookup)
field, new_lookup = self.get_field_from_lookup(model, lookup)
field_list, new_lookup = self.get_fields_from_lookup(model, lookup)
field = field_list[-1]
needs_distinct = (not all(isinstance(f, self.NO_DUPLICATES_WHITELIST) for f in field_list))
# Type names are stored without underscores internally, but are presented and
# and serialized over the API containing underscores so we remove `_`
@@ -211,10 +234,10 @@ class FieldLookupBackend(BaseFilterBackend):
for rm_field in related_model._meta.fields:
if rm_field.name in ('username', 'first_name', 'last_name', 'email', 'name', 'description', 'playbook'):
new_lookups.append('{}__{}__icontains'.format(new_lookup[:-8], rm_field.name))
return value, new_lookups
return value, new_lookups, needs_distinct
else:
value = self.value_to_python_for_field(field, value)
return value, new_lookup
return value, new_lookup, needs_distinct
def filter_queryset(self, request, queryset, view):
try:
@@ -225,6 +248,7 @@ class FieldLookupBackend(BaseFilterBackend):
chain_filters = []
role_filters = []
search_filters = {}
needs_distinct = False
# Can only have two values: 'AND', 'OR'
# If 'AND' is used, an item must satisfy all conditions to show up in the results.
# If 'OR' is used, an item just needs to satisfy one condition to appear in results.
@@ -256,9 +280,12 @@ class FieldLookupBackend(BaseFilterBackend):
search_filter_relation = 'AND'
values = reduce(lambda list1, list2: list1 + list2, [i.split(',') for i in values])
for value in values:
search_value, new_keys = self.value_to_python(queryset.model, key, force_text(value))
search_value, new_keys, _ = self.value_to_python(queryset.model, key, force_text(value))
assert isinstance(new_keys, list)
search_filters[search_value] = new_keys
# by definition, search *only* joins across relations,
# so it _always_ needs a .distinct()
needs_distinct = True
continue
# Custom chain__ and or__ filters, mutually exclusive (both can
@@ -282,7 +309,9 @@ class FieldLookupBackend(BaseFilterBackend):
for value in values:
if q_int:
value = int(value)
value, new_key = self.value_to_python(queryset.model, key, value)
value, new_key, distinct = self.value_to_python(queryset.model, key, value)
if distinct:
needs_distinct = True
if q_chain:
chain_filters.append((q_not, new_key, value))
elif q_or:
@@ -332,7 +361,9 @@ class FieldLookupBackend(BaseFilterBackend):
else:
q = Q(**{k:v})
queryset = queryset.filter(q)
queryset = queryset.filter(*args).distinct()
queryset = queryset.filter(*args)
if needs_distinct:
queryset = queryset.distinct()
return queryset
except (FieldError, FieldDoesNotExist, ValueError, TypeError) as e:
raise ParseError(e.args[0])

View File

@@ -192,7 +192,7 @@ class APIView(views.APIView):
response.data['detail'] += ' To establish a login session, visit /api/login/.'
logger.info(status_msg)
else:
logger.warn(status_msg)
logger.warning(status_msg)
response = super(APIView, self).finalize_response(request, response, *args, **kwargs)
time_started = getattr(self, 'time_started', None)
response['X-API-Node'] = settings.CLUSTER_HOST_ID

View File

@@ -20,6 +20,7 @@ from rest_framework.fields import JSONField as DRFJSONField
from rest_framework.request import clone_request
# AWX
from awx.api.fields import ChoiceNullField
from awx.main.fields import JSONField, ImplicitRoleField
from awx.main.models import InventorySource, NotificationTemplate
from awx.main.scheduler.kubernetes import PodManager
@@ -96,7 +97,15 @@ class Metadata(metadata.SimpleMetadata):
field_info['children'] = self.get_serializer_info(field)
if not isinstance(field, (RelatedField, ManyRelatedField)) and hasattr(field, 'choices'):
field_info['choices'] = [(choice_value, choice_name) for choice_value, choice_name in field.choices.items()]
choices = [
(choice_value, choice_name) for choice_value, choice_name in field.choices.items()
]
if not any(choice in ('', None) for choice, _ in choices):
if field.allow_blank:
choices = [("", "---------")] + choices
if field.allow_null and not isinstance(field, ChoiceNullField):
choices = [(None, "---------")] + choices
field_info['choices'] = choices
# Indicate if a field is write-only.
if getattr(field, 'write_only', False):

View File

@@ -98,26 +98,19 @@ SUMMARIZABLE_FK_FIELDS = {
'total_hosts',
'hosts_with_active_failures',
'total_groups',
'groups_with_active_failures',
'has_inventory_sources',
'total_inventory_sources',
'inventory_sources_with_failures',
'organization_id',
'kind',
'insights_credential_id',),
'host': DEFAULT_SUMMARY_FIELDS + ('has_active_failures',
'has_inventory_sources'),
'group': DEFAULT_SUMMARY_FIELDS + ('has_active_failures',
'total_hosts',
'hosts_with_active_failures',
'total_groups',
'groups_with_active_failures',
'has_inventory_sources'),
'host': DEFAULT_SUMMARY_FIELDS,
'group': DEFAULT_SUMMARY_FIELDS,
'project': DEFAULT_SUMMARY_FIELDS + ('status', 'scm_type'),
'source_project': DEFAULT_SUMMARY_FIELDS + ('status', 'scm_type'),
'project_update': DEFAULT_SUMMARY_FIELDS + ('status', 'failed',),
'credential': DEFAULT_SUMMARY_FIELDS + ('kind', 'cloud', 'kubernetes', 'credential_type_id'),
'job': DEFAULT_SUMMARY_FIELDS + ('status', 'failed', 'elapsed', 'type'),
'job': DEFAULT_SUMMARY_FIELDS + ('status', 'failed', 'elapsed', 'type', 'canceled_on'),
'job_template': DEFAULT_SUMMARY_FIELDS,
'workflow_job_template': DEFAULT_SUMMARY_FIELDS,
'workflow_job': DEFAULT_SUMMARY_FIELDS,
@@ -125,7 +118,7 @@ SUMMARIZABLE_FK_FIELDS = {
'workflow_approval': DEFAULT_SUMMARY_FIELDS + ('timeout',),
'schedule': DEFAULT_SUMMARY_FIELDS + ('next_run',),
'unified_job_template': DEFAULT_SUMMARY_FIELDS + ('unified_job_type',),
'last_job': DEFAULT_SUMMARY_FIELDS + ('finished', 'status', 'failed', 'license_error'),
'last_job': DEFAULT_SUMMARY_FIELDS + ('finished', 'status', 'failed', 'license_error', 'canceled_on'),
'last_job_host_summary': DEFAULT_SUMMARY_FIELDS + ('failed',),
'last_update': DEFAULT_SUMMARY_FIELDS + ('status', 'failed', 'license_error'),
'current_update': DEFAULT_SUMMARY_FIELDS + ('status', 'failed', 'license_error'),
@@ -139,7 +132,7 @@ SUMMARIZABLE_FK_FIELDS = {
'insights_credential': DEFAULT_SUMMARY_FIELDS,
'source_credential': DEFAULT_SUMMARY_FIELDS + ('kind', 'cloud', 'credential_type_id'),
'target_credential': DEFAULT_SUMMARY_FIELDS + ('kind', 'cloud', 'credential_type_id'),
'webhook_credential': DEFAULT_SUMMARY_FIELDS,
'webhook_credential': DEFAULT_SUMMARY_FIELDS + ('kind', 'cloud', 'credential_type_id'),
'approved_or_denied_by': ('id', 'username', 'first_name', 'last_name'),
'credential_type': DEFAULT_SUMMARY_FIELDS,
}
@@ -719,7 +712,7 @@ class UnifiedJobSerializer(BaseSerializer):
class Meta:
model = UnifiedJob
fields = ('*', 'unified_job_template', 'launch_type', 'status',
'failed', 'started', 'finished', 'elapsed', 'job_args',
'failed', 'started', 'finished', 'canceled_on', 'elapsed', 'job_args',
'job_cwd', 'job_env', 'job_explanation',
'execution_node', 'controller_node',
'result_traceback', 'event_processing_finished')
@@ -1549,20 +1542,15 @@ class InventorySerializer(BaseSerializerWithVariables):
'admin', 'adhoc',
{'copy': 'organization.inventory_admin'}
]
groups_with_active_failures = serializers.IntegerField(
read_only=True,
min_value=0,
help_text=_('This field has been deprecated and will be removed in a future release')
)
class Meta:
model = Inventory
fields = ('*', 'organization', 'kind', 'host_filter', 'variables', 'has_active_failures',
'total_hosts', 'hosts_with_active_failures', 'total_groups',
'groups_with_active_failures', 'has_inventory_sources',
'total_inventory_sources', 'inventory_sources_with_failures',
'insights_credential', 'pending_deletion',)
'has_inventory_sources', 'total_inventory_sources',
'inventory_sources_with_failures', 'insights_credential',
'pending_deletion',)
def get_related(self, obj):
res = super(InventorySerializer, self).get_related(obj)
@@ -1612,7 +1600,7 @@ class InventorySerializer(BaseSerializerWithVariables):
})
SmartFilter().query_from_string(host_filter)
except RuntimeError as e:
raise models.base.ValidationError(e)
raise models.base.ValidationError(str(e))
return host_filter
def validate(self, attrs):
@@ -1644,6 +1632,9 @@ class HostSerializer(BaseSerializerWithVariables):
show_capabilities = ['edit', 'delete']
capabilities_prefetch = ['inventory.admin']
has_active_failures = serializers.SerializerMethodField()
has_inventory_sources = serializers.SerializerMethodField()
class Meta:
model = Host
fields = ('*', 'inventory', 'enabled', 'instance_id', 'variables',
@@ -1757,6 +1748,14 @@ class HostSerializer(BaseSerializerWithVariables):
ret['last_job_host_summary'] = None
return ret
def get_has_active_failures(self, obj):
return bool(
obj.last_job_host_summary and obj.last_job_host_summary.failed
)
def get_has_inventory_sources(self, obj):
return obj.inventory_sources.exists()
class AnsibleFactsSerializer(BaseSerializer):
class Meta:
@@ -1769,17 +1768,10 @@ class AnsibleFactsSerializer(BaseSerializer):
class GroupSerializer(BaseSerializerWithVariables):
show_capabilities = ['copy', 'edit', 'delete']
capabilities_prefetch = ['inventory.admin', 'inventory.adhoc']
groups_with_active_failures = serializers.IntegerField(
read_only=True,
min_value=0,
help_text=_('This field has been deprecated and will be removed in a future release')
)
class Meta:
model = Group
fields = ('*', 'inventory', 'variables', 'has_active_failures',
'total_hosts', 'hosts_with_active_failures', 'total_groups',
'groups_with_active_failures', 'has_inventory_sources')
fields = ('*', 'inventory', 'variables')
def build_relational_field(self, field_name, relation_info):
field_class, field_kwargs = super(GroupSerializer, self).build_relational_field(field_name, relation_info)
@@ -2123,7 +2115,13 @@ class InventorySourceSerializer(UnifiedJobTemplateSerializer, InventorySourceOpt
def get_field_from_model_or_attrs(fd):
return attrs.get(fd, self.instance and getattr(self.instance, fd) or None)
if get_field_from_model_or_attrs('source') != 'scm':
if get_field_from_model_or_attrs('source') == 'scm':
if (('source' in attrs or 'source_project' in attrs) and
get_field_from_model_or_attrs('source_project') is None):
raise serializers.ValidationError(
{"source_project": _("Project required for scm type sources.")}
)
else:
redundant_scm_fields = list(filter(
lambda x: attrs.get(x, None),
['source_project', 'source_path', 'update_on_project_update']
@@ -2823,7 +2821,7 @@ class JobTemplateMixin(object):
# .only('id', 'status', 'finished', 'polymorphic_ctype_id')
optimized_qs = uj_qs.non_polymorphic()
return [{
'id': x.id, 'status': x.status, 'finished': x.finished,
'id': x.id, 'status': x.status, 'finished': x.finished, 'canceled_on': x.canceled_on,
# Make type consistent with API top-level key, for instance workflow_job
'type': x.get_real_instance_class()._meta.verbose_name.replace(' ', '_')
} for x in optimized_qs[:10]]
@@ -3685,7 +3683,7 @@ class WorkflowJobTemplateNodeSerializer(LaunchConfigurationBaseSerializer):
class Meta:
model = WorkflowJobTemplateNode
fields = ('*', 'workflow_job_template', '-name', '-description', 'id', 'url', 'related',
'unified_job_template', 'success_nodes', 'failure_nodes', 'always_nodes',)
'unified_job_template', 'success_nodes', 'failure_nodes', 'always_nodes', 'all_parents_must_converge',)
def get_related(self, obj):
res = super(WorkflowJobTemplateNodeSerializer, self).get_related(obj)
@@ -3725,7 +3723,7 @@ class WorkflowJobNodeSerializer(LaunchConfigurationBaseSerializer):
model = WorkflowJobNode
fields = ('*', 'job', 'workflow_job', '-name', '-description', 'id', 'url', 'related',
'unified_job_template', 'success_nodes', 'failure_nodes', 'always_nodes',
'do_not_run',)
'all_parents_must_converge', 'do_not_run',)
def get_related(self, obj):
res = super(WorkflowJobNodeSerializer, self).get_related(obj)
@@ -3833,7 +3831,7 @@ class JobEventSerializer(BaseSerializer):
model = JobEvent
fields = ('*', '-name', '-description', 'job', 'event', 'counter',
'event_display', 'event_data', 'event_level', 'failed',
'changed', 'uuid', 'parent_uuid', 'host', 'host_name', 'parent',
'changed', 'uuid', 'parent_uuid', 'host', 'host_name',
'playbook', 'play', 'task', 'role', 'stdout', 'start_line', 'end_line',
'verbosity')
@@ -3842,13 +3840,9 @@ class JobEventSerializer(BaseSerializer):
res.update(dict(
job = self.reverse('api:job_detail', kwargs={'pk': obj.job_id}),
))
if obj.parent_id:
res['parent'] = self.reverse('api:job_event_detail', kwargs={'pk': obj.parent_id})
res['children'] = self.reverse('api:job_event_children_list', kwargs={'pk': obj.pk})
if obj.host_id:
res['host'] = self.reverse('api:host_detail', kwargs={'pk': obj.host_id})
if obj.hosts.exists():
res['hosts'] = self.reverse('api:job_event_hosts_list', kwargs={'pk': obj.pk})
return res
def get_summary_fields(self, obj):
@@ -4060,6 +4054,13 @@ class JobLaunchSerializer(BaseSerializer):
**attrs)
self._ignored_fields = rejected
# Basic validation - cannot run a playbook without a playbook
if not template.project:
errors['project'] = _("A project is required to run a job.")
elif template.project.status in ('error', 'failed'):
errors['playbook'] = _("Missing a revision to run due to failed project update.")
# cannot run a playbook without an inventory
if template.inventory and template.inventory.pending_deletion is True:
errors['inventory'] = _("The inventory associated with this Job Template is being deleted.")
elif 'inventory' in accepted and accepted['inventory'].pending_deletion:

View File

@@ -81,7 +81,8 @@ from awx.main.utils import (
getattrd,
get_pk_from_dict,
schedule_task_manager,
ignore_inventory_computed_fields
ignore_inventory_computed_fields,
set_environ
)
from awx.main.utils.encryption import encrypt_value
from awx.main.utils.filters import SmartFilter
@@ -204,20 +205,15 @@ class DashboardView(APIView):
'failed': ec2_inventory_failed.count()}
user_groups = get_user_queryset(request.user, models.Group)
groups_job_failed = (
models.Group.objects.filter(hosts_with_active_failures__gt=0) | models.Group.objects.filter(groups_with_active_failures__gt=0)
).count()
groups_inventory_failed = models.Group.objects.filter(inventory_sources__last_job_failed=True).count()
data['groups'] = {'url': reverse('api:group_list', request=request),
'failures_url': reverse('api:group_list', request=request) + "?has_active_failures=True",
'total': user_groups.count(),
'job_failed': groups_job_failed,
'inventory_failed': groups_inventory_failed}
user_hosts = get_user_queryset(request.user, models.Host)
user_hosts_failed = user_hosts.filter(has_active_failures=True)
user_hosts_failed = user_hosts.filter(last_job_host_summary__failed=True)
data['hosts'] = {'url': reverse('api:host_list', request=request),
'failures_url': reverse('api:host_list', request=request) + "?has_active_failures=True",
'failures_url': reverse('api:host_list', request=request) + "?last_job_host_summary__failed=True",
'total': user_hosts.count(),
'failed': user_hosts_failed.count()}
@@ -1611,7 +1607,8 @@ class HostInsights(GenericAPIView):
def _call_insights_api(self, url, session, headers):
try:
res = session.get(url, headers=headers, timeout=120)
with set_environ(**settings.AWX_TASK_ENV):
res = session.get(url, headers=headers, timeout=120)
except requests.exceptions.SSLError:
raise BadGateway(_('SSLError while trying to connect to {}').format(url))
except requests.exceptions.Timeout:
@@ -2150,7 +2147,7 @@ class InventorySourceHostsList(HostRelatedSearchMixin, SubListDestroyAPIView):
host__inventory_sources=inv_source
).delete()
r = super(InventorySourceHostsList, self).perform_list_destroy(instance_list)
update_inventory_computed_fields.delay(inv_source.inventory_id, True)
update_inventory_computed_fields.delay(inv_source.inventory_id)
return r
@@ -2177,7 +2174,7 @@ class InventorySourceGroupsList(SubListDestroyAPIView):
group__inventory_sources=inv_source
).delete()
r = super(InventorySourceGroupsList, self).perform_list_destroy(instance_list)
update_inventory_computed_fields.delay(inv_source.inventory_id, True)
update_inventory_computed_fields.delay(inv_source.inventory_id)
return r
@@ -3268,7 +3265,7 @@ class WorkflowJobRelaunch(GenericAPIView):
jt = obj.job_template
if not jt:
raise ParseError(_('Cannot relaunch slice workflow job orphaned from job template.'))
elif not jt.inventory or min(jt.inventory.hosts.count(), jt.job_slice_count) != obj.workflow_nodes.count():
elif not obj.inventory or min(obj.inventory.hosts.count(), jt.job_slice_count) != obj.workflow_nodes.count():
raise ParseError(_('Cannot relaunch sliced workflow job after slice count has changed.'))
new_workflow_job = obj.create_relaunch_workflow_job()
new_workflow_job.signal_start()
@@ -3819,6 +3816,12 @@ class JobEventHostsList(HostRelatedSearchMixin, SubListAPIView):
relationship = 'hosts'
name = _('Job Event Hosts List')
def get_queryset(self):
parent_event = self.get_parent_object()
self.check_parent_access(parent_event)
qs = self.request.user.get_queryset(self.model).filter(job_events_as_primary_host=parent_event)
return qs
class BaseJobEventsList(NoTruncateMixin, SubListAPIView):
@@ -3841,8 +3844,7 @@ class HostJobEventsList(BaseJobEventsList):
def get_queryset(self):
parent_obj = self.get_parent_object()
self.check_parent_access(parent_obj)
qs = self.request.user.get_queryset(self.model).filter(
Q(host=parent_obj) | Q(hosts=parent_obj)).distinct()
qs = self.request.user.get_queryset(self.model).filter(host=parent_obj)
return qs
@@ -3858,9 +3860,7 @@ class JobJobEventsList(BaseJobEventsList):
def get_queryset(self):
job = self.get_parent_object()
self.check_parent_access(job)
qs = job.job_events
qs = qs.select_related('host')
qs = qs.prefetch_related('hosts', 'children')
qs = job.job_events.select_related('host').order_by('start_line')
return qs.all()
@@ -4303,7 +4303,7 @@ class NotificationTemplateTest(GenericAPIView):
msg = "Tower Notification Test {} {}".format(obj.id, settings.TOWER_URL_BASE)
if obj.notification_type in ('email', 'pagerduty'):
body = "Ansible Tower Test Notification {} {}".format(obj.id, settings.TOWER_URL_BASE)
elif obj.notification_type == 'webhook':
elif obj.notification_type in ('webhook', 'grafana'):
body = '{{"body": "Ansible Tower Test Notification {} {}"}}'.format(obj.id, settings.TOWER_URL_BASE)
else:
body = {"body": "Ansible Tower Test Notification {} {}".format(obj.id, settings.TOWER_URL_BASE)}

View File

@@ -20,6 +20,7 @@ from rest_framework import status
import requests
from awx.api.generics import APIView
from awx.conf.registry import settings_registry
from awx.main.ha import is_ha_environment
from awx.main.utils import (
get_awx_version,
@@ -37,6 +38,7 @@ from awx.main.models import (
InstanceGroup,
JobTemplate,
)
from awx.main.utils import set_environ
logger = logging.getLogger('awx.api.views.root')
@@ -190,7 +192,8 @@ class ApiV2SubscriptionView(APIView):
data['rh_password'] = settings.REDHAT_PASSWORD
try:
user, pw = data.get('rh_username'), data.get('rh_password')
validated = get_licenser().validate_rh(user, pw)
with set_environ(**settings.AWX_TASK_ENV):
validated = get_licenser().validate_rh(user, pw)
if user:
settings.REDHAT_USERNAME = data['rh_username']
if pw:
@@ -202,10 +205,15 @@ class ApiV2SubscriptionView(APIView):
getattr(getattr(exc, 'response', None), 'status_code', None) == 401
):
msg = _("The provided credentials are invalid (HTTP 401).")
if isinstance(exc, (ValueError, OSError)) and exc.args:
elif isinstance(exc, requests.exceptions.ProxyError):
msg = _("Unable to connect to proxy server.")
elif isinstance(exc, requests.exceptions.ConnectionError):
msg = _("Could not connect to subscription service.")
elif isinstance(exc, (ValueError, OSError)) and exc.args:
msg = exc.args[0]
logger.exception(smart_text(u"Invalid license submitted."),
extra=dict(actor=request.user.username))
else:
logger.exception(smart_text(u"Invalid license submitted."),
extra=dict(actor=request.user.username))
return Response({"error": msg}, status=status.HTTP_400_BAD_REQUEST)
return Response(validated)
@@ -302,7 +310,8 @@ class ApiV2ConfigView(APIView):
# If the license is valid, write it to the database.
if license_data_validated['valid_key']:
settings.LICENSE = license_data
settings.TOWER_URL_BASE = "{}://{}".format(request.scheme, request.get_host())
if not settings_registry.is_setting_read_only('TOWER_URL_BASE'):
settings.TOWER_URL_BASE = "{}://{}".format(request.scheme, request.get_host())
return Response(license_data_validated)
logger.warning(smart_text(u"Invalid license submitted."),

View File

@@ -11,7 +11,7 @@ from django.utils.translation import ugettext_lazy as _
# Django REST Framework
from rest_framework.fields import ( # noqa
BooleanField, CharField, ChoiceField, DictField, EmailField,
BooleanField, CharField, ChoiceField, DictField, DateTimeField, EmailField,
IntegerField, ListField, NullBooleanField
)

View File

@@ -1,14 +1,11 @@
# Python
from collections import namedtuple
import contextlib
import logging
import re
import sys
import threading
import time
import traceback
import urllib.parse
from io import StringIO
# Django
from django.conf import LazySettings
@@ -28,8 +25,6 @@ from awx.conf import settings_registry
from awx.conf.models import Setting
from awx.conf.migrations._reencrypt import decrypt_field as old_decrypt_field
import cachetools
# FIXME: Gracefully handle when settings are accessed before the database is
# ready (or during migrations).
@@ -91,42 +86,11 @@ def _ctit_db_wrapper(trans_safe=False):
transaction.set_rollback(False)
yield
except DBError:
# We want the _full_ traceback with the context
# First we get the current call stack, which constitutes the "top",
# it has the context up to the point where the context manager is used
top_stack = StringIO()
traceback.print_stack(file=top_stack)
top_lines = top_stack.getvalue().strip('\n').split('\n')
top_stack.close()
# Get "bottom" stack from the local error that happened
# inside of the "with" block this wraps
exc_type, exc_value, exc_traceback = sys.exc_info()
bottom_stack = StringIO()
traceback.print_tb(exc_traceback, file=bottom_stack)
bottom_lines = bottom_stack.getvalue().strip('\n').split('\n')
# Glue together top and bottom where overlap is found
bottom_cutoff = 0
for i, line in enumerate(bottom_lines):
if line in top_lines:
# start of overlapping section, take overlap from bottom
top_lines = top_lines[:top_lines.index(line)]
bottom_cutoff = i
break
bottom_lines = bottom_lines[bottom_cutoff:]
tb_lines = top_lines + bottom_lines
tb_string = '\n'.join(
['Traceback (most recent call last):'] +
tb_lines +
['{}: {}'.format(exc_type.__name__, str(exc_value))]
)
bottom_stack.close()
# Log the combined stack
if trans_safe:
if 'check_migrations' not in sys.argv:
logger.debug('Database settings are not available, using defaults, error:\n{}'.format(tb_string))
if 'migrate' not in sys.argv and 'check_migrations' not in sys.argv:
logger.exception('Database settings are not available, using defaults.')
else:
logger.debug('Error modifying something related to database settings.\n{}'.format(tb_string))
logger.exception('Error modifying something related to database settings.')
finally:
if trans_safe and is_atomic and rollback_set:
transaction.set_rollback(rollback_set)
@@ -138,12 +102,13 @@ def filter_sensitive(registry, key, value):
return value
# settings.__getattr__ is called *constantly*, and the LOG_AGGREGATOR_ ones are
# so ubiquitous when external logging is enabled that they should be kept in memory
# with a short TTL to avoid even having to contact memcached
# the primary use case for this optimization is the callback receiver
# when external logging is enabled
LOGGING_SETTINGS_CACHE = cachetools.TTLCache(maxsize=50, ttl=1)
class TransientSetting(object):
__slots__ = ('pk', 'value')
def __init__(self, pk, value):
self.pk = pk
self.value = value
class EncryptedCacheProxy(object):
@@ -173,7 +138,6 @@ class EncryptedCacheProxy(object):
def get(self, key, **kwargs):
value = self.cache.get(key, **kwargs)
value = self._handle_encryption(self.decrypter, key, value)
logger.debug('cache get(%r, %r) -> %r', key, empty, filter_sensitive(self.registry, key, value))
return value
def set(self, key, value, log=True, **kwargs):
@@ -196,8 +160,6 @@ class EncryptedCacheProxy(object):
self.set(key, value, log=False, **kwargs)
def _handle_encryption(self, method, key, value):
TransientSetting = namedtuple('TransientSetting', ['pk', 'value'])
if value is not empty and self.registry.is_setting_encrypted(key):
# If the setting exists in the database, we'll use its primary key
# as part of the AES key when encrypting/decrypting
@@ -447,17 +409,11 @@ class SettingsWrapper(UserSettingsHolder):
return self._get_default('SETTINGS_MODULE')
def __getattr__(self, name):
if name.startswith('LOG_AGGREGATOR_'):
cached = LOGGING_SETTINGS_CACHE.get(name)
if cached:
return cached
value = empty
if name in self.all_supported_settings:
with _ctit_db_wrapper(trans_safe=True):
value = self._get_local(name)
if value is not empty:
if name.startswith('LOG_AGGREGATOR_'):
LOGGING_SETTINGS_CACHE[name] = value
return value
value = self._get_default(name)
# sometimes users specify RabbitMQ passwords that contain
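The hunk above replaces a hand-rolled cache with cachetools.TTLCache so that the very frequent LOG_AGGREGATOR_* attribute lookups are answered from process memory for about a second instead of hitting memcached on every access. A minimal sketch of that pattern follows; get_setting and loader are hypothetical names, not part of the AWX code.

import cachetools

# Keep hot settings in-process for one second before falling back to the slow path.
LOGGING_SETTINGS_CACHE = cachetools.TTLCache(maxsize=50, ttl=1)

def get_setting(name, loader):
    # loader is any callable that performs the expensive lookup (cache/DB round trip)
    if name.startswith('LOG_AGGREGATOR_'):
        cached = LOGGING_SETTINGS_CACHE.get(name)
        if cached is not None:
            return cached
    value = loader(name)
    if value is not None and name.startswith('LOG_AGGREGATOR_'):
        LOGGING_SETTINGS_CACHE[name] = value
    return value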

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -307,7 +307,7 @@ class BaseAccess(object):
return True # User has access to both, permission check passed
def check_license(self, add_host_name=None, feature=None, check_expiration=True):
def check_license(self, add_host_name=None, feature=None, check_expiration=True, quiet=False):
validation_info = get_licenser().validate()
if validation_info.get('license_type', 'UNLICENSED') == 'open':
return
@@ -317,8 +317,10 @@ class BaseAccess(object):
validation_info['time_remaining'] = 99999999
validation_info['grace_period_remaining'] = 99999999
report_violation = lambda message: logger.error(message)
if quiet:
report_violation = lambda message: None
else:
report_violation = lambda message: logger.warning(message)
if (
validation_info.get('trial', False) is True or
validation_info['instance_count'] == 10 # basic 10 license
@@ -907,7 +909,7 @@ class HostAccess(BaseAccess):
model = Host
select_related = ('created_by', 'modified_by', 'inventory',
'last_job__job_template', 'last_job_host_summary__job',)
prefetch_related = ('groups',)
prefetch_related = ('groups', 'inventory_sources')
def filtered_queryset(self):
return self.model.objects.filter(inventory__in=Inventory.accessible_pk_qs(self.user, 'read_role'))
@@ -2238,7 +2240,7 @@ class JobEventAccess(BaseAccess):
'''
model = JobEvent
prefetch_related = ('hosts', 'job__job_template', 'host',)
prefetch_related = ('job__job_template', 'host',)
def filtered_queryset(self):
return self.model.objects.filter(
@@ -2427,6 +2429,9 @@ class ScheduleAccess(BaseAccess):
def can_add(self, data):
if not JobLaunchConfigAccess(self.user).can_add(data):
return False
if not data:
return Role.objects.filter(role_field__in=['update_role', 'execute_role'], ancestors__in=self.user.roles.all()).exists()
return self.check_related('unified_job_template', UnifiedJobTemplate, data, role_field='execute_role', mandatory=True)
@check_superuser

View File

@@ -31,7 +31,7 @@ data _since_ the last report date - i.e., new data in the last 24 hours)
'''
@register('config', '1.0')
@register('config', '1.1')
def config(since):
license_info = get_license(show_key=False)
install_type = 'traditional'
@@ -53,6 +53,7 @@ def config(since):
'ansible_version': get_ansible_version(),
'license_type': license_info.get('license_type', 'UNLICENSED'),
'free_instances': license_info.get('free_instances', 0),
'total_licensed_instances': license_info.get('instance_count', 0),
'license_expiry': license_info.get('time_remaining', 0),
'pendo_tracking': settings.PENDO_TRACKING_STATE,
'authentication_backends': settings.AUTHENTICATION_BACKENDS,
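The collector above is registered under a bumped schema version ('1.1') because a new key, total_licensed_instances, is added to its payload. A rough sketch of how such a name/version registry decorator can work is below; COLLECTORS and the decorator body are illustrative, not the actual awx.main.analytics implementation.

# Hypothetical registry: each collector is stored under its name with a schema version.
COLLECTORS = {}

def register(name, version):
    def decorate(func):
        COLLECTORS[name] = (version, func)
        return func
    return decorate

@register('config', '1.1')  # version is bumped whenever the emitted keys change
def config(since=None):
    return {'free_instances': 0, 'total_licensed_instances': 0}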

View File

@@ -15,7 +15,7 @@ from awx.conf.license import get_license
from awx.main.models import Job
from awx.main.access import access_registry
from awx.main.models.ha import TowerAnalyticsState
from awx.main.utils import get_awx_http_client_headers
from awx.main.utils import get_awx_http_client_headers, set_environ
__all__ = ['register', 'gather', 'ship', 'table_version']
@@ -169,12 +169,13 @@ def ship(path):
s = requests.Session()
s.headers = get_awx_http_client_headers()
s.headers.pop('Content-Type')
response = s.post(url,
files=files,
verify="/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem",
auth=(rh_user, rh_password),
headers=s.headers,
timeout=(31, 31))
with set_environ(**settings.AWX_TASK_ENV):
response = s.post(url,
files=files,
verify="/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem",
auth=(rh_user, rh_password),
headers=s.headers,
timeout=(31, 31))
if response.status_code != 202:
return logger.exception('Upload failed with status {}, {}'.format(response.status_code,
response.text))
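The change above wraps the analytics upload in set_environ(**settings.AWX_TASK_ENV) so that proxy-style environment variables configured for task execution also apply to this outbound request. The real helper lives in awx.main.utils; the context manager below is only a sketch of the behavior it is assumed to have.

import os
from contextlib import contextmanager

@contextmanager
def set_environ(**environ):
    # Temporarily overlay the given variables onto os.environ, restoring the
    # previous values (or removing the keys) when the block exits.
    saved = {k: os.environ.get(k) for k in environ}
    os.environ.update({k: str(v) for k, v in environ.items()})
    try:
        yield
    finally:
        for k, old in saved.items():
            if old is None:
                os.environ.pop(k, None)
            else:
                os.environ[k] = old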

View File

@@ -616,6 +616,18 @@ register(
category_slug='jobs',
)
register(
'MAX_FORKS',
field_class=fields.IntegerField,
allow_null=False,
default=200,
label=_('Maximum number of forks per job.'),
help_text=_('Saving a Job Template with more than this number of forks will result in an error. '
'When set to 0, no limit is applied.'),
category=_('Jobs'),
category_slug='jobs',
)
register(
'LOG_AGGREGATOR_HOST',
field_class=fields.CharField,
@@ -787,6 +799,28 @@ register(
)
register(
'AUTOMATION_ANALYTICS_LAST_GATHER',
field_class=fields.DateTimeField,
label=_('Last gather date for Automation Analytics.'),
allow_null=True,
category=_('System'),
category_slug='system'
)
register(
'AUTOMATION_ANALYTICS_GATHER_INTERVAL',
field_class=fields.IntegerField,
label=_('Automation Analytics Gather Interval'),
help_text=_('Interval (in seconds) between data gathering.'),
default=14400, # every 4 hours
min_value=1800, # every 30 minutes
category=_('System'),
category_slug='system'
)
def logging_validate(serializer, attrs):
if not serializer.instance or \
not hasattr(serializer.instance, 'LOG_AGGREGATOR_HOST') or \
@@ -811,10 +845,7 @@ def galaxy_validate(serializer, attrs):
to save settings which obviously break all project updates.
"""
prefix = 'PRIMARY_GALAXY_'
from awx.main.constants import GALAXY_SERVER_FIELDS
if not any('{}{}'.format(prefix, subfield.upper()) in attrs for subfield in GALAXY_SERVER_FIELDS):
return attrs
errors = {}
def _new_value(setting_name):
if setting_name in attrs:
@@ -823,10 +854,22 @@ def galaxy_validate(serializer, attrs):
return ''
return getattr(serializer.instance, setting_name, '')
if not _new_value('PRIMARY_GALAXY_URL'):
if _new_value('PUBLIC_GALAXY_ENABLED') is False:
msg = _('A URL for Primary Galaxy must be defined before disabling public Galaxy.')
# put error in both keys because UI has trouble with errors in toggles
for key in ('PRIMARY_GALAXY_URL', 'PUBLIC_GALAXY_ENABLED'):
errors.setdefault(key, [])
errors[key].append(msg)
raise serializers.ValidationError(errors)
from awx.main.constants import GALAXY_SERVER_FIELDS
if not any('{}{}'.format(prefix, subfield.upper()) in attrs for subfield in GALAXY_SERVER_FIELDS):
return attrs
galaxy_data = {}
for subfield in GALAXY_SERVER_FIELDS:
galaxy_data[subfield] = _new_value('{}{}'.format(prefix, subfield.upper()))
errors = {}
if not galaxy_data['url']:
for k, v in galaxy_data.items():
if v:

View File

@@ -43,7 +43,7 @@ aim_inputs = {
'id': 'object_query',
'label': _('Object Query'),
'type': 'string',
'help_text': _('Lookup query for the object. Ex: "Safe=TestSafe;Object=testAccountName123"'),
'help_text': _('Lookup query for the object. Ex: Safe=TestSafe;Object=testAccountName123'),
}, {
'id': 'object_query_format',
'label': _('Object Query Format'),

View File

@@ -3,6 +3,16 @@ from .plugin import CredentialPlugin
from django.utils.translation import ugettext_lazy as _
from azure.keyvault import KeyVaultClient, KeyVaultAuthentication
from azure.common.credentials import ServicePrincipalCredentials
from msrestazure import azure_cloud
# https://github.com/Azure/msrestazure-for-python/blob/master/msrestazure/azure_cloud.py
clouds = [
vars(azure_cloud)[n]
for n in dir(azure_cloud)
if n.startswith("AZURE_") and n.endswith("_CLOUD")
]
default_cloud = vars(azure_cloud)["AZURE_PUBLIC_CLOUD"]
azure_keyvault_inputs = {
@@ -24,6 +34,12 @@ azure_keyvault_inputs = {
'id': 'tenant',
'label': _('Tenant ID'),
'type': 'string'
}, {
'id': 'cloud_name',
'label': _('Cloud Environment'),
'help_text': _('Specify which azure cloud environment to use.'),
'choices': list(set([default_cloud.name] + [c.name for c in clouds])),
'default': default_cloud.name
}],
'metadata': [{
'id': 'secret_field',
@@ -42,6 +58,7 @@ azure_keyvault_inputs = {
def azure_keyvault_backend(**kwargs):
url = kwargs['url']
[cloud] = [c for c in clouds if c.name == kwargs.get('cloud_name', default_cloud.name)]
def auth_callback(server, resource, scope):
credentials = ServicePrincipalCredentials(
@@ -49,7 +66,7 @@ def azure_keyvault_backend(**kwargs):
client_id = kwargs['client'],
secret = kwargs['secret'],
tenant = kwargs['tenant'],
resource = "https://vault.azure.net",
resource = f"https://{cloud.suffixes.keyvault_dns.split('.', 1).pop()}",
)
token = credentials.token
return token['token_type'], token['access_token']
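The hunk above stops hard-coding https://vault.azure.net and instead derives the OAuth resource from the selected cloud's Key Vault DNS suffix, so non-public clouds resolve to the right endpoint. A small sketch of the string manipulation, using assumed suffix values (the real ones come from msrestazure.azure_cloud):

# '.vault.azure.net' is the public-cloud suffix; the leading dot is dropped by split().
keyvault_dns = '.vault.azure.net'
resource = "https://{}".format(keyvault_dns.split('.', 1).pop())
print(resource)  # https://vault.azure.net

# Assumed suffix for the US government cloud, purely for illustration.
keyvault_dns = '.vault.usgovcloudapi.net'
print("https://{}".format(keyvault_dns.split('.', 1).pop()))  # https://vault.usgovcloudapi.net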

View File

@@ -0,0 +1,56 @@
import logging
import os
import time
from multiprocessing import Process
from django.conf import settings
from django.db import connections
from schedule import Scheduler
from awx.main.dispatch.worker import TaskWorker
logger = logging.getLogger('awx.main.dispatch.periodic')
class Scheduler(Scheduler):
def run_continuously(self):
idle_seconds = max(
1,
min(self.jobs).period.total_seconds() / 2
)
def run():
ppid = os.getppid()
logger.warn(f'periodic beat started')
while True:
if os.getppid() != ppid:
# if the parent PID changes, this process has been orphaned
# via e.g., segfault or sigkill, we should exit too
pid = os.getpid()
logger.warn(f'periodic beat exiting gracefully pid:{pid}')
raise SystemExit()
try:
for conn in connections.all():
# If the database connection has a hiccup, re-establish a new
# connection
conn.close_if_unusable_or_obsolete()
self.run_pending()
except Exception:
logger.exception(
'encountered an error while scheduling periodic tasks'
)
time.sleep(idle_seconds)
process = Process(target=run)
process.daemon = True
process.start()
def run_continuously():
scheduler = Scheduler()
for task in settings.CELERYBEAT_SCHEDULE.values():
apply_async = TaskWorker.resolve_callable(task['task']).apply_async
total_seconds = task['schedule'].total_seconds()
scheduler.every(total_seconds).seconds.do(apply_async)
scheduler.run_continuously()
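run_continuously() above drives the third-party schedule library from settings.CELERYBEAT_SCHEDULE, replacing the celery beat process. The sketch below shows the shape of a schedule entry it is assumed to consume; the task path and interval are illustrative.

from datetime import timedelta

# Each entry needs a dotted task path (resolved via TaskWorker.resolve_callable)
# and a 'schedule' object exposing total_seconds().
CELERYBEAT_SCHEDULE = {
    'cluster_heartbeat': {
        'task': 'awx.main.tasks.cluster_node_heartbeat',
        'schedule': timedelta(seconds=60),
    },
}

# For the entry above, run_continuously() effectively does:
#   scheduler.every(60).seconds.do(resolved_task.apply_async)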

View File

@@ -1,7 +1,9 @@
import logging
import os
import random
import signal
import sys
import time
import traceback
from uuid import uuid4
@@ -72,9 +74,6 @@ class PoolWorker(object):
if not body.get('uuid'):
body['uuid'] = str(uuid4())
uuid = body['uuid']
logger.debug('delivered {} to worker[{}] qsize {}'.format(
uuid, self.pid, self.qsize
))
self.managed_tasks[uuid] = body
self.queue.put(body, block=True, timeout=5)
self.messages_sent += 1
@@ -247,7 +246,7 @@ class WorkerPool(object):
' qsize={{ w.managed_tasks|length }}'
' rss={{ w.mb }}MB'
'{% for task in w.managed_tasks.values() %}'
'\n - {% if loop.index0 == 0 %}running {% else %}queued {% endif %}'
'\n - {% if loop.index0 == 0 %}running {% if "age" in task %}for: {{ "%.1f" % task["age"] }}s {% endif %}{% else %}queued {% endif %}'
'{{ task["uuid"] }} '
'{% if "task" in task %}'
'{{ task["task"].rsplit(".", 1)[-1] }}'
@@ -368,6 +367,26 @@ class AutoscalePool(WorkerPool):
logger.warn('scaling down worker pid:{}'.format(w.pid))
w.quit()
self.workers.remove(w)
if w.alive:
# if we discover a task manager invocation that's been running
# too long, reap it (because otherwise it'll just hold the postgres
# advisory lock forever); the goal of this code is to discover
# deadlocks or other serious issues in the task manager that cause
# the task manager to never do more work
current_task = w.current_task
if current_task and isinstance(current_task, dict):
if current_task.get('task', '').endswith('tasks.run_task_manager'):
if 'started' not in current_task:
w.managed_tasks[
current_task['uuid']
]['started'] = time.time()
age = time.time() - current_task['started']
w.managed_tasks[current_task['uuid']]['age'] = age
if age > (60 * 5):
logger.error(
f'run_task_manager has held the advisory lock for >5m, sending SIGTERM to {w.pid}'
) # noqa
os.kill(w.pid, signal.SIGTERM)
for m in orphaned:
# if all the workers are dead, spawn at least one
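The new branch above acts as a watchdog: the first time a run_task_manager invocation is observed it is stamped with a start time, its age is tracked, and after five minutes the worker is sent SIGTERM so it cannot hold the Postgres advisory lock forever. A compact restatement of that pattern, with a hypothetical helper name:

import os
import signal
import time

MAX_LOCK_AGE = 60 * 5  # five minutes, matching the hunk above

def reap_stuck_task_manager(worker, current_task):
    # Hypothetical helper: stamp the task on first sight, then SIGTERM the worker
    # once it has been running longer than the allowed age.
    if not current_task.get('task', '').endswith('tasks.run_task_manager'):
        return
    current_task.setdefault('started', time.time())
    current_task['age'] = time.time() - current_task['started']
    if current_task['age'] > MAX_LOCK_AGE:
        os.kill(worker.pid, signal.SIGTERM)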

View File

@@ -61,7 +61,7 @@ class AWXConsumer(ConsumerMixin):
])
def control(self, body, message):
logger.warn(body)
logger.warn('Consumer received control message {}'.format(body))
control = body.get('control')
if control in ('status', 'running'):
producer = Producer(
@@ -148,7 +148,6 @@ class BaseWorker(object):
finally:
if 'uuid' in body:
uuid = body['uuid']
logger.debug('task {} is finished'.format(uuid))
finished.put(uuid)
logger.warn('worker exiting gracefully pid:{}'.format(os.getpid()))

View File

@@ -1,10 +1,15 @@
import cProfile
import logging
import os
import pstats
import signal
import tempfile
import time
import traceback
from queue import Empty as QueueEmpty
from django.utils.timezone import now as tz_now
from django.conf import settings
from django.utils.timezone import now as tz_now
from django.db import DatabaseError, OperationalError, connection as django_connection
from django.db.utils import InterfaceError, InternalError, IntegrityError
@@ -32,6 +37,7 @@ class CallbackBrokerWorker(BaseWorker):
'''
MAX_RETRIES = 2
prof = None
def __init__(self):
self.buff = {}
@@ -42,6 +48,26 @@ class CallbackBrokerWorker(BaseWorker):
except QueueEmpty:
return {'event': 'FLUSH'}
def toggle_profiling(self, *args):
if self.prof:
self.prof.disable()
filename = f'callback-{os.getpid()}.pstats'
filepath = os.path.join(tempfile.gettempdir(), filename)
with open(filepath, 'w') as f:
pstats.Stats(self.prof, stream=f).sort_stats('cumulative').print_stats()
pstats.Stats(self.prof).dump_stats(filepath + '.raw')
self.prof = False
logger.error(f'profiling is disabled, wrote {filepath}')
else:
self.prof = cProfile.Profile()
self.prof.enable()
logger.error('profiling is enabled')
def work_loop(self, *args, **kw):
if settings.AWX_CALLBACK_PROFILE:
signal.signal(signal.SIGUSR1, self.toggle_profiling)
return super(CallbackBrokerWorker, self).work_loop(*args, **kw)
def flush(self, force=False):
now = tz_now()
if (
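toggle_profiling above lets an operator flip cProfile collection on and off in a running callback receiver by sending SIGUSR1 (the handler is installed only when AWX_CALLBACK_PROFILE is set). A standalone sketch of the same toggle, outside the worker class:

import cProfile
import os
import pstats
import signal
import tempfile

prof = None

def toggle_profiling(signum, frame):
    # First SIGUSR1 starts collecting; the second writes a .pstats report and stops.
    global prof
    if prof is None:
        prof = cProfile.Profile()
        prof.enable()
    else:
        prof.disable()
        path = os.path.join(tempfile.gettempdir(), 'callback-{}.pstats'.format(os.getpid()))
        with open(path, 'w') as f:
            pstats.Stats(prof, stream=f).sort_stats('cumulative').print_stats()
        prof = None

signal.signal(signal.SIGUSR1, toggle_profiling)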

View File

@@ -370,33 +370,32 @@ class IsolatedManager(object):
private_data_dir
)
if runner_obj.status == 'successful':
for instance in instance_qs:
task_result = {}
try:
task_result = runner_obj.get_fact_cache(instance.hostname)
except Exception:
logger.exception('Failed to read status from isolated instances')
if 'awx_capacity_cpu' in task_result and 'awx_capacity_mem' in task_result:
task_result = {
'cpu': task_result['awx_cpu'],
'mem': task_result['awx_mem'],
'capacity_cpu': task_result['awx_capacity_cpu'],
'capacity_mem': task_result['awx_capacity_mem'],
'version': task_result['awx_capacity_version']
}
IsolatedManager.update_capacity(instance, task_result)
logger.debug('Isolated instance {} successful heartbeat'.format(instance.hostname))
elif instance.capacity == 0:
logger.debug('Isolated instance {} previously marked as lost, could not re-join.'.format(
instance.hostname))
else:
logger.warning('Could not update status of isolated instance {}'.format(instance.hostname))
if instance.is_lost(isolated=True):
instance.capacity = 0
instance.save(update_fields=['capacity'])
logger.error('Isolated instance {} last checked in at {}, marked as lost.'.format(
instance.hostname, instance.modified))
for instance in instance_qs:
task_result = {}
try:
task_result = runner_obj.get_fact_cache(instance.hostname)
except Exception:
logger.exception('Failed to read status from isolated instances')
if 'awx_capacity_cpu' in task_result and 'awx_capacity_mem' in task_result:
task_result = {
'cpu': task_result['awx_cpu'],
'mem': task_result['awx_mem'],
'capacity_cpu': task_result['awx_capacity_cpu'],
'capacity_mem': task_result['awx_capacity_mem'],
'version': task_result['awx_capacity_version']
}
IsolatedManager.update_capacity(instance, task_result)
logger.debug('Isolated instance {} successful heartbeat'.format(instance.hostname))
elif instance.capacity == 0:
logger.debug('Isolated instance {} previously marked as lost, could not re-join.'.format(
instance.hostname))
else:
logger.warning('Could not update status of isolated instance {}'.format(instance.hostname))
if instance.is_lost(isolated=True):
instance.capacity = 0
instance.save(update_fields=['capacity'])
logger.error('Isolated instance {} last checked in at {}, marked as lost.'.format(
instance.hostname, instance.modified))
finally:
if os.path.exists(private_data_dir):
shutil.rmtree(private_data_dir)

View File

@@ -9,6 +9,13 @@ class Command(BaseCommand):
def handle(self, *args, **options):
with connection.cursor() as cursor:
start = {}
for relation in (
'main_jobevent', 'main_inventoryupdateevent',
'main_projectupdateevent', 'main_adhoccommandevent'
):
cursor.execute(f"SELECT MAX(id) FROM {relation};")
start[relation] = cursor.fetchone()[0] or 0
clear = False
while True:
lines = []
@@ -17,19 +24,15 @@ class Command(BaseCommand):
'main_projectupdateevent', 'main_adhoccommandevent'
):
lines.append(relation)
for label, interval in (
('last minute: ', '1 minute'),
('last 5 minutes:', '5 minutes'),
('last hour: ', '1 hour'),
):
cursor.execute(
f"SELECT MAX(id) - MIN(id) FROM {relation} WHERE modified > now() - '{interval}'::interval;"
)
events = cursor.fetchone()[0] or 0
lines.append(f'{label} {events}')
minimum = start[relation]
cursor.execute(
f"SELECT MAX(id) - MIN(id) FROM {relation} WHERE id > {minimum} AND modified > now() - '1 minute'::interval;"
)
events = cursor.fetchone()[0] or 0
lines.append(f'↳ last minute {events}')
lines.append('')
if clear:
for i in range(20):
for i in range(12):
sys.stdout.write('\x1b[1A\x1b[2K')
for l in lines:
print(l)
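The rewritten command above records the highest event id per table at startup and then, on each refresh, counts only rows newer than that mark and modified in the last minute, which keeps the query cheap on very large event tables. A sketch of that sampling step (table names as in the command; cursor is any DB-API cursor):

RELATIONS = ('main_jobevent', 'main_inventoryupdateevent',
             'main_projectupdateevent', 'main_adhoccommandevent')

def sample(cursor, start):
    # start maps each relation to the MAX(id) captured when the command began.
    rates = {}
    for relation in RELATIONS:
        cursor.execute(
            f"SELECT MAX(id) - MIN(id) FROM {relation} "
            f"WHERE id > {start[relation]} AND modified > now() - '1 minute'::interval;"
        )
        rates[relation] = cursor.fetchone()[0] or 0
    return rates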

View File

@@ -16,13 +16,10 @@ from awx.main.models import (
Job, AdHocCommand, ProjectUpdate, InventoryUpdate,
SystemJob, WorkflowJob, Notification
)
from awx.main.signals import ( # noqa
emit_update_inventory_on_created_or_deleted,
emit_update_inventory_computed_fields,
from awx.main.signals import (
disable_activity_stream,
disable_computed_fields
)
from django.db.models.signals import post_save, post_delete, m2m_changed # noqa
class Command(BaseCommand):

View File

@@ -921,11 +921,14 @@ class Command(BaseCommand):
available_instances = license_info.get('available_instances', 0)
free_instances = license_info.get('free_instances', 0)
time_remaining = license_info.get('time_remaining', 0)
hard_error = license_info.get('trial', False) is True or license_info['instance_count'] == 10
new_count = Host.objects.active_count()
if time_remaining <= 0 and not license_info.get('demo', False):
logger.error(LICENSE_EXPIRED_MESSAGE)
if license_info.get('trial', False) is True:
if time_remaining <= 0:
if hard_error:
logger.error(LICENSE_EXPIRED_MESSAGE)
raise CommandError("License has expired!")
else:
logger.warning(LICENSE_EXPIRED_MESSAGE)
# special check for tower-type inventory sources
# but only if running the plugin
TOWER_SOURCE_FILES = ['tower.yml', 'tower.yaml']
@@ -938,15 +941,11 @@ class Command(BaseCommand):
'new_count': new_count,
'available_instances': available_instances,
}
if license_info.get('demo', False):
logger.error(DEMO_LICENSE_MESSAGE % d)
else:
if hard_error:
logger.error(LICENSE_MESSAGE % d)
if (
license_info.get('trial', False) is True or
license_info['instance_count'] == 10 # basic 10 license
):
raise CommandError('License count exceeded!')
else:
logger.warning(LICENSE_MESSAGE % d)
def check_org_host_limit(self):
license_info = get_licenser().validate()
@@ -1007,12 +1006,6 @@ class Command(BaseCommand):
except re.error:
raise CommandError('invalid regular expression for --host-filter')
'''
TODO: Remove this deprecation when we remove support for rax.py
'''
if self.source == "rax.py":
logger.info("Rackspace inventory sync is Deprecated in Tower 3.1.0 and support for Rackspace will be removed in a future release.")
begin = time.time()
self.load_inventory_from_database()

View File

@@ -1,13 +1,11 @@
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved.
import os
import logging
from multiprocessing import Process
from django.conf import settings
from django.core.cache import cache as django_cache
from django.core.management.base import BaseCommand
from django.db import connection as django_connection, connections
from django.db import connection as django_connection
from kombu import Exchange, Queue
from awx.main.utils.handlers import AWXProxyHandler
@@ -16,6 +14,7 @@ from awx.main.dispatch.control import Control
from awx.main.dispatch.kombu import Connection
from awx.main.dispatch.pool import AutoscalePool
from awx.main.dispatch.worker import AWXConsumer, TaskWorker
from awx.main.dispatch import periodic
logger = logging.getLogger('awx.main.dispatch')
@@ -36,71 +35,6 @@ class Command(BaseCommand):
help=('cause the dispatcher to recycle all of its worker processes;'
'running jobs will run to completion first'))
def beat(self):
from celery import Celery
from celery.beat import PersistentScheduler
from celery.apps import beat
class AWXScheduler(PersistentScheduler):
def __init__(self, *args, **kwargs):
self.ppid = os.getppid()
super(AWXScheduler, self).__init__(*args, **kwargs)
def setup_schedule(self):
super(AWXScheduler, self).setup_schedule()
self.update_from_dict(settings.CELERYBEAT_SCHEDULE)
def tick(self, *args, **kwargs):
if os.getppid() != self.ppid:
# if the parent PID changes, this process has been orphaned
# via e.g., segfault or sigkill, we should exit too
raise SystemExit()
return super(AWXScheduler, self).tick(*args, **kwargs)
def apply_async(self, entry, producer=None, advance=True, **kwargs):
for conn in connections.all():
# If the database connection has a hiccup, re-establish a new
# connection
conn.close_if_unusable_or_obsolete()
task = TaskWorker.resolve_callable(entry.task)
result, queue = task.apply_async()
class TaskResult(object):
id = result['uuid']
return TaskResult()
sched_file = '/var/lib/awx/beat.db'
app = Celery()
app.conf.BROKER_URL = settings.BROKER_URL
app.conf.CELERY_TASK_RESULT_EXPIRES = False
# celery in py3 seems to have a bug where the celerybeat schedule
# shelve can become corrupted; we've _only_ seen this in Ubuntu and py36
# it can be avoided by detecting and removing the corrupted file
# at some point, we'll just stop using celerybeat, because it's clearly
# buggy, too -_-
#
# https://github.com/celery/celery/issues/4777
sched = AWXScheduler(schedule_filename=sched_file, app=app)
try:
sched.setup_schedule()
except Exception:
logger.exception('{} is corrupted, removing.'.format(sched_file))
sched._remove_db()
finally:
try:
sched.close()
except Exception:
logger.exception('{} failed to sync/close'.format(sched_file))
beat.Beat(
30,
app,
schedule=sched_file, scheduler_cls=AWXScheduler
).run()
def handle(self, *arg, **options):
if options.get('status'):
print(Control('dispatcher').status())
@@ -116,9 +50,10 @@ class Command(BaseCommand):
# for the DB and memcached connections (that way lies race conditions)
django_connection.close()
django_cache.close()
beat = Process(target=self.beat)
beat.daemon = True
beat.start()
# spawn a daemon thread to periodically enqueue scheduled tasks
# (like the node heartbeat)
periodic.run_continuously()
reaper.reap()
consumer = None

View File

@@ -78,8 +78,7 @@ class HostManager(models.Manager):
self.core_filters = {}
qs = qs & q
unique_by_name = qs.order_by('name', 'pk').distinct('name')
return qs.filter(pk__in=unique_by_name)
return qs.order_by('name', 'pk').distinct('name')
return qs

View File

@@ -3,15 +3,17 @@ from uuid import uuid4
from django.db import migrations
from awx.main.models import Instance
def _generate_new_uuid_for_iso_nodes(apps, schema_editor):
Instance = apps.get_model('main', 'Instance')
for instance in Instance.objects.all():
if instance.is_isolated():
# The below code is a copy paste of instance.is_isolated()
# We can't call is_isolated because we are using the "old" version
# of the Instance definition.
if instance.rampart_groups.filter(controller__isnull=False).exists():
instance.uuid = str(uuid4())
instance.save()
class Migration(migrations.Migration):

View File

@@ -0,0 +1,18 @@
# Generated by Django 2.2.4 on 2019-11-25 20:53
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('main', '0101_v370_generate_new_uuids_for_iso_nodes'),
]
operations = [
migrations.AddField(
model_name='unifiedjob',
name='canceled_on',
field=models.DateTimeField(db_index=True, default=None, editable=False, help_text='The date and time when the cancel request was sent.', null=True),
),
]

View File

@@ -0,0 +1,52 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2019-02-21 17:35
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('main', '0102_v370_unifiedjob_canceled'),
]
operations = [
migrations.RemoveField(
model_name='group',
name='groups_with_active_failures',
),
migrations.RemoveField(
model_name='group',
name='has_active_failures',
),
migrations.RemoveField(
model_name='group',
name='has_inventory_sources',
),
migrations.RemoveField(
model_name='group',
name='hosts_with_active_failures',
),
migrations.RemoveField(
model_name='group',
name='total_groups',
),
migrations.RemoveField(
model_name='group',
name='total_hosts',
),
migrations.RemoveField(
model_name='host',
name='has_active_failures',
),
migrations.RemoveField(
model_name='host',
name='has_inventory_sources',
),
migrations.AlterField(
model_name='jobhostsummary',
name='failed',
field=models.BooleanField(db_index=True, default=False, editable=False),
),
]

View File

@@ -0,0 +1,24 @@
# Generated by Django 2.2.8 on 2020-01-15 20:01
from django.db import migrations, models
def cleanup_scan_jts(apps, schema_editor):
JobTemplate = apps.get_model('main', 'JobTemplate')
JobTemplate.objects.filter(job_type='scan').update(job_type='run')
class Migration(migrations.Migration):
dependencies = [
('main', '0103_v370_remove_computed_fields'),
]
operations = [
migrations.RunPython(cleanup_scan_jts),
migrations.AlterField(
model_name='jobtemplate',
name='job_type',
field=models.CharField(choices=[('run', 'Run'), ('check', 'Check')], default='run', max_length=64),
),
]

View File

@@ -0,0 +1,21 @@
# Generated by Django 2.2.8 on 2020-01-15 18:01
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('main', '0104_v370_cleanup_old_scan_jts'),
]
operations = [
migrations.RemoveField(
model_name='jobevent',
name='parent',
),
migrations.RemoveField(
model_name='jobevent',
name='hosts',
),
]

View File

@@ -0,0 +1,17 @@
# Generated by Django 2.2.8 on 2020-01-27 12:39
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('main', '0105_v370_remove_jobevent_parent_and_hosts'),
]
operations = [
migrations.RemoveField(
model_name='inventory',
name='groups_with_active_failures',
),
]

View File

@@ -0,0 +1,23 @@
# Generated by Django 2.2.4 on 2020-01-08 22:11
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('main', '0106_v370_remove_inventory_groups_with_active_failures'),
]
operations = [
migrations.AddField(
model_name='workflowjobnode',
name='all_parents_must_converge',
field=models.BooleanField(default=False, help_text='If enabled then the node will only run if all of the parent nodes have met the criteria to reach this node'),
),
migrations.AddField(
model_name='workflowjobtemplatenode',
name='all_parents_must_converge',
field=models.BooleanField(default=False, help_text='If enabled then the node will only run if all of the parent nodes have met the criteria to reach this node'),
),
]

View File

@@ -0,0 +1,18 @@
# Generated by Django 2.2.8 on 2020-02-06 16:43
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('main', '0107_v370_workflow_convergence_api_toggle'),
]
operations = [
migrations.AddField(
model_name='unifiedjob',
name='dependencies_processed',
field=models.BooleanField(default=False, editable=False, help_text='If True, the task manager has already processed potential dependencies for this job.'),
),
]

View File

@@ -1136,7 +1136,7 @@ ManagedCredentialType(
'help_text': ugettext_noop('The OpenShift or Kubernetes API Endpoint to authenticate with.')
},{
'id': 'bearer_token',
'label': ugettext_noop('API authentication bearer token.'),
'label': ugettext_noop('API authentication bearer token'),
'type': 'string',
'secret': True,
},{

View File

@@ -360,11 +360,10 @@ class BasePlaybookEvent(CreatedModifiedModel):
value = force_text(event_data.get(field, '')).strip()
if value != getattr(self, field):
setattr(self, field, value)
if isinstance(self, JobEvent):
analytics_logger.info(
'Event data saved.',
extra=dict(python_objects=dict(job_event=self))
)
analytics_logger.info(
'Event data saved.',
extra=dict(python_objects=dict(job_event=self))
)
@classmethod
def create_from_data(cls, **kwargs):
@@ -450,19 +449,6 @@ class JobEvent(BasePlaybookEvent):
default='',
editable=False,
)
hosts = models.ManyToManyField(
'Host',
related_name='job_events',
editable=False,
)
parent = models.ForeignKey(
'self',
related_name='children',
null=True,
default=None,
on_delete=models.SET_NULL,
editable=False,
)
parent_uuid = models.CharField(
max_length=1024,
default='',
@@ -617,6 +603,7 @@ class BaseCommandEvent(CreatedModifiedModel):
kwargs.pop('created', None)
sanitize_event_keys(kwargs, cls.VALID_KEYS)
kwargs.pop('workflow_job_id', None)
event = cls(**kwargs)
event._update_from_event_data()
return event

View File

@@ -4,7 +4,6 @@
# Python
import datetime
import time
import itertools
import logging
import re
import copy
@@ -123,12 +122,6 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin):
help_text=_('This field is deprecated and will be removed in a future release. '
'Total number of groups in this inventory.'),
)
groups_with_active_failures = models.PositiveIntegerField(
default=0,
editable=False,
help_text=_('This field is deprecated and will be removed in a future release. '
'Number of groups in this inventory with active failures.'),
)
has_inventory_sources = models.BooleanField(
default=False,
editable=False,
@@ -339,139 +332,17 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin):
return data
def update_host_computed_fields(self):
'''
Update computed fields for all hosts in this inventory.
'''
hosts_to_update = {}
hosts_qs = self.hosts
# Define queryset of all hosts with active failures.
hosts_with_active_failures = hosts_qs.filter(last_job_host_summary__isnull=False, last_job_host_summary__failed=True).values_list('pk', flat=True)
# Find all hosts that need the has_active_failures flag set.
hosts_to_set = hosts_qs.filter(has_active_failures=False, pk__in=hosts_with_active_failures)
for host_pk in hosts_to_set.values_list('pk', flat=True):
host_updates = hosts_to_update.setdefault(host_pk, {})
host_updates['has_active_failures'] = True
# Find all hosts that need the has_active_failures flag cleared.
hosts_to_clear = hosts_qs.filter(has_active_failures=True).exclude(pk__in=hosts_with_active_failures)
for host_pk in hosts_to_clear.values_list('pk', flat=True):
host_updates = hosts_to_update.setdefault(host_pk, {})
host_updates['has_active_failures'] = False
# Define queryset of all hosts with cloud inventory sources.
hosts_with_cloud_inventory = hosts_qs.filter(inventory_sources__source__in=CLOUD_INVENTORY_SOURCES).values_list('pk', flat=True)
# Find all hosts that need the has_inventory_sources flag set.
hosts_to_set = hosts_qs.filter(has_inventory_sources=False, pk__in=hosts_with_cloud_inventory)
for host_pk in hosts_to_set.values_list('pk', flat=True):
host_updates = hosts_to_update.setdefault(host_pk, {})
host_updates['has_inventory_sources'] = True
# Find all hosts that need the has_inventory_sources flag cleared.
hosts_to_clear = hosts_qs.filter(has_inventory_sources=True).exclude(pk__in=hosts_with_cloud_inventory)
for host_pk in hosts_to_clear.values_list('pk', flat=True):
host_updates = hosts_to_update.setdefault(host_pk, {})
host_updates['has_inventory_sources'] = False
# Now apply updates to hosts where needed (in batches).
all_update_pks = list(hosts_to_update.keys())
def _chunk(items, chunk_size):
for i, group in itertools.groupby(enumerate(items), lambda x: x[0] // chunk_size):
yield (g[1] for g in group)
for update_pks in _chunk(all_update_pks, 500):
for host in hosts_qs.filter(pk__in=update_pks):
host_updates = hosts_to_update[host.pk]
for field, value in host_updates.items():
setattr(host, field, value)
host.save(update_fields=host_updates.keys())
def update_group_computed_fields(self):
'''
Update computed fields for all active groups in this inventory.
'''
group_children_map = self.get_group_children_map()
group_hosts_map = self.get_group_hosts_map()
active_host_pks = set(self.hosts.values_list('pk', flat=True))
failed_host_pks = set(self.hosts.filter(last_job_host_summary__failed=True).values_list('pk', flat=True))
# active_group_pks = set(self.groups.values_list('pk', flat=True))
failed_group_pks = set() # Update below as we check each group.
groups_with_cloud_pks = set(self.groups.filter(inventory_sources__source__in=CLOUD_INVENTORY_SOURCES).values_list('pk', flat=True))
groups_to_update = {}
# Build list of group pks to check, starting with the groups at the
# deepest level within the tree.
root_group_pks = set(self.root_groups.values_list('pk', flat=True))
group_depths = {} # pk: max_depth
def update_group_depths(group_pk, current_depth=0):
max_depth = group_depths.get(group_pk, -1)
# Arbitrarily limit depth to avoid hitting Python recursion limit (which defaults to 1000).
if current_depth > 100:
return
if current_depth > max_depth:
group_depths[group_pk] = current_depth
for child_pk in group_children_map.get(group_pk, set()):
update_group_depths(child_pk, current_depth + 1)
for group_pk in root_group_pks:
update_group_depths(group_pk)
group_pks_to_check = [x[1] for x in sorted([(v,k) for k,v in group_depths.items()], reverse=True)]
for group_pk in group_pks_to_check:
# Get all children and host pks for this group.
parent_pks_to_check = set([group_pk])
parent_pks_checked = set()
child_pks = set()
host_pks = set()
while parent_pks_to_check:
for parent_pk in list(parent_pks_to_check):
c_ids = group_children_map.get(parent_pk, set())
child_pks.update(c_ids)
parent_pks_to_check.remove(parent_pk)
parent_pks_checked.add(parent_pk)
parent_pks_to_check.update(c_ids - parent_pks_checked)
h_ids = group_hosts_map.get(parent_pk, set())
host_pks.update(h_ids)
# Define updates needed for this group.
group_updates = groups_to_update.setdefault(group_pk, {})
group_updates.update({
'total_hosts': len(active_host_pks & host_pks),
'has_active_failures': bool(failed_host_pks & host_pks),
'hosts_with_active_failures': len(failed_host_pks & host_pks),
'total_groups': len(child_pks),
'groups_with_active_failures': len(failed_group_pks & child_pks),
'has_inventory_sources': bool(group_pk in groups_with_cloud_pks),
})
if group_updates['has_active_failures']:
failed_group_pks.add(group_pk)
# Now apply updates to each group as needed (in batches).
all_update_pks = list(groups_to_update.keys())
for offset in range(0, len(all_update_pks), 500):
update_pks = all_update_pks[offset:(offset + 500)]
for group in self.groups.filter(pk__in=update_pks):
group_updates = groups_to_update[group.pk]
for field, value in list(group_updates.items()):
if getattr(group, field) != value:
setattr(group, field, value)
else:
group_updates.pop(field)
if group_updates:
group.save(update_fields=group_updates.keys())
def update_computed_fields(self, update_groups=True, update_hosts=True):
def update_computed_fields(self):
'''
Update model fields that are computed from database relationships.
'''
logger.debug("Going to update inventory computed fields, pk={0}".format(self.pk))
start_time = time.time()
if update_hosts:
self.update_host_computed_fields()
if update_groups:
self.update_group_computed_fields()
active_hosts = self.hosts
failed_hosts = active_hosts.filter(has_active_failures=True)
failed_hosts = active_hosts.filter(last_job_host_summary__failed=True)
active_groups = self.groups
if self.kind == 'smart':
active_groups = active_groups.none()
failed_groups = active_groups.filter(has_active_failures=True)
if self.kind == 'smart':
active_inventory_sources = self.inventory_sources.none()
else:
@@ -482,7 +353,6 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin):
'total_hosts': active_hosts.count(),
'hosts_with_active_failures': failed_hosts.count(),
'total_groups': active_groups.count(),
'groups_with_active_failures': failed_groups.count(),
'has_inventory_sources': bool(active_inventory_sources.count()),
'total_inventory_sources': active_inventory_sources.count(),
'inventory_sources_with_failures': failed_inventory_sources.count(),
@@ -545,7 +415,7 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin):
if (self.kind == 'smart' and 'host_filter' in kwargs.get('update_fields', ['host_filter']) and
connection.vendor != 'sqlite'):
# Minimal update of host_count for smart inventory host filter changes
self.update_computed_fields(update_groups=False, update_hosts=False)
self.update_computed_fields()
def delete(self, *args, **kwargs):
self._update_host_smart_inventory_memeberships()
@@ -631,18 +501,6 @@ class Host(CommonModelNameNotUnique, RelatedJobsMixin):
editable=False,
on_delete=models.SET_NULL,
)
has_active_failures = models.BooleanField(
default=False,
editable=False,
help_text=_('This field is deprecated and will be removed in a future release. '
'Flag indicating whether the last job failed for this host.'),
)
has_inventory_sources = models.BooleanField(
default=False,
editable=False,
help_text=_('This field is deprecated and will be removed in a future release. '
'Flag indicating whether this host was created/updated from any external inventory sources.'),
)
inventory_sources = models.ManyToManyField(
'InventorySource',
related_name='hosts',
@@ -673,34 +531,6 @@ class Host(CommonModelNameNotUnique, RelatedJobsMixin):
def get_absolute_url(self, request=None):
return reverse('api:host_detail', kwargs={'pk': self.pk}, request=request)
def update_computed_fields(self, update_inventory=True, update_groups=True):
'''
Update model fields that are computed from database relationships.
'''
has_active_failures = bool(self.last_job_host_summary and
self.last_job_host_summary.failed)
active_inventory_sources = self.inventory_sources.filter(source__in=CLOUD_INVENTORY_SOURCES)
computed_fields = {
'has_active_failures': has_active_failures,
'has_inventory_sources': bool(active_inventory_sources.count()),
}
for field, value in computed_fields.items():
if getattr(self, field) != value:
setattr(self, field, value)
else:
computed_fields.pop(field)
if computed_fields:
self.save(update_fields=computed_fields.keys())
# Groups and inventory may also need to be updated when host fields
# change.
# NOTE: I think this is no longer needed
# if update_groups:
# for group in self.all_groups:
# group.update_computed_fields()
# if update_inventory:
# self.inventory.update_computed_fields(update_groups=False,
# update_hosts=False)
# Rebuild summary fields cache
variables_dict = VarsDictProperty('variables')
@property
@@ -815,42 +645,6 @@ class Group(CommonModelNameNotUnique, RelatedJobsMixin):
blank=True,
help_text=_('Hosts associated directly with this group.'),
)
total_hosts = models.PositiveIntegerField(
default=0,
editable=False,
help_text=_('This field is deprecated and will be removed in a future release. '
'Total number of hosts directly or indirectly in this group.'),
)
has_active_failures = models.BooleanField(
default=False,
editable=False,
help_text=_('This field is deprecated and will be removed in a future release. '
'Flag indicating whether this group has any hosts with active failures.'),
)
hosts_with_active_failures = models.PositiveIntegerField(
default=0,
editable=False,
help_text=_('This field is deprecated and will be removed in a future release. '
'Number of hosts in this group with active failures.'),
)
total_groups = models.PositiveIntegerField(
default=0,
editable=False,
help_text=_('This field is deprecated and will be removed in a future release. '
'Total number of child groups contained within this group.'),
)
groups_with_active_failures = models.PositiveIntegerField(
default=0,
editable=False,
help_text=_('This field is deprecated and will be removed in a future release. '
'Number of child groups within this group that have active failures.'),
)
has_inventory_sources = models.BooleanField(
default=False,
editable=False,
help_text=_('This field is deprecated and will be removed in a future release. '
'Flag indicating whether this group was created/updated from any external inventory sources.'),
)
inventory_sources = models.ManyToManyField(
'InventorySource',
related_name='groups',
@@ -925,32 +719,6 @@ class Group(CommonModelNameNotUnique, RelatedJobsMixin):
mark_actual()
activity_stream_delete(None, self)
def update_computed_fields(self):
'''
Update model fields that are computed from database relationships.
'''
active_hosts = self.all_hosts
failed_hosts = active_hosts.filter(last_job_host_summary__failed=True)
active_groups = self.all_children
# FIXME: May not be accurate unless we always update groups depth-first.
failed_groups = active_groups.filter(has_active_failures=True)
active_inventory_sources = self.inventory_sources.filter(source__in=CLOUD_INVENTORY_SOURCES)
computed_fields = {
'total_hosts': active_hosts.count(),
'has_active_failures': bool(failed_hosts.count()),
'hosts_with_active_failures': failed_hosts.count(),
'total_groups': active_groups.count(),
'groups_with_active_failures': failed_groups.count(),
'has_inventory_sources': bool(active_inventory_sources.count()),
}
for field, value in computed_fields.items():
if getattr(self, field) != value:
setattr(self, field, value)
else:
computed_fields.pop(field)
if computed_fields:
self.save(update_fields=computed_fields.keys())
variables_dict = VarsDictProperty('variables')
def get_all_parents(self, except_pks=None):
@@ -1556,7 +1324,7 @@ class InventorySource(UnifiedJobTemplate, InventorySourceOptions, CustomVirtualE
self.update()
if not getattr(_inventory_updates, 'is_updating', False):
if self.inventory is not None:
self.inventory.update_computed_fields(update_groups=False, update_hosts=False)
self.inventory.update_computed_fields()
def _get_current_status(self):
if self.source:
@@ -2616,6 +2384,9 @@ class satellite6(PluginFileInjector):
group_patterns = '[]'
group_prefix = 'foreman_'
want_hostcollections = 'False'
want_ansible_ssh_host = 'False'
rich_params = 'False'
want_facts = 'True'
foreman_opts = dict(inventory_update.source_vars_dict.items())
foreman_opts.setdefault('ssl_verify', 'False')
for k, v in foreman_opts.items():
@@ -2625,6 +2396,12 @@ class satellite6(PluginFileInjector):
group_prefix = v
elif k == 'satellite6_want_hostcollections' and isinstance(v, bool):
want_hostcollections = v
elif k == 'satellite6_want_ansible_ssh_host' and isinstance(v, bool):
want_ansible_ssh_host = v
elif k == 'satellite6_rich_params' and isinstance(v, bool):
rich_params = v
elif k == 'satellite6_want_facts' and isinstance(v, bool):
want_facts = v
else:
cp.set(section, k, str(v))
@@ -2636,9 +2413,11 @@ class satellite6(PluginFileInjector):
section = 'ansible'
cp.add_section(section)
cp.set(section, 'group_patterns', group_patterns)
cp.set(section, 'want_facts', 'True')
cp.set(section, 'want_facts', str(want_facts))
cp.set(section, 'want_hostcollections', str(want_hostcollections))
cp.set(section, 'group_prefix', group_prefix)
cp.set(section, 'want_ansible_ssh_host', str(want_ansible_ssh_host))
cp.set(section, 'rich_params', str(rich_params))
section = 'cache'
cp.add_section(section)
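The satellite6 injector above now forwards want_ansible_ssh_host, rich_params, and want_facts from the source variables into the generated foreman.ini instead of hard-coding them. A minimal sketch of how those options land in the ini file via configparser; the file path here is illustrative:

import configparser

cp = configparser.ConfigParser()
cp.add_section('ansible')
cp.set('ansible', 'want_facts', str(True))
cp.set('ansible', 'want_hostcollections', str(False))
cp.set('ansible', 'want_ansible_ssh_host', str(False))
cp.set('ansible', 'rich_params', str(False))

with open('foreman.ini', 'w') as f:  # a temporary per-job path in practice
    cp.write(f)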

View File

@@ -13,6 +13,7 @@ from urllib.parse import urljoin
# Django
from django.conf import settings
from django.core.exceptions import ValidationError
from django.db import models
#from django.core.cache import cache
from django.utils.encoding import smart_str
@@ -28,7 +29,7 @@ from awx.api.versioning import reverse
from awx.main.models.base import (
BaseModel, CreatedModifiedModel,
prevent_search, accepts_json,
JOB_TYPE_CHOICES, VERBOSITY_CHOICES,
JOB_TYPE_CHOICES, NEW_JOB_TYPE_CHOICES, VERBOSITY_CHOICES,
VarsDictProperty
)
from awx.main.models.events import JobEvent, SystemJobEvent
@@ -204,6 +205,11 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour
app_label = 'main'
ordering = ('name',)
job_type = models.CharField(
max_length=64,
choices=NEW_JOB_TYPE_CHOICES,
default='run',
)
host_config_key = prevent_search(models.CharField(
max_length=1024,
blank=True,
@@ -293,6 +299,11 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour
def resources_needed_to_start(self):
return [fd for fd in ['project', 'inventory'] if not getattr(self, '{}_id'.format(fd))]
def clean_forks(self):
if settings.MAX_FORKS > 0 and self.forks > settings.MAX_FORKS:
raise ValidationError(_(f'Maximum number of forks ({settings.MAX_FORKS}) exceeded.'))
return self.forks
def create_job(self, **kwargs):
'''
Create a new job based on this template.
@@ -818,8 +829,10 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskMana
continue
host.ansible_facts = ansible_facts
host.ansible_facts_modified = now()
ansible_local_system_id = ansible_facts.get('ansible_local', {}).get('insights', {}).get('system_id', None)
ansible_facts_system_id = ansible_facts.get('insights', {}).get('system_id', None)
ansible_local = ansible_facts.get('ansible_local', {}).get('insights', {})
ansible_facts = ansible_facts.get('insights', {})
ansible_local_system_id = ansible_local.get('system_id', None) if isinstance(ansible_local, dict) else None
ansible_facts_system_id = ansible_facts.get('system_id', None) if isinstance(ansible_facts, dict) else None
if ansible_local_system_id:
print("Setting local {}".format(ansible_local_system_id))
logger.debug("Insights system_id {} found for host <{}, {}> in"
@@ -1060,7 +1073,7 @@ class JobHostSummary(CreatedModifiedModel):
processed = models.PositiveIntegerField(default=0, editable=False)
rescued = models.PositiveIntegerField(default=0, editable=False)
skipped = models.PositiveIntegerField(default=0, editable=False)
failed = models.BooleanField(default=False, editable=False)
failed = models.BooleanField(default=False, editable=False, db_index=True)
def __str__(self):
host = getattr_dne(self, 'host')
@@ -1095,7 +1108,6 @@ class JobHostSummary(CreatedModifiedModel):
update_fields.append('last_job_host_summary_id')
if update_fields:
self.host.save(update_fields=update_fields)
#self.host.update_computed_fields()
class SystemJobOptions(BaseModel):

View File

@@ -270,21 +270,19 @@ class JobNotificationMixin(object):
'elapsed', 'job_explanation', 'execution_node', 'controller_node', 'allow_simultaneous',
'scm_revision', 'diff_mode', 'job_slice_number', 'job_slice_count', 'custom_virtualenv',
'approval_status', 'approval_node_name', 'workflow_url',
{'host_status_counts': ['skipped', 'ok', 'changed', 'failures', 'dark']},
{'playbook_counts': ['play_count', 'task_count']},
{'host_status_counts': ['skipped', 'ok', 'changed', 'failed', 'failures', 'dark'
'processed', 'rescued', 'ignored']},
{'summary_fields': [{'inventory': ['id', 'name', 'description', 'has_active_failures',
'total_hosts', 'hosts_with_active_failures', 'total_groups',
'groups_with_active_failures', 'has_inventory_sources',
'has_inventory_sources',
'total_inventory_sources', 'inventory_sources_with_failures',
'organization_id', 'kind']},
{'project': ['id', 'name', 'description', 'status', 'scm_type']},
{'project_update': ['id', 'name', 'description', 'status', 'failed']},
{'job_template': ['id', 'name', 'description']},
{'unified_job_template': ['id', 'name', 'description', 'unified_job_type']},
{'instance_group': ['name', 'id']},
{'created_by': ['id', 'username', 'first_name', 'last_name']},
{'labels': ['count', 'results']},
{'source_workflow_job': ['description', 'elapsed', 'failed', 'id', 'name', 'status']}]}]
{'labels': ['count', 'results']}]}]
@classmethod
def context_stub(cls):
@@ -303,7 +301,7 @@ class JobNotificationMixin(object):
'finished': False,
'force_handlers': False,
'forks': 0,
'host_status_counts': {'skipped': 1, 'ok': 5, 'changed': 3, 'failures': 0, 'dark': 0},
'host_status_counts': {'skipped': 1, 'ok': 5, 'changed': 3, 'failures': 0, 'dark': 0, 'failed': False, 'processed': 0, 'rescued': 0},
'id': 42,
'job_explanation': 'Sample job explanation',
'job_slice_count': 1,
@@ -314,7 +312,6 @@ class JobNotificationMixin(object):
'limit': 'bar_limit',
'modified': datetime.datetime(2018, 12, 13, 6, 4, 0, 0, tzinfo=datetime.timezone.utc),
'name': 'Stub JobTemplate',
'playbook_counts': {'play_count': 5, 'task_count': 10},
'playbook': 'ping.yml',
'scm_revision': '',
'skip_tags': '',
@@ -327,7 +324,6 @@ class JobNotificationMixin(object):
'username': 'admin'},
'instance_group': {'id': 1, 'name': 'tower'},
'inventory': {'description': 'Sample inventory description',
'groups_with_active_failures': 0,
'has_active_failures': False,
'has_inventory_sources': False,
'hosts_with_active_failures': 0,
@@ -348,18 +344,10 @@ class JobNotificationMixin(object):
'name': 'Stub project',
'scm_type': 'git',
'status': 'successful'},
'project_update': {'id': 5, 'name': 'Stub Project Update', 'description': 'Project Update',
'status': 'running', 'failed': False},
'unified_job_template': {'description': 'Sample unified job template description',
'id': 39,
'name': 'Stub Job Template',
'unified_job_type': 'job'},
'source_workflow_job': {'description': 'Sample workflow job description',
'elapsed': 0.000,
'failed': False,
'id': 88,
'name': 'Stub WorkflowJobTemplate',
'status': 'running'}},
'unified_job_type': 'job'}},
'timeout': 0,
'type': 'job',
'url': '/api/v2/jobs/13/',
@@ -393,10 +381,20 @@ class JobNotificationMixin(object):
The context will contain whitelisted content retrieved from a serialized job object
(see JobNotificationMixin.JOB_FIELDS_WHITELIST), the job's friendly name,
and a url to the job run."""
context = {'job': {},
'job_friendly_name': self.get_notification_friendly_name(),
'url': self.get_ui_url(),
'job_metadata': json.dumps(self.notification_data(), indent=4)}
job_context = {'host_status_counts': {}}
summary = None
if hasattr(self, 'job_host_summaries'):
summary = self.job_host_summaries.first()
if summary:
from awx.api.serializers import JobHostSummarySerializer
summary_data = JobHostSummarySerializer(summary).to_representation(summary)
job_context['host_status_counts'] = summary_data
context = {
'job': job_context,
'job_friendly_name': self.get_notification_friendly_name(),
'url': self.get_ui_url(),
'job_metadata': json.dumps(self.notification_data(), indent=4)
}
def build_context(node, fields, whitelisted_fields):
for safe_field in whitelisted_fields:

View File

@@ -3,6 +3,7 @@
# Python
from io import StringIO
import datetime
import codecs
import json
import logging
@@ -623,6 +624,11 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
editable=False,
help_text=_("The date and time the job was queued for starting."),
)
dependencies_processed = models.BooleanField(
default=False,
editable=False,
help_text=_("If True, the task manager has already processed potential dependencies for this job.")
)
finished = models.DateTimeField(
null=True,
default=None,
@@ -630,6 +636,13 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
help_text=_("The date and time the job finished execution."),
db_index=True,
)
canceled_on = models.DateTimeField(
null=True,
default=None,
editable=False,
help_text=_("The date and time when the cancel request was sent."),
db_index=True,
)
elapsed = models.DecimalField(
max_digits=12,
decimal_places=3,
@@ -833,7 +846,12 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
self.unified_job_template = self._get_parent_instance()
if 'unified_job_template' not in update_fields:
update_fields.append('unified_job_template')
if self.cancel_flag and not self.canceled_on:
# Record the 'canceled' time.
self.canceled_on = now()
if 'canceled_on' not in update_fields:
update_fields.append('canceled_on')
# Okay; we're done. Perform the actual save.
result = super(UnifiedJob, self).save(*args, **kwargs)
@@ -997,6 +1015,8 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
dir=settings.JOBOUTPUT_ROOT,
encoding='utf-8'
)
from awx.main.tasks import purge_old_stdout_files # circular import
purge_old_stdout_files.apply_async()
# Before the addition of event-based stdout, older versions of
# awx stored stdout as raw text blobs in a certain database column
@@ -1199,12 +1219,17 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
status_data['instance_group_name'] = self.instance_group.name
else:
status_data['instance_group_name'] = None
elif status in ['successful', 'failed', 'canceled'] and self.finished:
status_data['finished'] = datetime.datetime.strftime(self.finished, "%Y-%m-%dT%H:%M:%S.%fZ")
status_data.update(self.websocket_emit_data())
status_data['group_name'] = 'jobs'
if getattr(self, 'unified_job_template_id', None):
status_data['unified_job_template_id'] = self.unified_job_template_id
emit_channel_notification('jobs-status_changed', status_data)
if self.spawned_by_workflow:
status_data['group_name'] = "workflow_events"
status_data['workflow_job_template_id'] = self.unified_job_template.id
emit_channel_notification('workflow_events-' + str(self.workflow_job_id), status_data)
except IOError: # includes socket errors
logger.exception('%s failed to emit channel msg about status change', self.log_format)

View File

@@ -79,6 +79,11 @@ class WorkflowNodeBase(CreatedModifiedModel, LaunchTimeConfig):
symmetrical=False,
related_name='%(class)ss_always',
)
all_parents_must_converge = models.BooleanField(
default=False,
help_text=_("If enabled then the node will only run if all of the parent nodes "
"have met the criteria to reach this node")
)
unified_job_template = models.ForeignKey(
'UnifiedJobTemplate',
related_name='%(class)ss',
@@ -102,7 +107,7 @@ class WorkflowNodeBase(CreatedModifiedModel, LaunchTimeConfig):
'''
return ['workflow_job', 'unified_job_template',
'extra_data', 'survey_passwords',
'inventory', 'credentials', 'char_prompts']
'inventory', 'credentials', 'char_prompts', 'all_parents_must_converge']
def create_workflow_job_node(self, **kwargs):
'''
@@ -130,7 +135,7 @@ class WorkflowJobTemplateNode(WorkflowNodeBase):
FIELDS_TO_PRESERVE_AT_COPY = [
'unified_job_template', 'workflow_job_template', 'success_nodes', 'failure_nodes',
'always_nodes', 'credentials', 'inventory', 'extra_data', 'survey_passwords',
'char_prompts'
'char_prompts', 'all_parents_must_converge'
]
REENCRYPTION_BLACKLIST_AT_COPY = ['extra_data', 'survey_passwords']
@@ -745,6 +750,8 @@ class WorkflowApproval(UnifiedJob, JobNotificationMixin):
def signal_start(self, **kwargs):
can_start = super(WorkflowApproval, self).signal_start(**kwargs)
self.send_approval_notification('running')
self.started = self.created
self.save(update_fields=['started'])
return can_start
def send_approval_notification(self, approval_status):

View File

@@ -2,6 +2,7 @@
# All Rights Reserved.
import datetime
import json
import logging
import requests
import dateutil.parser as dp
@@ -23,6 +24,33 @@ class GrafanaBackend(AWXBaseEmailBackend, CustomNotificationBase):
recipient_parameter = "grafana_url"
sender_parameter = None
DEFAULT_BODY = "{{ job_metadata }}"
default_messages = {
"started": {
"body": DEFAULT_BODY, "message": CustomNotificationBase.DEFAULT_MSG
},
"success": {
"body": DEFAULT_BODY, "message": CustomNotificationBase.DEFAULT_MSG
},
"error": {
"body": DEFAULT_BODY, "message": CustomNotificationBase.DEFAULT_MSG
},
"workflow_approval": {
"running": {
"message": CustomNotificationBase.DEFAULT_APPROVAL_RUNNING_MSG, "body": None
},
"approved": {
"message": CustomNotificationBase.DEFAULT_APPROVAL_APPROVED_MSG, "body": None
},
"timed_out": {
"message": CustomNotificationBase.DEFAULT_APPROVAL_TIMEOUT_MSG, "body": None
},
"denied": {
"message": CustomNotificationBase.DEFAULT_APPROVAL_DENIED_MSG, "body": None
}
}
}
def __init__(self, grafana_key, dashboardId=None, panelId=None, annotation_tags=None, grafana_no_verify_ssl=False, isRegion=True,
fail_silently=False, **kwargs):
super(GrafanaBackend, self).__init__(fail_silently=fail_silently)
@@ -34,6 +62,13 @@ class GrafanaBackend(AWXBaseEmailBackend, CustomNotificationBase):
self.isRegion = isRegion
def format_body(self, body):
# expect body to be a string representing a dict
try:
potential_body = json.loads(body)
if isinstance(potential_body, dict):
body = potential_body
except json.JSONDecodeError:
body = {}
return body
def send_messages(self, messages):
@@ -41,14 +76,16 @@ class GrafanaBackend(AWXBaseEmailBackend, CustomNotificationBase):
for m in messages:
grafana_data = {}
grafana_headers = {}
try:
epoch=datetime.datetime.utcfromtimestamp(0)
grafana_data['time'] = int((dp.parse(m.body['started']).replace(tzinfo=None) - epoch).total_seconds() * 1000)
grafana_data['timeEnd'] = int((dp.parse(m.body['finished']).replace(tzinfo=None) - epoch).total_seconds() * 1000)
except ValueError:
logger.error(smart_text(_("Error converting time {} or timeEnd {} to int.").format(m.body['started'],m.body['finished'])))
if not self.fail_silently:
raise Exception(smart_text(_("Error converting time {} and/or timeEnd {} to int.").format(m.body['started'],m.body['finished'])))
if 'started' in m.body:
try:
epoch=datetime.datetime.utcfromtimestamp(0)
grafana_data['time'] = grafana_data['timeEnd'] = int((dp.parse(m.body['started']).replace(tzinfo=None) - epoch).total_seconds() * 1000)
if m.body.get('finished'):
grafana_data['timeEnd'] = int((dp.parse(m.body['finished']).replace(tzinfo=None) - epoch).total_seconds() * 1000)
except ValueError:
logger.error(smart_text(_("Error converting time {} or timeEnd {} to int.").format(m.body['started'],m.body['finished'])))
if not self.fail_silently:
raise Exception(smart_text(_("Error converting time {} and/or timeEnd {} to int.").format(m.body['started'],m.body['finished'])))
grafana_data['isRegion'] = self.isRegion
grafana_data['dashboardId'] = self.dashboardId
grafana_data['panelId'] = self.panelId

View File
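
The reworked Grafana backend above only builds annotation timestamps when the notification body carries a 'started' time, and falls back to the start time for timeEnd when 'finished' is absent (e.g. for "running" notifications). A minimal sketch of that conversion, assuming the same body field names:

import datetime
import dateutil.parser as dp

def grafana_time_fields(body):
    # Grafana annotations want epoch milliseconds.
    epoch = datetime.datetime.utcfromtimestamp(0)
    to_ms = lambda ts: int((dp.parse(ts).replace(tzinfo=None) - epoch).total_seconds() * 1000)
    data = {}
    if 'started' in body:
        data['time'] = data['timeEnd'] = to_ms(body['started'])
        if body.get('finished'):
            data['timeEnd'] = to_ms(body['finished'])
    return data

print(grafana_time_fields({'started': '2020-03-11T14:00:00Z'}))
print(grafana_time_fields({'started': '2020-03-11T14:00:00Z', 'finished': '2020-03-11T14:05:00Z'}))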

@@ -8,7 +8,7 @@ REPLACE_STR = '$encrypted$'
class UriCleaner(object):
REPLACE_STR = REPLACE_STR
SENSITIVE_URI_PATTERN = re.compile(r'(\w+:(\/?\/?)[^\s]+)', re.MULTILINE) # NOQA
SENSITIVE_URI_PATTERN = re.compile(r'(\w{1,20}:(\/?\/?)[^\s]+)', re.MULTILINE) # NOQA
@staticmethod
def remove_sensitive(cleartext):

View File
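
The redact change above bounds the URL scheme match to at most 20 word characters so that the regex does not crawl through very large strings that contain no URL at all. A rough sketch of the effect; note that the real UriCleaner.remove_sensitive only redacts the credential portion of a matched URL and keeps the host, whereas this sketch simply replaces the whole match:

import re

SENSITIVE_URI_PATTERN = re.compile(r'(\w{1,20}:(\/?\/?)[^\s]+)', re.MULTILINE)

def redact(text, replace='$encrypted$'):
    # Replace anything that looks like scheme://user:pass@host with the marker.
    return SENSITIVE_URI_PATTERN.sub(replace, text)

print(redact('cloning https://user:secret@git.example.com/repo.git failed'))
print(redact('x' * 100000) == 'x' * 100000)  # large non-URL input passes through quickly, unchanged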

@@ -89,8 +89,8 @@ class SimpleDAG(object):
run_status(n['node_object']),
color
)
for label, edges in self.node_from_edges_by_label.iteritems():
for from_node, to_nodes in edges.iteritems():
for label, edges in self.node_from_edges_by_label.items():
for from_node, to_nodes in edges.items():
for to_node in to_nodes:
doc += "%s -> %s [ label=\"%s\" ];\n" % (
run_status(self.nodes[from_node]['node_object']),
@@ -140,36 +140,36 @@ class SimpleDAG(object):
def find_ord(self, obj):
return self.node_obj_to_node_index.get(obj, None)
def _get_dependencies_by_label(self, node_index, label):
def _get_children_by_label(self, node_index, label):
return [self.nodes[index] for index in
self.node_from_edges_by_label.get(label, {})
.get(node_index, [])]
def get_dependencies(self, obj, label=None):
def get_children(self, obj, label=None):
this_ord = self.find_ord(obj)
nodes = []
if label:
return self._get_dependencies_by_label(this_ord, label)
return self._get_children_by_label(this_ord, label)
else:
nodes = []
for l in self.node_from_edges_by_label.keys():
nodes.extend(self._get_dependencies_by_label(this_ord, l))
nodes.extend(self._get_children_by_label(this_ord, l))
return nodes
def _get_dependents_by_label(self, node_index, label):
def _get_parents_by_label(self, node_index, label):
return [self.nodes[index] for index in
self.node_to_edges_by_label.get(label, {})
.get(node_index, [])]
def get_dependents(self, obj, label=None):
def get_parents(self, obj, label=None):
this_ord = self.find_ord(obj)
nodes = []
if label:
return self._get_dependents_by_label(this_ord, label)
return self._get_parents_by_label(this_ord, label)
else:
nodes = []
for l in self.node_to_edges_by_label.keys():
nodes.extend(self._get_dependents_by_label(this_ord, l))
nodes.extend(self._get_parents_by_label(this_ord, l))
return nodes
def get_root_nodes(self):
@@ -188,7 +188,7 @@ class SimpleDAG(object):
while stack:
node_obj = stack.pop()
children = [node['node_object'] for node in self.get_dependencies(node_obj)]
children = [node['node_object'] for node in self.get_children(node_obj)]
children_to_add = list(filter(lambda node_obj: node_obj not in node_objs_visited, children))
if children_to_add:
@@ -212,7 +212,7 @@ class SimpleDAG(object):
if obj.id in obj_ids_processed:
return
for child in self.get_dependencies(obj):
for child in self.get_children(obj):
visit(child)
obj_ids_processed.add(obj.id)
nodes_sorted.appendleft(node)

View File
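
The rename above (get_dependencies/get_dependents to get_children/get_parents) matches what the lookups actually do: edges are stored per label in both directions, so "children" follow outgoing edges and "parents" follow incoming ones, optionally filtered by edge label. A tiny standalone sketch of that idea (not the real SimpleDAG API):

class TinyLabeledDAG:
    def __init__(self):
        self.from_edges = {}  # label -> {parent: [children]}
        self.to_edges = {}    # label -> {child: [parents]}

    def add_edge(self, parent, child, label):
        self.from_edges.setdefault(label, {}).setdefault(parent, []).append(child)
        self.to_edges.setdefault(label, {}).setdefault(child, []).append(parent)

    def get_children(self, node, label=None):
        labels = [label] if label else list(self.from_edges)
        return [c for l in labels for c in self.from_edges.get(l, {}).get(node, [])]

    def get_parents(self, node, label=None):
        labels = [label] if label else list(self.to_edges)
        return [p for l in labels for p in self.to_edges.get(l, {}).get(node, [])]

g = TinyLabeledDAG()
g.add_edge('root', 'on_ok', 'success_nodes')
g.add_edge('root', 'cleanup', 'always_nodes')
print(g.get_children('root', 'success_nodes'))  # ['on_ok']
print(g.get_parents('cleanup'))                 # ['root']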

@@ -55,7 +55,7 @@ class WorkflowDAG(SimpleDAG):
def _are_relevant_parents_finished(self, node):
obj = node['node_object']
parent_nodes = [p['node_object'] for p in self.get_dependents(obj)]
parent_nodes = [p['node_object'] for p in self.get_parents(obj)]
for p in parent_nodes:
if p.do_not_run is True:
continue
@@ -69,33 +69,55 @@ class WorkflowDAG(SimpleDAG):
return False
return True
def _all_parents_met_convergence_criteria(self, node):
# This function takes any node and checks that all of its parents have met their criteria to run the child.
# It returns a boolean and is really only useful if the node is an ALL convergence node; it is
# intended to be used in conjunction with the node property `all_parents_must_converge`.
obj = node['node_object']
parent_nodes = [p['node_object'] for p in self.get_parents(obj)]
for p in parent_nodes:
# the parent node's job has finished with a success or failure status
if p.job and p.job.status in ["successful", "failed"]:
if p.job and p.job.status == "successful":
status = "success_nodes"
elif p.job and p.job.status == "failed":
status = "failure_nodes"
# check that the node's status matches either a pathway of the same status or an always path.
if (p not in [node['node_object'] for node in self.get_parents(obj, status)] and
p not in [node['node_object'] for node in self.get_parents(obj, "always_nodes")]):
return False
return True
def bfs_nodes_to_run(self):
nodes = self.get_root_nodes()
nodes_found = []
node_ids_visited = set()
for index, n in enumerate(nodes):
obj = n['node_object']
if obj.id in node_ids_visited:
continue
node_ids_visited.add(obj.id)
if obj.do_not_run is True:
continue
if obj.job:
elif obj.job:
if obj.job.status in ['failed', 'error', 'canceled']:
nodes.extend(self.get_dependencies(obj, 'failure_nodes') +
self.get_dependencies(obj, 'always_nodes'))
nodes.extend(self.get_children(obj, 'failure_nodes') +
self.get_children(obj, 'always_nodes'))
elif obj.job.status == 'successful':
nodes.extend(self.get_dependencies(obj, 'success_nodes') +
self.get_dependencies(obj, 'always_nodes'))
nodes.extend(self.get_children(obj, 'success_nodes') +
self.get_children(obj, 'always_nodes'))
elif obj.unified_job_template is None:
nodes.extend(self.get_dependencies(obj, 'failure_nodes') +
self.get_dependencies(obj, 'always_nodes'))
nodes.extend(self.get_children(obj, 'failure_nodes') +
self.get_children(obj, 'always_nodes'))
else:
if self._are_relevant_parents_finished(n):
# This catches root nodes or ANY convergence nodes
if not obj.all_parents_must_converge and self._are_relevant_parents_finished(n):
nodes_found.append(n)
# This catches ALL convergence nodes
elif obj.all_parents_must_converge and self._are_relevant_parents_finished(n):
if self._all_parents_met_convergence_criteria(n):
nodes_found.append(n)
return [n['node_object'] for n in nodes_found]
def cancel_node_jobs(self):
@@ -135,8 +157,8 @@ class WorkflowDAG(SimpleDAG):
for node in failed_nodes:
obj = node['node_object']
if (len(self.get_dependencies(obj, 'failure_nodes')) +
len(self.get_dependencies(obj, 'always_nodes'))) == 0:
if (len(self.get_children(obj, 'failure_nodes')) +
len(self.get_children(obj, 'always_nodes'))) == 0:
if obj.unified_job_template is None:
res = True
failed_unified_job_template_node_ids.append(str(obj.id))
@@ -145,8 +167,8 @@ class WorkflowDAG(SimpleDAG):
failed_path_nodes_id_status.append((str(obj.id), obj.job.status))
if res is True:
s = _("No error handle path for workflow job node(s) [{node_status}] workflow job "
"node(s) missing unified job template and error handle path [{no_ufjt}].")
s = _("No error handling path for workflow job node(s) [{node_status}]. Workflow job "
"node(s) missing unified job template and error handling path [{no_ufjt}].")
parms = {
'node_status': '',
'no_ufjt': '',
@@ -190,35 +212,48 @@ class WorkflowDAG(SimpleDAG):
pass
elif p.job:
if p.job.status == 'successful':
if node in (self.get_dependencies(p, 'success_nodes') +
self.get_dependencies(p, 'always_nodes')):
if node in (self.get_children(p, 'success_nodes') +
self.get_children(p, 'always_nodes')):
return False
elif p.job.status in ['failed', 'error', 'canceled']:
if node in (self.get_dependencies(p, 'failure_nodes') +
self.get_dependencies(p, 'always_nodes')):
if node in (self.get_children(p, 'failure_nodes') +
self.get_children(p, 'always_nodes')):
return False
else:
return False
elif p.do_not_run is False and p.unified_job_template is None:
if node in (self.get_dependencies(p, 'failure_nodes') +
self.get_dependencies(p, 'always_nodes')):
elif not p.do_not_run and p.unified_job_template is None:
if node in (self.get_children(p, 'failure_nodes') +
self.get_children(p, 'always_nodes')):
return False
else:
return False
return True
r'''
Determine whether the current node is a convergence node by first checking that all of its
parents are finished, then checking that every parent met the path criteria needed to run
the convergence child (i.e. a parent had to fail, succeed, etc. for the path to proceed).
Returns a list of the node objects that were marked do-not-run.
'''
def mark_dnr_nodes(self):
root_nodes = self.get_root_nodes()
nodes_marked_do_not_run = []
for node in self.sort_nodes_topological():
obj = node['node_object']
if obj.do_not_run is False and not obj.job and node not in root_nodes:
parent_nodes = [p['node_object'] for p in self.get_dependents(obj)]
if self._are_all_nodes_dnr_decided(parent_nodes):
if self._should_mark_node_dnr(node, parent_nodes):
parent_nodes = [p['node_object'] for p in self.get_parents(obj)]
if not obj.do_not_run and not obj.job and node not in root_nodes:
if obj.all_parents_must_converge:
if any(p.do_not_run for p in parent_nodes) or not self._all_parents_met_convergence_criteria(node):
obj.do_not_run = True
nodes_marked_do_not_run.append(node)
else:
if self._are_all_nodes_dnr_decided(parent_nodes):
if self._should_mark_node_dnr(node, parent_nodes):
obj.do_not_run = True
nodes_marked_do_not_run.append(node)
return [n['node_object'] for n in nodes_marked_do_not_run]

View File
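
The convergence logic added above means an ALL convergence node only runs once every parent has reached it along an edge whose label matches that parent's outcome (or an always edge); any parent arriving on a mismatched edge disqualifies the node. A hedged standalone sketch of that rule, detached from dag.py's node and edge objects:

def all_parents_met_criteria(parents):
    """parents: list of (job_status, edge_label) pairs, one per parent of the convergence node."""
    for status, edge in parents:
        if edge == 'always_nodes':
            continue  # always edges are satisfied by any outcome
        if status == 'successful' and edge != 'success_nodes':
            return False
        if status == 'failed' and edge != 'failure_nodes':
            return False
    return True

print(all_parents_met_criteria([('successful', 'success_nodes'), ('failed', 'failure_nodes')]))  # True
print(all_parents_met_criteria([('successful', 'failure_nodes'), ('failed', 'failure_nodes')]))  # False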

@@ -23,6 +23,7 @@ from awx.main.models import (
Project,
ProjectUpdate,
SystemJob,
UnifiedJob,
WorkflowApproval,
WorkflowJob,
WorkflowJobTemplate
@@ -74,21 +75,6 @@ class TaskManager():
key=lambda task: task.created)
return all_tasks
def get_latest_project_update_tasks(self, all_sorted_tasks):
project_ids = set()
for task in all_sorted_tasks:
if isinstance(task, Job):
project_ids.add(task.project_id)
return ProjectUpdate.objects.filter(id__in=project_ids)
def get_latest_inventory_update_tasks(self, all_sorted_tasks):
inventory_ids = set()
for task in all_sorted_tasks:
if isinstance(task, Job):
inventory_ids.add(task.inventory_id)
return InventoryUpdate.objects.filter(id__in=inventory_ids)
def get_running_workflow_jobs(self):
graph_workflow_jobs = [wf for wf in
WorkflowJob.objects.filter(status='running')]
@@ -200,9 +186,6 @@ class TaskManager():
schedule_task_manager()
return result
def get_dependent_jobs_for_inv_and_proj_update(self, job_obj):
return [{'type': j.model_to_str(), 'id': j.id} for j in job_obj.dependent_jobs.all()]
def start_task(self, task, rampart_group, dependent_tasks=None, instance=None):
from awx.main.tasks import handle_work_error, handle_work_success
@@ -364,10 +347,6 @@ class TaskManager():
def should_update_inventory_source(self, job, latest_inventory_update):
now = tz_now()
# Already processed dependencies for this job
if job.dependent_jobs.all():
return False
if latest_inventory_update is None:
return True
'''
@@ -393,8 +372,6 @@ class TaskManager():
def should_update_related_project(self, job, latest_project_update):
now = tz_now()
if job.dependent_jobs.all():
return False
if latest_project_update is None:
return True
@@ -426,18 +403,21 @@ class TaskManager():
return True
return False
def generate_dependencies(self, task):
dependencies = []
if type(task) is Job:
def generate_dependencies(self, undeped_tasks):
created_dependencies = []
for task in undeped_tasks:
dependencies = []
if not type(task) is Job:
continue
# TODO: Can remove task.project None check after scan-job-default-playbook is removed
if task.project is not None and task.project.scm_update_on_launch is True:
latest_project_update = self.get_latest_project_update(task)
if self.should_update_related_project(task, latest_project_update):
project_task = self.create_project_update(task)
created_dependencies.append(project_task)
dependencies.append(project_task)
else:
if latest_project_update.status in ['waiting', 'pending', 'running']:
dependencies.append(latest_project_update)
dependencies.append(latest_project_update)
# Inventory created 2 seconds behind job
try:
@@ -452,56 +432,20 @@ class TaskManager():
latest_inventory_update = self.get_latest_inventory_update(inventory_source)
if self.should_update_inventory_source(task, latest_inventory_update):
inventory_task = self.create_inventory_update(task, inventory_source)
created_dependencies.append(inventory_task)
dependencies.append(inventory_task)
else:
if latest_inventory_update.status in ['waiting', 'pending', 'running']:
dependencies.append(latest_inventory_update)
dependencies.append(latest_inventory_update)
if len(dependencies) > 0:
self.capture_chain_failure_dependencies(task, dependencies)
return dependencies
def process_dependencies(self, dependent_task, dependency_tasks):
for task in dependency_tasks:
if self.is_job_blocked(task):
logger.debug("Dependent {} is blocked from running".format(task.log_format))
continue
preferred_instance_groups = task.preferred_instance_groups
found_acceptable_queue = False
idle_instance_that_fits = None
for rampart_group in preferred_instance_groups:
if idle_instance_that_fits is None:
idle_instance_that_fits = rampart_group.find_largest_idle_instance()
if not rampart_group.is_containerized and self.get_remaining_capacity(rampart_group.name) <= 0:
logger.debug("Skipping group {} capacity <= 0".format(rampart_group.name))
continue
execution_instance = rampart_group.fit_task_to_most_remaining_capacity_instance(task)
if execution_instance:
logger.debug("Starting dependent {} in group {} instance {}".format(
task.log_format, rampart_group.name, execution_instance.hostname))
elif not execution_instance and idle_instance_that_fits:
if not rampart_group.is_containerized:
execution_instance = idle_instance_that_fits
logger.debug("Starting dependent {} in group {} on idle instance {}".format(
task.log_format, rampart_group.name, execution_instance.hostname))
if execution_instance or rampart_group.is_containerized:
self.graph[rampart_group.name]['graph'].add_job(task)
tasks_to_fail = [t for t in dependency_tasks if t != task]
tasks_to_fail += [dependent_task]
self.start_task(task, rampart_group, tasks_to_fail, execution_instance)
found_acceptable_queue = True
break
else:
logger.debug("No instance available in group {} to run job {} w/ capacity requirement {}".format(
rampart_group.name, task.log_format, task.task_impact))
if not found_acceptable_queue:
logger.debug("Dependent {} couldn't be scheduled on graph, waiting for next cycle".format(task.log_format))
UnifiedJob.objects.filter(pk__in = [task.pk for task in undeped_tasks]).update(dependencies_processed=True)
return created_dependencies
def process_pending_tasks(self, pending_tasks):
running_workflow_templates = set([wf.unified_job_template_id for wf in self.get_running_workflow_jobs()])
for task in pending_tasks:
self.process_dependencies(task, self.generate_dependencies(task))
if self.is_job_blocked(task):
logger.debug("{} is blocked from running".format(task.log_format))
continue
@@ -574,13 +518,6 @@ class TaskManager():
def calculate_capacity_consumed(self, tasks):
self.graph = InstanceGroup.objects.capacity_values(tasks=tasks, graph=self.graph)
def would_exceed_capacity(self, task, instance_group):
current_capacity = self.graph[instance_group]['consumed_capacity']
capacity_total = self.graph[instance_group]['capacity_total']
if current_capacity == 0:
return False
return (task.task_impact + current_capacity > capacity_total)
def consume_capacity(self, task, instance_group):
logger.debug('{} consumed {} capacity units from {} with prior total of {}'.format(
task.log_format, task.task_impact, instance_group,
@@ -598,6 +535,9 @@ class TaskManager():
self.process_running_tasks(running_tasks)
pending_tasks = [t for t in all_sorted_tasks if t.status == 'pending']
undeped_tasks = [t for t in pending_tasks if not t.dependencies_processed]
dependencies = self.generate_dependencies(undeped_tasks)
self.process_pending_tasks(dependencies)
self.process_pending_tasks(pending_tasks)
def _schedule(self):

View File
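
The TaskManager rework above generates dependencies once per pending job in a single batch, feeds the created project/inventory updates through the scheduler as ordinary pending tasks, and bulk-marks the batch as dependencies_processed so later cycles skip it. A standalone sketch of that flow (no Django or ORM; a dict flag stands in for the UnifiedJob field):

def schedule_cycle(pending_tasks, make_dependencies):
    undeped = [t for t in pending_tasks if not t.get('dependencies_processed')]
    created = []
    for task in undeped:
        created.extend(make_dependencies(task))   # e.g. project / inventory updates
    for task in undeped:
        task['dependencies_processed'] = True     # bulk UnifiedJob update in the real code
    return created + pending_tasks                # dependencies are processed first

jobs = [{'name': 'job1', 'dependencies_processed': False}]
queue = schedule_cycle(jobs, lambda t: [{'name': t['name'] + '-project-update',
                                         'dependencies_processed': True}])
print([t['name'] for t in queue])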

@@ -10,6 +10,7 @@ import pkg_resources
import sys
# Django
from django.db import connection
from django.conf import settings
from django.db.models.signals import (
pre_save,
@@ -71,41 +72,6 @@ def get_current_user_or_none():
return u
def emit_update_inventory_computed_fields(sender, **kwargs):
logger.debug("In update inventory computed fields")
if getattr(_inventory_updates, 'is_updating', False):
return
instance = kwargs['instance']
if sender == Group.hosts.through:
sender_name = 'group.hosts'
elif sender == Group.parents.through:
sender_name = 'group.parents'
elif sender == Host.inventory_sources.through:
sender_name = 'host.inventory_sources'
elif sender == Group.inventory_sources.through:
sender_name = 'group.inventory_sources'
else:
sender_name = str(sender._meta.verbose_name)
if kwargs['signal'] == post_save:
if sender == Job:
return
sender_action = 'saved'
elif kwargs['signal'] == post_delete:
sender_action = 'deleted'
elif kwargs['signal'] == m2m_changed and kwargs['action'] in ('post_add', 'post_remove', 'post_clear'):
sender_action = 'changed'
else:
return
logger.debug('%s %s, updating inventory computed fields: %r %r',
sender_name, sender_action, sender, kwargs)
try:
inventory = instance.inventory
except Inventory.DoesNotExist:
pass
else:
update_inventory_computed_fields.delay(inventory.id, True)
def emit_update_inventory_on_created_or_deleted(sender, **kwargs):
if getattr(_inventory_updates, 'is_updating', False):
return
@@ -124,7 +90,9 @@ def emit_update_inventory_on_created_or_deleted(sender, **kwargs):
pass
else:
if inventory is not None:
update_inventory_computed_fields.delay(inventory.id, True)
connection.on_commit(
lambda: update_inventory_computed_fields.delay(inventory.id)
)
def rebuild_role_ancestor_list(reverse, model, instance, pk_set, action, **kwargs):
@@ -207,10 +175,6 @@ def connect_computed_field_signals():
post_delete.connect(emit_update_inventory_on_created_or_deleted, sender=Host)
post_save.connect(emit_update_inventory_on_created_or_deleted, sender=Group)
post_delete.connect(emit_update_inventory_on_created_or_deleted, sender=Group)
m2m_changed.connect(emit_update_inventory_computed_fields, sender=Group.hosts.through)
m2m_changed.connect(emit_update_inventory_computed_fields, sender=Group.parents.through)
m2m_changed.connect(emit_update_inventory_computed_fields, sender=Host.inventory_sources.through)
m2m_changed.connect(emit_update_inventory_computed_fields, sender=Group.inventory_sources.through)
post_save.connect(emit_update_inventory_on_created_or_deleted, sender=InventorySource)
post_delete.connect(emit_update_inventory_on_created_or_deleted, sender=InventorySource)
post_save.connect(emit_update_inventory_on_created_or_deleted, sender=Job)
@@ -347,10 +311,6 @@ def disable_computed_fields():
post_delete.disconnect(emit_update_inventory_on_created_or_deleted, sender=Host)
post_save.disconnect(emit_update_inventory_on_created_or_deleted, sender=Group)
post_delete.disconnect(emit_update_inventory_on_created_or_deleted, sender=Group)
m2m_changed.disconnect(emit_update_inventory_computed_fields, sender=Group.hosts.through)
m2m_changed.disconnect(emit_update_inventory_computed_fields, sender=Group.parents.through)
m2m_changed.disconnect(emit_update_inventory_computed_fields, sender=Host.inventory_sources.through)
m2m_changed.disconnect(emit_update_inventory_computed_fields, sender=Group.inventory_sources.through)
post_save.disconnect(emit_update_inventory_on_created_or_deleted, sender=InventorySource)
post_delete.disconnect(emit_update_inventory_on_created_or_deleted, sender=InventorySource)
post_save.disconnect(emit_update_inventory_on_created_or_deleted, sender=Job)

View File
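
The signals change above defers the computed-fields Celery dispatch until the database transaction commits, so the worker never picks up an inventory id from a transaction that might still roll back. A minimal sketch of the pattern, assuming a configured Django project and any Celery-style task with a .delay() method:

from django.db import transaction

def emit_after_commit(inventory, update_task):
    # update_task could be awx.main.tasks.update_inventory_computed_fields in the code above;
    # the lambda late-binds inventory.id so the dispatch only happens once the commit succeeds.
    transaction.on_commit(lambda: update_task.delay(inventory.id))

Outside an atomic block Django runs the callback immediately, which is why the tests below patch connection.on_commit via the immediate_on_commit helper.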

@@ -52,6 +52,7 @@ import ansible_runner
from awx import __version__ as awx_application_version
from awx.main.constants import CLOUD_PROVIDERS, PRIVILEGE_ESCALATION_METHODS, STANDARD_INVENTORY_UPDATE_ENV, GALAXY_SERVER_FIELDS
from awx.main.access import access_registry
from awx.main.redact import UriCleaner
from awx.main.models import (
Schedule, TowerScheduleState, Instance, InstanceGroup,
UnifiedJob, Notification,
@@ -337,17 +338,31 @@ def send_notifications(notification_list, job_id=None):
@task()
def gather_analytics():
from awx.conf.models import Setting
from rest_framework.fields import DateTimeField
if not settings.INSIGHTS_TRACKING_STATE:
return
try:
tgz = analytics.gather()
if not tgz:
return
logger.debug('gathered analytics: {}'.format(tgz))
analytics.ship(tgz)
finally:
if os.path.exists(tgz):
os.remove(tgz)
last_gather = Setting.objects.filter(key='AUTOMATION_ANALYTICS_LAST_GATHER').first()
if last_gather:
last_time = DateTimeField().to_internal_value(last_gather.value)
else:
last_time = None
gather_time = now()
if not last_time or ((gather_time - last_time).total_seconds() > settings.AUTOMATION_ANALYTICS_GATHER_INTERVAL):
with advisory_lock('gather_analytics_lock', wait=False) as acquired:
if acquired is False:
logger.debug('Not gathering analytics, another task holds lock')
return
try:
tgz = analytics.gather()
if not tgz:
return
logger.info('gathered analytics: {}'.format(tgz))
analytics.ship(tgz)
settings.AUTOMATION_ANALYTICS_LAST_GATHER = gather_time
finally:
if os.path.exists(tgz):
os.remove(tgz)
@task(queue=get_local_queuename)
@@ -499,7 +514,7 @@ def awx_periodic_scheduler():
invalid_license = False
try:
access_registry[Job](None).check_license()
access_registry[Job](None).check_license(quiet=True)
except PermissionDenied as e:
invalid_license = e
@@ -569,7 +584,7 @@ def handle_work_error(task_id, *args, **kwargs):
first_instance = instance
first_instance_type = each_task['type']
if instance.celery_task_id != task_id and not instance.cancel_flag:
if instance.celery_task_id != task_id and not instance.cancel_flag and not instance.status == 'successful':
instance.status = 'failed'
instance.failed = True
if not instance.job_explanation:
@@ -588,7 +603,7 @@ def handle_work_error(task_id, *args, **kwargs):
@task()
def update_inventory_computed_fields(inventory_id, should_update_hosts=True):
def update_inventory_computed_fields(inventory_id):
'''
Signal handler and wrapper around inventory.update_computed_fields to
prevent unnecessary recursive calls.
@@ -599,7 +614,7 @@ def update_inventory_computed_fields(inventory_id, should_update_hosts=True):
return
i = i[0]
try:
i.update_computed_fields(update_hosts=should_update_hosts)
i.update_computed_fields()
except DatabaseError as e:
if 'did not affect any rows' in str(e):
logger.debug('Exiting duplicate update_inventory_computed_fields task.')
@@ -642,7 +657,7 @@ def update_host_smart_inventory_memberships():
logger.exception('Failed to update smart inventory memberships for {}'.format(smart_inventory.pk))
# Update computed fields for changed inventories outside atomic action
for smart_inventory in changed_inventories:
smart_inventory.update_computed_fields(update_groups=False, update_hosts=False)
smart_inventory.update_computed_fields()
@task()
@@ -1130,6 +1145,23 @@ class BaseTask(object):
else:
event_data['host_name'] = ''
event_data['host_id'] = ''
if isinstance(self, RunProjectUpdate):
# it's common for Ansible's SCM modules to print
# error messages on failure that contain the plaintext
# basic auth credentials (username + password)
# it's also common for the nested event data itself (['res']['...'])
# to contain unredacted text on failure
# this is a _little_ expensive to filter
# with regex, but project updates don't have many events,
# so it *should* have a negligible performance impact
try:
event_data_json = json.dumps(event_data)
event_data_json = UriCleaner.remove_sensitive(event_data_json)
event_data = json.loads(event_data_json)
except json.JSONDecodeError:
pass
should_write_event = False
event_data.setdefault(self.event_data_key, self.instance.id)
self.dispatcher.dispatch(event_data)
@@ -1148,7 +1180,11 @@ class BaseTask(object):
'''
Ansible runner callback to tell the job when/if it is canceled
'''
self.instance = self.update_model(self.instance.pk)
unified_job_id = self.instance.pk
self.instance = self.update_model(unified_job_id)
if not self.instance:
logger.error('unified job {} was deleted while running, canceling'.format(unified_job_id))
return True
if self.instance.cancel_flag or self.instance.status == 'canceled':
cancel_wait = (now() - self.instance.modified).seconds if self.instance.modified else 0
if cancel_wait > 5:
@@ -1656,8 +1692,12 @@ class RunJob(BaseTask):
args.append('--vault-id')
args.append('{}@prompt'.format(vault_id))
if job.forks: # FIXME: Max limit?
args.append('--forks=%d' % job.forks)
if job.forks:
if settings.MAX_FORKS > 0 and job.forks > settings.MAX_FORKS:
logger.warning(f'Maximum number of forks ({settings.MAX_FORKS}) exceeded.')
args.append('--forks=%d' % settings.MAX_FORKS)
else:
args.append('--forks=%d' % job.forks)
if job.force_handlers:
args.append('--force-handlers')
if job.limit:
@@ -1769,7 +1809,7 @@ class RunJob(BaseTask):
current_revision = git_repo.head.commit.hexsha
if desired_revision == current_revision:
job_revision = desired_revision
logger.info('Skipping project sync for {} because commit is locally available'.format(job.log_format))
logger.debug('Skipping project sync for {} because commit is locally available'.format(job.log_format))
else:
sync_needs = all_sync_needs
except (ValueError, BadGitName):
@@ -1868,7 +1908,8 @@ class RunJob(BaseTask):
except Inventory.DoesNotExist:
pass
else:
update_inventory_computed_fields.delay(inventory.id, True)
if inventory is not None:
update_inventory_computed_fields.delay(inventory.id)
@task()
@@ -1977,8 +2018,9 @@ class RunProjectUpdate(BaseTask):
continue
env_key = ('ANSIBLE_GALAXY_SERVER_{}_{}'.format(server.get('id', 'unnamed'), key)).upper()
env[env_key] = server[key]
# now set the precedence of galaxy servers
env['ANSIBLE_GALAXY_SERVER_LIST'] = ','.join([server.get('id', 'unnamed') for server in galaxy_servers])
if galaxy_servers:
# now set the precedence of galaxy servers
env['ANSIBLE_GALAXY_SERVER_LIST'] = ','.join([server.get('id', 'unnamed') for server in galaxy_servers])
return env
def _build_scm_url_extra_vars(self, project_update):
@@ -2851,4 +2893,4 @@ def deep_copy_model_obj(
), permission_check_func[2])
permission_check_func(creater, copy_mapping.values())
if isinstance(new_obj, Inventory):
update_inventory_computed_fields.delay(new_obj.id, True)
update_inventory_computed_fields.delay(new_obj.id)

View File
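
Among the changes above, requested job forks are now clamped to a MAX_FORKS setting (with 0 meaning no limit) instead of being passed through unchecked. A standalone sketch of that clamping logic; MAX_FORKS here is a plain argument rather than a Django setting:

def forks_args(requested_forks, max_forks):
    args = []
    if requested_forks:
        if max_forks > 0 and requested_forks > max_forks:
            # the real code logs a warning here before clamping
            args.append('--forks=%d' % max_forks)
        else:
            args.append('--forks=%d' % requested_forks)
    return args

print(forks_args(50, 10))  # ['--forks=10']
print(forks_args(5, 10))   # ['--forks=5']
print(forks_args(5, 0))    # ['--forks=5'] -- cap disabled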

@@ -10,6 +10,8 @@ group_patterns = foo_group_patterns
want_facts = True
want_hostcollections = True
group_prefix = foo_group_prefix
want_ansible_ssh_host = True
rich_params = True
[cache]
path = /tmp

View File

@@ -2,6 +2,9 @@ from django.db import connection
from django.db.models.signals import post_migrate
from django.apps import apps
from django.conf import settings
from unittest import mock
import contextlib
def app_post_migration(sender, app_config, **kwargs):
@@ -23,3 +26,13 @@ if settings.DATABASES['default']['ENGINE'] == 'django.db.backends.sqlite3':
@contextlib.contextmanager
def immediate_on_commit():
"""
Context manager executing transaction.on_commit() hooks immediately as
if the connection was in auto-commit mode.
"""
def on_commit(func):
func()
with mock.patch('django.db.connection.on_commit', side_effect=on_commit) as patch:
yield patch

View File

@@ -599,9 +599,9 @@ class TestControlledBySCM:
delete(inv_src.get_absolute_url(), admin_user, expect=204)
assert scm_inventory.inventory_sources.count() == 0
def test_adding_inv_src_ok(self, post, scm_inventory, admin_user):
def test_adding_inv_src_ok(self, post, scm_inventory, project, admin_user):
post(reverse('api:inventory_inventory_sources_list', kwargs={'pk': scm_inventory.id}),
{'name': 'new inv src', 'update_on_project_update': False, 'source': 'scm', 'overwrite_vars': True},
{'name': 'new inv src', 'source_project': project.pk, 'update_on_project_update': False, 'source': 'scm', 'overwrite_vars': True},
admin_user, expect=201)
def test_adding_inv_src_prohibited(self, post, scm_inventory, project, admin_user):

View File

@@ -153,7 +153,8 @@ def test_summary_fields_recent_jobs(job_template, admin_user, get):
'id': job.id,
'status': 'failed',
'finished': job.finished,
'type': 'job'
'canceled_on': None,
'type': 'job'
} for job in jobs[-10:][::-1]]

View File

@@ -264,18 +264,6 @@ def test_job_launch_fails_without_credential_access(job_template_prompts, runtim
dict(credentials=runtime_data['credentials']), rando, expect=403)
@pytest.mark.django_db
@pytest.mark.job_runtime_vars
def test_job_block_scan_job_type_change(job_template_prompts, post, admin_user):
job_template = job_template_prompts(True)
# Assure that changing the type of a scan job blocks the launch
response = post(reverse('api:job_template_launch', kwargs={'pk':job_template.pk}),
dict(job_type='scan'), admin_user, expect=400)
assert 'job_type' in response.data
@pytest.mark.django_db
def test_job_launch_JT_with_validation(machine_credential, credential, deploy_jobtemplate):
deploy_jobtemplate.extra_vars = '{"job_template_var": 3}'

View File

@@ -118,6 +118,22 @@ def test_extra_credential_unique_type_xfail(get, post, organization_factory, job
assert response.data.get('count') == 1
@pytest.mark.django_db
def test_create_with_forks_exceeding_maximum_xfail(alice, post, project, inventory, settings):
project.use_role.members.add(alice)
inventory.use_role.members.add(alice)
settings.MAX_FORKS = 10
response = post(reverse('api:job_template_list'), {
'name': 'Some name',
'project': project.id,
'inventory': inventory.id,
'playbook': 'helloworld.yml',
'forks': 11,
}, alice)
assert response.status_code == 400
assert 'Maximum number of forks (10) exceeded' in str(response.data)
@pytest.mark.django_db
def test_attach_extra_credential(get, post, organization_factory, job_template_factory, credential):
objs = organization_factory("org", superusers=['admin'])

View File

@@ -1,8 +1,6 @@
import pytest
import base64
import contextlib
import json
from unittest import mock
from django.db import connection
from django.test.utils import override_settings
@@ -12,22 +10,11 @@ from awx.main.utils.encryption import decrypt_value, get_encryption_key
from awx.api.versioning import reverse, drf_reverse
from awx.main.models.oauth import (OAuth2Application as Application,
OAuth2AccessToken as AccessToken)
from awx.main.tests.functional import immediate_on_commit
from awx.sso.models import UserEnterpriseAuth
from oauth2_provider.models import RefreshToken
@contextlib.contextmanager
def immediate_on_commit():
"""
Context manager executing transaction.on_commit() hooks immediately as
if the connection was in auto-commit mode.
"""
def on_commit(func):
func()
with mock.patch('django.db.connection.on_commit', side_effect=on_commit) as patch:
yield patch
@pytest.mark.django_db
def test_personal_access_token_creation(oauth_application, post, alice):
url = drf_reverse('api:oauth_authorization_root_view') + 'token/'

View File

@@ -365,3 +365,77 @@ def test_zoneinfo(get, admin_user):
url = reverse('api:schedule_zoneinfo')
r = get(url, admin_user, expect=200)
assert {'name': 'America/New_York'} in r.data
@pytest.mark.django_db
def test_normal_user_can_create_jt_schedule(options, post, project, inventory, alice):
jt = JobTemplate.objects.create(
name='test-jt',
project=project,
playbook='helloworld.yml',
inventory=inventory
)
jt.save()
url = reverse('api:schedule_list')
# can't create a schedule on the JT because we don't have execute rights
params = {
'name': 'My Example Schedule',
'rrule': RRULE_EXAMPLE,
'unified_job_template': jt.id,
}
assert 'POST' not in options(url, user=alice).data['actions'].keys()
post(url, params, alice, expect=403)
# now we can, because we're allowed to execute the JT
jt.execute_role.members.add(alice)
assert 'POST' in options(url, user=alice).data['actions'].keys()
post(url, params, alice, expect=201)
@pytest.mark.django_db
def test_normal_user_can_create_project_schedule(options, post, project, alice):
url = reverse('api:schedule_list')
# can't create a schedule on the project because we don't have update rights
params = {
'name': 'My Example Schedule',
'rrule': RRULE_EXAMPLE,
'unified_job_template': project.id,
}
assert 'POST' not in options(url, user=alice).data['actions'].keys()
post(url, params, alice, expect=403)
# use role does *not* grant the ability to schedule
project.use_role.members.add(alice)
assert 'POST' not in options(url, user=alice).data['actions'].keys()
post(url, params, alice, expect=403)
# now we can, because we're allowed to update the project
project.update_role.members.add(alice)
assert 'POST' in options(url, user=alice).data['actions'].keys()
post(url, params, alice, expect=201)
@pytest.mark.django_db
def test_normal_user_can_create_inventory_update_schedule(options, post, inventory_source, alice):
url = reverse('api:schedule_list')
# can't create a schedule on the inventory source because we don't have update rights
params = {
'name': 'My Example Schedule',
'rrule': RRULE_EXAMPLE,
'unified_job_template': inventory_source.id,
}
assert 'POST' not in options(url, user=alice).data['actions'].keys()
post(url, params, alice, expect=403)
# use role does *not* grant the ability to schedule
inventory_source.inventory.use_role.members.add(alice)
assert 'POST' not in options(url, user=alice).data['actions'].keys()
post(url, params, alice, expect=403)
# now we can, because we're allowed to update the inventory
inventory_source.inventory.update_role.members.add(alice)
assert 'POST' in options(url, user=alice).data['actions'].keys()
post(url, params, alice, expect=201)

View File

@@ -125,9 +125,9 @@ def project_playbooks():
@pytest.fixture
def run_computed_fields_right_away(request):
def run_me(inventory_id, should_update_hosts=True):
def run_me(inventory_id):
i = Inventory.objects.get(id=inventory_id)
i.update_computed_fields(update_hosts=should_update_hosts)
i.update_computed_fields()
mocked = mock.patch(
'awx.main.signals.update_inventory_computed_fields.delay',

View File

@@ -11,6 +11,7 @@ from awx.main.signals import (
# AWX models
from awx.main.models.organization import Organization
from awx.main.models import ActivityStream, Job
from awx.main.tests.functional import immediate_on_commit
@pytest.mark.django_db
@@ -34,9 +35,10 @@ class TestComputedFields:
def test_computed_fields_normal_use(self, mocker, inventory):
job = Job.objects.create(name='fake-job', inventory=inventory)
with mocker.patch.object(update_inventory_computed_fields, 'delay'):
job.delete()
update_inventory_computed_fields.delay.assert_called_once_with(inventory.id, True)
with immediate_on_commit():
with mocker.patch.object(update_inventory_computed_fields, 'delay'):
job.delete()
update_inventory_computed_fields.delay.assert_called_once_with(inventory.id)
def test_disable_computed_fields(self, mocker, inventory):
job = Job.objects.create(name='fake-job', inventory=inventory)

View File

@@ -23,8 +23,11 @@ class TestJobNotificationMixin(object):
'finished': bool,
'force_handlers': bool,
'forks': int,
'host_status_counts': {'skipped': int, 'ok': int, 'changed': int,
'failures': int, 'dark': int},
'host_status_counts': {
'skipped': int, 'ok': int, 'changed': int,
'failures': int, 'dark': int, 'processed': int,
'rescued': int, 'failed': bool
},
'id': int,
'job_explanation': str,
'job_slice_count': int,
@@ -36,7 +39,6 @@ class TestJobNotificationMixin(object):
'modified': datetime.datetime,
'name': str,
'playbook': str,
'playbook_counts': {'play_count': int, 'task_count': int},
'scm_revision': str,
'skip_tags': str,
'start_at_task': str,
@@ -48,7 +50,6 @@ class TestJobNotificationMixin(object):
'username': str},
'instance_group': {'id': int, 'name': str},
'inventory': {'description': str,
'groups_with_active_failures': int,
'has_active_failures': bool,
'has_inventory_sources': bool,
'hosts_with_active_failures': int,
@@ -69,17 +70,10 @@ class TestJobNotificationMixin(object):
'name': str,
'scm_type': str,
'status': str},
'project_update': {'id': int, 'name': str, 'description': str, 'status': str, 'failed': bool},
'unified_job_template': {'description': str,
'id': int,
'name': str,
'unified_job_type': str},
'source_workflow_job': {'description': str,
'elapsed': float,
'failed': bool,
'id': int,
'name': str,
'status': str}},
'unified_job_type': str}},
'timeout': int,
'type': str,

View File

@@ -283,13 +283,13 @@ class TestTaskImpact:
def test_limit_task_impact(self, job_host_limit, run_computed_fields_right_away):
job = job_host_limit(5, 2)
job.inventory.refresh_from_db() # FIXME: computed fields operates on reloaded inventory
job.inventory.update_computed_fields()
assert job.inventory.total_hosts == 5
assert job.task_impact == 2 + 1 # forks becomes constraint
def test_host_task_impact(self, job_host_limit, run_computed_fields_right_away):
job = job_host_limit(3, 5)
job.inventory.refresh_from_db() # FIXME: computed fields operates on reloaded inventory
job.inventory.update_computed_fields()
assert job.task_impact == 3 + 1 # hosts becomes constraint
def test_shard_task_impact(self, slice_job_factory, run_computed_fields_right_away):
@@ -304,6 +304,7 @@ class TestTaskImpact:
len(jobs[0].inventory.get_script_data(slice_number=i + 1, slice_count=3)['all']['hosts'])
for i in range(3)
] == [1, 1, 1]
jobs[0].inventory.update_computed_fields()
assert [job.task_impact for job in jobs] == [2, 2, 2] # plus one base task impact
# Uneven distribution - first job takes the extra host
jobs[0].inventory.hosts.create(name='remainder_foo')
@@ -311,5 +312,5 @@ class TestTaskImpact:
len(jobs[0].inventory.get_script_data(slice_number=i + 1, slice_count=3)['all']['hosts'])
for i in range(3)
] == [2, 1, 1]
jobs[0].inventory.refresh_from_db() # FIXME: computed fields operates on reloaded inventory
jobs[0].inventory.update_computed_fields()
assert [job.task_impact for job in jobs] == [3, 2, 2]

View File

@@ -67,7 +67,7 @@ def test_multi_group_with_shared_dependency(instance_factory, default_instance_g
pu = p.project_updates.first()
TaskManager.start_task.assert_called_once_with(pu,
default_instance_group,
[j1],
[j1,j2],
default_instance_group.instances.all()[0])
pu.finished = pu.created + timedelta(seconds=1)
pu.status = "successful"
@@ -193,7 +193,7 @@ def test_instance_group_basic_policies(instance_factory, instance_group_factory)
ig2 = InstanceGroup.objects.get(id=ig2.id)
ig3 = InstanceGroup.objects.get(id=ig3.id)
assert len(ig0.instances.all()) == 1
assert i0 in ig0.instances.all()
assert i0 in ig0.instances.all()
assert len(InstanceGroup.objects.get(id=ig1.id).instances.all()) == 2
assert i1 in ig1.instances.all()
assert i2 in ig1.instances.all()

View File

@@ -6,7 +6,7 @@ from datetime import timedelta
from awx.main.scheduler import TaskManager
from awx.main.scheduler.dependency_graph import DependencyGraph
from awx.main.utils import encrypt_field
from awx.main.models import WorkflowJobTemplate, JobTemplate
from awx.main.models import WorkflowJobTemplate, JobTemplate, Job
@pytest.mark.django_db
@@ -307,8 +307,8 @@ def test_shared_dependencies_launch(default_instance_group, job_template_factory
TaskManager().schedule()
pu = p.project_updates.first()
iu = ii.inventory_updates.first()
TaskManager.start_task.assert_has_calls([mock.call(pu, default_instance_group, [iu, j1], instance),
mock.call(iu, default_instance_group, [pu, j1], instance)])
TaskManager.start_task.assert_has_calls([mock.call(iu, default_instance_group, [j1, j2, pu], instance),
mock.call(pu, default_instance_group, [j1, j2, iu], instance)])
pu.status = "successful"
pu.finished = pu.created + timedelta(seconds=1)
pu.save()
@@ -383,3 +383,35 @@ def test_job_not_blocking_inventory_update(default_instance_group, job_template_
dependency_graph = DependencyGraph(None)
dependency_graph.add_job(job)
assert not dependency_graph.is_job_blocked(inventory_update)
@pytest.mark.django_db
def test_generate_dependencies_only_once(job_template_factory):
objects = job_template_factory('jt', organization='org1')
job = objects.job_template.create_job()
job.status = "pending"
job.name = "job_gen_dep"
job.save()
with mock.patch("awx.main.scheduler.TaskManager.start_task"):
# job starts with dependencies_processed as False
assert not job.dependencies_processed
# run one cycle of ._schedule() to generate dependencies
TaskManager()._schedule()
# make sure dependencies_processed is now True
job = Job.objects.filter(name="job_gen_dep")[0]
assert job.dependencies_processed
# Run ._schedule() again, but make sure .generate_dependencies() is not
# called with job in the argument list
tm = TaskManager()
tm.generate_dependencies = mock.MagicMock()
tm._schedule()
# .call_args is tuple, (positional_args, kwargs), [0][0] then is
# the first positional arg, i.e. the first argument of
# .generate_dependencies()
assert tm.generate_dependencies.call_args[0][0] == []

View File

@@ -0,0 +1,6 @@
def test_imported_azure_cloud_sdk_vars():
from awx.main.credential_plugins import azure_kv
assert len(azure_kv.clouds) > 0
assert all([hasattr(c, 'name') for c in azure_kv.clouds])
assert all([hasattr(c, 'suffixes') for c in azure_kv.clouds])
assert all([hasattr(c.suffixes, 'keyvault_dns') for c in azure_kv.clouds])

View File

@@ -60,7 +60,11 @@ INI_TEST_VARS = {
'satellite6': {
'satellite6_group_patterns': 'foo_group_patterns',
'satellite6_group_prefix': 'foo_group_prefix',
'satellite6_want_hostcollections': True
'satellite6_want_hostcollections': True,
'satellite6_want_ansible_ssh_host': True,
'satellite6_rich_params': True,
'satellite6_want_facts': True
},
'cloudforms': {
'version': '2.4',

View File

@@ -57,7 +57,7 @@ def test_empty_in(empty_value):
@pytest.mark.parametrize(u"valid_value", [u'foo', u'foo,'])
def test_valid_in(valid_value):
field_lookup = FieldLookupBackend()
value, new_lookup = field_lookup.value_to_python(JobTemplate, 'project__name__in', valid_value)
value, new_lookup, _ = field_lookup.value_to_python(JobTemplate, 'project__name__in', valid_value)
assert 'foo' in value

View File

@@ -89,6 +89,27 @@ def test_finish_job_fact_cache_with_existing_data(job, hosts, inventory, mocker,
hosts[1].save.assert_called_once_with()
def test_finish_job_fact_cache_with_malformed_fact(job, hosts, inventory, mocker, tmpdir):
fact_cache = os.path.join(tmpdir, 'facts')
modified_times = {}
job.start_job_fact_cache(fact_cache, modified_times, 0)
for h in hosts:
h.save = mocker.Mock()
for h in hosts:
filepath = os.path.join(fact_cache, h.name)
with open(filepath, 'w') as f:
json.dump({'ansible_local': {'insights': 'this is an unexpected error from ansible'}}, f)
new_modification_time = time.time() + 3600
os.utime(filepath, (new_modification_time, new_modification_time))
job.finish_job_fact_cache(fact_cache, modified_times)
for h in hosts:
assert h.insights_system_id is None
def test_finish_job_fact_cache_with_bad_data(job, hosts, inventory, mocker, tmpdir):
fact_cache = os.path.join(tmpdir, 'facts')
modified_times = {}

View File

@@ -171,6 +171,7 @@ class TestWorkflowJobCreate:
with mocker.patch('awx.main.models.WorkflowJobNode.objects.create', mock_create):
wfjt_node_no_prompts.create_workflow_job_node(workflow_job=workflow_job_unit)
mock_create.assert_called_once_with(
all_parents_must_converge=False,
extra_data={},
survey_passwords={},
char_prompts=wfjt_node_no_prompts.char_prompts,
@@ -185,6 +186,7 @@ class TestWorkflowJobCreate:
workflow_job=workflow_job_unit
)
mock_create.assert_called_once_with(
all_parents_must_converge=False,
extra_data={},
survey_passwords={},
char_prompts=wfjt_node_with_prompts.char_prompts,

View File

@@ -19,6 +19,7 @@ class WorkflowNode(object):
self.job = job
self.do_not_run = do_not_run
self.unified_job_template = unified_job_template
self.all_parents_must_converge = False
@pytest.fixture
@@ -94,7 +95,7 @@ class TestDNR():
(g, nodes) = workflow_dag_1
r'''
S0
0
/\
S / \
/ \
@@ -113,7 +114,7 @@ class TestDNR():
assert 0 == len(do_not_run_nodes)
r'''
S0
0
/\
S / \
/ \
@@ -133,6 +134,260 @@ class TestDNR():
assert nodes[3] == do_not_run_nodes[0]
class TestAllWorkflowNodes():
# test workflow convergence is functioning as expected
@pytest.fixture
def simple_all_convergence(self, wf_node_generator):
g = WorkflowDAG()
nodes = [wf_node_generator() for i in range(4)]
for n in nodes:
g.add_node(n)
r'''
0
/\
S / \ S
/ \
1 2
\ /
F \ / S
\/
3
'''
g.add_edge(nodes[0], nodes[1], "success_nodes")
g.add_edge(nodes[0], nodes[2], "success_nodes")
g.add_edge(nodes[1], nodes[3], "failure_nodes")
g.add_edge(nodes[2], nodes[3], "success_nodes")
nodes[3].all_parents_must_converge = True
nodes[0].job = Job(status='successful')
nodes[1].job = Job(status='failed')
nodes[2].job = Job(status='successful')
return (g, nodes)
def test_simple_all_convergence(self, simple_all_convergence):
(g, nodes) = simple_all_convergence
dnr_nodes = g.mark_dnr_nodes()
assert 0 == len(dnr_nodes), "no nodes should be marked DNR"
nodes_to_run = g.bfs_nodes_to_run()
assert 1 == len(nodes_to_run), "Node 3, and only node 3, should be chosen to run"
assert nodes[3] == nodes_to_run[0], "Only node 3 should be chosen to run"
@pytest.fixture
def workflow_all_converge_1(self, wf_node_generator):
g = WorkflowDAG()
nodes = [wf_node_generator() for i in range(3)]
for n in nodes:
g.add_node(n)
r'''
0
|\ F
| \
S| 1
| /
|/ A
2
'''
g.add_edge(nodes[0], nodes[1], "failure_nodes")
g.add_edge(nodes[0], nodes[2], "success_nodes")
g.add_edge(nodes[1], nodes[2], "always_nodes")
nodes[2].all_parents_must_converge = True
nodes[0].job = Job(status='successful')
return (g, nodes)
def test_all_converge_edge_case_1(self, workflow_all_converge_1):
(g, nodes) = workflow_all_converge_1
dnr_nodes = g.mark_dnr_nodes()
assert 2 == len(dnr_nodes), "node[1] and node[2] should be marked DNR"
assert nodes[1] == dnr_nodes[0], "Node 1 should be marked DNR"
assert nodes[2] == dnr_nodes[1], "Node 2 should be marked DNR"
nodes_to_run = g.bfs_nodes_to_run()
assert 0 == len(nodes_to_run), "No nodes should be chosen to run"
@pytest.fixture
def workflow_all_converge_2(self, wf_node_generator):
"""The ordering of _1 and this test, _2, is _slightly_ different.
The hope is that topological sorting results in 2 being processed before 3
and/or 3 before 2.
"""
g = WorkflowDAG()
nodes = [wf_node_generator() for i in range(3)]
for n in nodes:
g.add_node(n)
r'''
0
|\ S
| \
F| 1
| /
|/ A
2
'''
g.add_edge(nodes[0], nodes[1], "success_nodes")
g.add_edge(nodes[0], nodes[2], "failure_nodes")
g.add_edge(nodes[1], nodes[2], "always_nodes")
nodes[2].all_parents_must_converge = True
nodes[0].job = Job(status='successful')
return (g, nodes)
def test_all_converge_edge_case_2(self, workflow_all_converge_2):
(g, nodes) = workflow_all_converge_2
dnr_nodes = g.mark_dnr_nodes()
assert 1 == len(dnr_nodes), "1 and only 1 node should be marked DNR"
assert nodes[2] == dnr_nodes[0], "Node 3 should be marked DNR"
nodes_to_run = g.bfs_nodes_to_run()
assert 1 == len(nodes_to_run), "Node 2, and only node 2, should be chosen to run"
assert nodes[1] == nodes_to_run[0], "Only node 2 should be chosen to run"
@pytest.fixture
def workflow_all_converge_will_run(self, wf_node_generator):
g = WorkflowDAG()
nodes = [wf_node_generator() for i in range(4)]
for n in nodes:
g.add_node(n)
r'''
0 1 2
S \ F | / S
\ | /
\ | /
\|/
|
3
'''
g.add_edge(nodes[0], nodes[3], "success_nodes")
g.add_edge(nodes[1], nodes[3], "failure_nodes")
g.add_edge(nodes[2], nodes[3], "success_nodes")
nodes[3].all_parents_must_converge = True
nodes[0].job = Job(status='successful')
nodes[1].job = Job(status='failed')
nodes[2].job = Job(status='running')
return (g, nodes)
def test_workflow_all_converge_will_run(self, workflow_all_converge_will_run):
(g, nodes) = workflow_all_converge_will_run
dnr_nodes = g.mark_dnr_nodes()
assert 0 == len(dnr_nodes), "No nodes should get marked DNR"
nodes_to_run = g.bfs_nodes_to_run()
assert 0 == len(nodes_to_run), "No nodes should run yet"
nodes[2].job.status = 'successful'
nodes_to_run = g.bfs_nodes_to_run()
assert 1 == len(nodes_to_run), "1 and only 1 node should want to run"
assert nodes[3] == nodes_to_run[0], "Convergence node should be chosen to run"
@pytest.fixture
def workflow_all_converge_dnr(self, wf_node_generator):
g = WorkflowDAG()
nodes = [wf_node_generator() for i in range(4)]
for n in nodes:
g.add_node(n)
r'''
0 1 2
S \ F | / F
\ | /
\ | /
\|/
|
3
'''
g.add_edge(nodes[0], nodes[3], "success_nodes")
g.add_edge(nodes[1], nodes[3], "failure_nodes")
g.add_edge(nodes[2], nodes[3], "failure_nodes")
nodes[3].all_parents_must_converge = True
nodes[0].job = Job(status='successful')
nodes[1].job = Job(status='running')
nodes[2].job = Job(status='failed')
return (g, nodes)
def test_workflow_all_converge_while_parent_runs(self, workflow_all_converge_dnr):
(g, nodes) = workflow_all_converge_dnr
dnr_nodes = g.mark_dnr_nodes()
assert 0 == len(dnr_nodes), "No nodes should get marked DNR"
nodes_to_run = g.bfs_nodes_to_run()
assert 0 == len(nodes_to_run), "No nodes should run yet"
def test_workflow_all_converge_with_incorrect_parent(self, workflow_all_converge_dnr):
# Another tick of the scheduler
(g, nodes) = workflow_all_converge_dnr
nodes[1].job.status = 'successful'
dnr_nodes = g.mark_dnr_nodes()
assert 1 == len(dnr_nodes), "1 and only 1 node should be marked DNR"
assert nodes[3] == dnr_nodes[0], "Convergence node should be marked DNR"
nodes_to_run = g.bfs_nodes_to_run()
assert 0 == len(nodes_to_run), "Convergence node should NOT be chosen to run because it is DNR"
def test_workflow_all_converge_runs(self, workflow_all_converge_dnr):
# Trick the scheduler again to make sure the convergence node actually runs
(g, nodes) = workflow_all_converge_dnr
nodes[1].job.status = 'failed'
dnr_nodes = g.mark_dnr_nodes()
assert 0 == len(dnr_nodes), "No nodes should be marked DNR"
nodes_to_run = g.bfs_nodes_to_run()
assert 1 == len(nodes_to_run), "Convergence node should be chosen to run"
@pytest.fixture
def workflow_all_converge_deep_dnr_tree(self, wf_node_generator):
g = WorkflowDAG()
nodes = [wf_node_generator() for i in range(7)]
for n in nodes:
g.add_node(n)
r'''
0 1 2
\ | /
S \ S| / F
\ | /
\|/
|
3
/\
S / \ S
/ \
4| | 5
\ /
S \ / S
\/
6
'''
g.add_edge(nodes[0], nodes[3], "success_nodes")
g.add_edge(nodes[1], nodes[3], "success_nodes")
g.add_edge(nodes[2], nodes[3], "failure_nodes")
g.add_edge(nodes[3], nodes[4], "success_nodes")
g.add_edge(nodes[3], nodes[5], "success_nodes")
g.add_edge(nodes[4], nodes[6], "success_nodes")
g.add_edge(nodes[5], nodes[6], "success_nodes")
nodes[3].all_parents_must_converge = True
nodes[4].all_parents_must_converge = True
nodes[5].all_parents_must_converge = True
nodes[6].all_parents_must_converge = True
nodes[0].job = Job(status='successful')
nodes[1].job = Job(status='successful')
nodes[2].job = Job(status='successful')
return (g, nodes)
def test_workflow_all_converge_deep_dnr_tree(self, workflow_all_converge_deep_dnr_tree):
(g, nodes) = workflow_all_converge_deep_dnr_tree
dnr_nodes = g.mark_dnr_nodes()
assert 4 == len(dnr_nodes), "All nodes w/ no jobs should be marked DNR"
assert nodes[3] in dnr_nodes
assert nodes[4] in dnr_nodes
assert nodes[5] in dnr_nodes
assert nodes[6] in dnr_nodes
nodes_to_run = g.bfs_nodes_to_run()
assert 0 == len(nodes_to_run), "All non-run nodes should be DNR and NOT candidates to run"
class TestIsWorkflowDone():
@pytest.fixture
def workflow_dag_2(self, workflow_dag_1):
@@ -212,8 +467,8 @@ class TestIsWorkflowDone():
assert g.is_workflow_done() is True
assert g.has_workflow_failed() == \
(True, smart_text(_("No error handle path for workflow job node(s) [({},{})] workflow job node(s)"
" missing unified job template and error handle path [].").format(nodes[2].id, nodes[2].job.status)))
(True, smart_text(_("No error handling path for workflow job node(s) [({},{})]. Workflow job node(s)"
" missing unified job template and error handling path [].").format(nodes[2].id, nodes[2].job.status)))
def test_is_workflow_done_no_unified_job_tempalte_end(self, workflow_dag_failed):
(g, nodes) = workflow_dag_failed
@@ -222,8 +477,8 @@ class TestIsWorkflowDone():
assert g.is_workflow_done() is True
assert g.has_workflow_failed() == \
(True, smart_text(_("No error handle path for workflow job node(s) [] workflow job node(s) missing"
" unified job template and error handle path [{}].").format(nodes[2].id)))
(True, smart_text(_("No error handling path for workflow job node(s) []. Workflow job node(s) missing"
" unified job template and error handling path [{}].").format(nodes[2].id)))
def test_is_workflow_done_no_unified_job_tempalte_begin(self, workflow_dag_1):
(g, nodes) = workflow_dag_1
@@ -233,22 +488,22 @@ class TestIsWorkflowDone():
assert g.is_workflow_done() is True
assert g.has_workflow_failed() == \
(True, smart_text(_("No error handle path for workflow job node(s) [] workflow job node(s) missing"
" unified job template and error handle path [{}].").format(nodes[0].id)))
(True, smart_text(_("No error handling path for workflow job node(s) []. Workflow job node(s) missing"
" unified job template and error handling path [{}].").format(nodes[0].id)))
def test_canceled_should_fail(self, workflow_dag_canceled):
(g, nodes) = workflow_dag_canceled
assert g.has_workflow_failed() == \
(True, smart_text(_("No error handle path for workflow job node(s) [({},{})] workflow job node(s)"
" missing unified job template and error handle path [].").format(nodes[0].id, nodes[0].job.status)))
(True, smart_text(_("No error handling path for workflow job node(s) [({},{})]. Workflow job node(s)"
" missing unified job template and error handling path [].").format(nodes[0].id, nodes[0].job.status)))
def test_failure_should_fail(self, workflow_dag_failure):
(g, nodes) = workflow_dag_failure
assert g.has_workflow_failed() == \
(True, smart_text(_("No error handle path for workflow job node(s) [({},{})] workflow job node(s)"
" missing unified job template and error handle path [].").format(nodes[0].id, nodes[0].job.status)))
(True, smart_text(_("No error handling path for workflow job node(s) [({},{})]. Workflow job node(s)"
" missing unified job template and error handling path [].").format(nodes[0].id, nodes[0].job.status)))
class TestBFSNodesToRun():

View File

@@ -197,36 +197,6 @@ def test_change_jt_sensitive_data(job_template_with_ids, mocker, user_unit):
})
def test_jt_add_scan_job_check(job_template_with_ids, user_unit):
"Assure that permissions to add scan jobs work correctly"
access = JobTemplateAccess(user_unit)
project = job_template_with_ids.project
inventory = job_template_with_ids.inventory
project.use_role = Role()
inventory.use_role = Role()
organization = Organization(name='test-org')
inventory.organization = organization
organization.admin_role = Role()
def mock_get_object(Class, **kwargs):
if Class == Project:
return project
elif Class == Inventory:
return inventory
else:
raise Exception('Item requested has not been mocked')
with mock.patch('awx.main.models.rbac.Role.__contains__', return_value=True):
with mock.patch('awx.main.access.get_object_or_400', mock_get_object):
assert access.can_add({
'project': project.pk,
'inventory': inventory.pk,
'job_type': 'scan'
})
def mock_raise_none(self, add_host=False, feature=None, check_expiration=True):
return None

View File

@@ -152,3 +152,10 @@ def test_uri_scm_cleartext_redact_and_replace(test_data):
# Ensure the host didn't get redacted
assert redacted_str.count(uri.host) == test_data['host_occurrences']
@pytest.mark.timeout(1)
def test_large_string_performance():
length = 100000
redacted = UriCleaner.remove_sensitive('x' * length)
assert len(redacted) == length

View File

@@ -2146,7 +2146,14 @@ class TestInventoryUpdateCredentials(TestJobExecution):
inventory_update.get_cloud_credential = get_cred
inventory_update.get_extra_credentials = mocker.Mock(return_value=[])
inventory_update.source_vars = '{"satellite6_group_patterns": "[a,b,c]", "satellite6_group_prefix": "hey_", "satellite6_want_hostcollections": True}'
inventory_update.source_vars = {
'satellite6_group_patterns': '[a,b,c]',
'satellite6_group_prefix': 'hey_',
'satellite6_want_hostcollections': True,
'satellite6_want_ansible_ssh_host': True,
'satellite6_rich_params': True,
'satellite6_want_facts': False
}
private_data_files = task.build_private_data_files(inventory_update, private_data_dir)
env = task.build_env(inventory_update, private_data_dir, False, private_data_files)
@@ -2159,6 +2166,9 @@ class TestInventoryUpdateCredentials(TestJobExecution):
assert config.get('ansible', 'group_patterns') == '[a,b,c]'
assert config.get('ansible', 'group_prefix') == 'hey_'
assert config.get('ansible', 'want_hostcollections') == 'True'
assert config.get('ansible', 'want_ansible_ssh_host') == 'True'
assert config.get('ansible', 'rich_params') == 'True'
assert config.get('ansible', 'want_facts') == 'False'
def test_cloudforms_source(self, inventory_update, private_data_dir, mocker):
task = tasks.RunInventoryUpdate()
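
The Satellite 6 assertions above read the prefixed satellite6_* source variables back out of an [ansible] section with the prefix stripped and the booleans stringified. A minimal sketch of that mapping is below, using ConfigParser directly; the helper name and the idea that only prefix handling matters here are illustrative assumptions, not the build_private_data_files implementation under test.

# Hypothetical helper for illustration -- not the code under test.
from configparser import ConfigParser

def write_satellite6_section(source_vars):
    """Copy satellite6_* vars into an [ansible] section, prefix stripped."""
    config = ConfigParser()
    config.add_section('ansible')
    for key, value in source_vars.items():
        if key.startswith('satellite6_'):
            # ConfigParser stores strings, so True becomes 'True', etc.
            config.set('ansible', key[len('satellite6_'):], str(value))
    return config

config = write_satellite6_section({
    'satellite6_group_prefix': 'hey_',
    'satellite6_want_facts': False,
})
assert config.get('ansible', 'group_prefix') == 'hey_'
assert config.get('ansible', 'want_facts') == 'False'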

View File

@@ -79,8 +79,8 @@ class mockHost:
@mock.patch('awx.main.utils.filters.get_model', return_value=mockHost())
class TestSmartFilterQueryFromString():
@mock.patch(
'awx.api.filters.get_field_from_path',
lambda model, path: (model, path) # disable field filtering, because a__b isn't a real Host field
'awx.api.filters.get_fields_from_path',
lambda model, path: ([model], path) # disable field filtering, because a__b isn't a real Host field
)
@pytest.mark.parametrize("filter_string,q_expected", [
('facts__facts__blank=""', Q(**{u"facts__facts__blank": u""})),

View File

@@ -106,7 +106,7 @@ def could_be_inventory(project_path, dir_path, filename):
def read_ansible_config(project_path, variables_of_interest):
fnames = ['/etc/ansible/ansible.cfg']
if project_path:
fnames.insert(0, os.path.join(project_path, 'ansible.cfg'))
fnames.append(os.path.join(project_path, 'ansible.cfg'))
values = {}
try:
parser = ConfigParser()

View File

@@ -107,6 +107,17 @@ class LogstashFormatterBase(logging.Formatter):
class LogstashFormatter(LogstashFormatterBase):
def __init__(self, *args, **kwargs):
self.cluster_host_id = settings.CLUSTER_HOST_ID
self.tower_uuid = None
uuid = (
getattr(settings, 'LOG_AGGREGATOR_TOWER_UUID', None) or
getattr(settings, 'INSTALL_UUID', None)
)
if uuid:
self.tower_uuid = uuid
super(LogstashFormatter, self).__init__(*args, **kwargs)
def reformat_data_for_log(self, raw_data, kind=None):
'''
Process dictionaries from various contexts (job events, activity stream
@@ -128,37 +139,6 @@ class LogstashFormatter(LogstashFormatterBase):
data = json.loads(data)
data_for_log = {}
def index_by_name(alist):
"""Takes a list of dictionaries with `name` as a key in each dict
and returns a dictionary indexed by those names"""
adict = {}
for item in alist:
subdict = copy(item)
if 'name' in subdict:
name = subdict.get('name', None)
elif 'path' in subdict:
name = subdict.get('path', None)
if name:
# Logstash v2 can not accept '.' in a name
name = name.replace('.', '_')
adict[name] = subdict
return adict
def convert_to_type(t, val):
if t is float:
val = val[:-1] if val.endswith('s') else val
try:
return float(val)
except ValueError:
return val
elif t is int:
try:
return int(val)
except ValueError:
return val
elif t is str:
return val
if kind == 'job_events':
job_event = raw_data['python_objects']['job_event']
for field_object in job_event._meta.fields:
@@ -198,6 +178,21 @@ class LogstashFormatter(LogstashFormatterBase):
data_for_log['host_name'] = raw_data['host_name']
data_for_log['job_id'] = raw_data['job_id']
elif kind == 'performance':
def convert_to_type(t, val):
if t is float:
val = val[:-1] if val.endswith('s') else val
try:
return float(val)
except ValueError:
return val
elif t is int:
try:
return int(val)
except ValueError:
return val
elif t is str:
return val
request = raw_data['python_objects']['request']
response = raw_data['python_objects']['response']
@@ -231,21 +226,8 @@ class LogstashFormatter(LogstashFormatterBase):
log_kind = record.name[len('awx.analytics.'):]
fields = self.reformat_data_for_log(fields, kind=log_kind)
# General AWX metadata
for log_name, setting_name in [
('type', 'LOG_AGGREGATOR_TYPE'),
('cluster_host_id', 'CLUSTER_HOST_ID'),
('tower_uuid', 'LOG_AGGREGATOR_TOWER_UUID')]:
if hasattr(settings, setting_name):
fields[log_name] = getattr(settings, setting_name, None)
elif log_name == 'type':
fields[log_name] = 'other'
uuid = (
getattr(settings, 'LOG_AGGREGATOR_TOWER_UUID', None) or
getattr(settings, 'INSTALL_UUID', None)
)
if uuid:
fields['tower_uuid'] = uuid
fields['cluster_host_id'] = self.cluster_host_id
fields['tower_uuid'] = self.tower_uuid
return fields
def format(self, record):
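
The hunk above moves the tower_uuid lookup out of the per-record path and into __init__, so the settings are consulted once when the formatter is constructed and then simply copied into each record's fields. A generic sketch of that shape is shown below, using a plain logging.Formatter and invented field values; it illustrates the pattern only, not AWX's formatter.

# Generic sketch: resolve static fields once at formatter construction.
import json
import logging

class StaticFieldsFormatter(logging.Formatter):
    def __init__(self, cluster_host_id=None, tower_uuid=None, **kwargs):
        # Resolved once here instead of on every emitted record.
        self.cluster_host_id = cluster_host_id
        self.tower_uuid = tower_uuid
        super().__init__(**kwargs)

    def format(self, record):
        fields = {
            'message': record.getMessage(),
            'cluster_host_id': self.cluster_host_id,
            'tower_uuid': self.tower_uuid,
        }
        return json.dumps(fields)

handler = logging.StreamHandler()
handler.setFormatter(StaticFieldsFormatter(cluster_host_id='awx-1', tower_uuid='abc-123'))
logging.getLogger('demo').addHandler(handler)
logging.getLogger('demo').warning('hello')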

View File

@@ -4,6 +4,7 @@
# Python
import logging
import json
import os
import requests
import time
import threading
@@ -18,6 +19,7 @@ from django.conf import settings
# requests futures, a dependency used by these handlers
from requests_futures.sessions import FuturesSession
import cachetools
# AWX
from awx.main.utils.formatters import LogstashFormatter
@@ -273,6 +275,16 @@ HANDLER_MAPPING = {
}
TTLCache = cachetools.TTLCache
if 'py.test' in os.environ.get('_', ''):
# don't cache settings in unit tests
class TTLCache(TTLCache):
def __getitem__(self, item):
raise KeyError()
class AWXProxyHandler(logging.Handler):
'''
Handler specific to the AWX external logging feature
@@ -316,6 +328,7 @@ class AWXProxyHandler(logging.Handler):
def get_handler_class(self, protocol):
return HANDLER_MAPPING.get(protocol, AWXNullHandler)
@cachetools.cached(cache=TTLCache(maxsize=1, ttl=3), key=lambda *args, **kw: 'get_handler')
def get_handler(self, custom_settings=None, force_create=False):
new_kwargs = {}
use_settings = custom_settings or settings
@@ -342,10 +355,14 @@ class AWXProxyHandler(logging.Handler):
self._handler.setFormatter(self.formatter)
return self._handler
@cachetools.cached(cache=TTLCache(maxsize=1, ttl=3), key=lambda *args, **kw: 'should_audit')
def should_audit(self):
return settings.LOG_AGGREGATOR_AUDIT
def emit(self, record):
if AWXProxyHandler.thread_local.enabled:
actual_handler = self.get_handler()
if settings.LOG_AGGREGATOR_AUDIT:
if self.should_audit():
self.auditor.setLevel(settings.LOG_AGGREGATOR_LEVEL)
self.auditor.emit(record)
return actual_handler.emit(record)
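
Two pieces combine in the hunk above: @cachetools.cached with a TTLCache(maxsize=1, ttl=3) memoizes get_handler() and should_audit() for a few seconds, so emit() stops rebuilding the handler and re-reading settings on every record, and a test-only TTLCache subclass whose __getitem__ always raises KeyError forces a cache miss so unit tests still see fresh settings. A standalone sketch of the same pattern is below, with a placeholder expensive_settings_lookup; it mirrors the technique rather than the AWX classes.

# Standalone sketch of the caching pattern used above.
import time
import cachetools

TTLCache = cachetools.TTLCache

DISABLE_CACHE = False  # stand-in for the "running under py.test" check
if DISABLE_CACHE:
    class TTLCache(TTLCache):
        def __getitem__(self, item):
            raise KeyError()  # every lookup misses, so nothing is ever served from cache

CALLS = {'count': 0}

@cachetools.cached(cache=TTLCache(maxsize=1, ttl=3), key=lambda: 'settings')
def expensive_settings_lookup():
    CALLS['count'] += 1
    return {'LOG_AGGREGATOR_AUDIT': True}

expensive_settings_lookup()
expensive_settings_lookup()        # served from the cache, no second call
assert CALLS['count'] == 1
time.sleep(3.1)                    # let the 3 second TTL expire
expensive_settings_lookup()
assert CALLS['count'] == 2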

View File

@@ -366,6 +366,7 @@ class VMWareInventory(object):
def _get_instances(self, inkwargs):
''' Make API calls '''
instances = []
si = None
try:
si = SmartConnect(**inkwargs)
except ssl.SSLError as connection_error:

View File

@@ -5,7 +5,6 @@ import os
import re # noqa
import sys
from datetime import timedelta
from celery.schedules import crontab
# global settings
from django.conf import global_settings
@@ -435,13 +434,9 @@ CELERYBEAT_SCHEDULE = {
'schedule': timedelta(seconds=60),
'options': {'expires': 50,}
},
'purge_stdout_files': {
'task': 'awx.main.tasks.purge_old_stdout_files',
'schedule': timedelta(days=7)
},
'gather_analytics': {
'task': 'awx.main.tasks.gather_analytics',
'schedule': crontab(hour='*/6')
'schedule': timedelta(minutes=5)
},
'task_manager': {
'task': 'awx.main.scheduler.tasks.run_task_manager',
@@ -455,7 +450,6 @@ CELERYBEAT_SCHEDULE = {
},
# 'isolated_heartbeat': set up at the end of production.py and development.py
}
AWX_INCONSISTENT_TASK_INTERVAL = 60 * 3
AWX_CELERY_QUEUES_STATIC = [
CELERY_DEFAULT_QUEUE,
@@ -665,6 +659,9 @@ PENDO_TRACKING_STATE = "off"
# Note: This setting may be overridden by database settings.
INSIGHTS_TRACKING_STATE = False
# Last gather date for Analytics
AUTOMATION_ANALYTICS_LAST_GATHER = None
AUTOMATION_ANALYTICS_INTERVAL = 14400
# Default list of modules allowed for ad hoc commands.
# Note: This setting may be overridden by database settings.
@@ -1142,8 +1139,7 @@ LOGGING = {
'handlers': ['null']
},
'awx.main.commands.run_callback_receiver': {
'handlers': ['callback_receiver'],
'level': 'INFO' # in debug mode, includes full callback data
'handlers': ['callback_receiver'], # level handled by dynamic_level_filter
},
'awx.main.dispatch': {
'handlers': ['dispatcher'],
@@ -1221,6 +1217,9 @@ AWX_REQUEST_PROFILE = False
#
AWX_REQUEST_PROFILE_WITH_DOT = False
# Allow profiling callback workers via SIGUSR1
AWX_CALLBACK_PROFILE = False
# Delete temporary directories created to store playbook run-time
AWX_CLEANUP_PATHS = True
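
With this hunk the gather_analytics beat entry fires every five minutes, while the new AUTOMATION_ANALYTICS_LAST_GATHER and AUTOMATION_ANALYTICS_INTERVAL (14400 seconds, i.e. four hours) defaults give the task something to gate on, so a frequent beat need not mean frequent gathering. The sketch below is an assumption about how such gating can be expressed, with invented names (should_gather); it is not the actual awx.main.tasks.gather_analytics body.

# Hypothetical gating sketch -- invented helper, not AWX's task code.
from datetime import datetime, timedelta, timezone

AUTOMATION_ANALYTICS_INTERVAL = 14400  # seconds, matches the default above

def should_gather(last_gather, now=None):
    """Return True when the configured interval has elapsed since the last gather."""
    now = now or datetime.now(timezone.utc)
    if last_gather is None:
        return True  # never gathered before
    return now - last_gather >= timedelta(seconds=AUTOMATION_ANALYTICS_INTERVAL)

# The beat can safely run every 5 minutes: only one call in each
# 4-hour window actually does work.
last = datetime(2020, 3, 11, 12, 0, tzinfo=timezone.utc)
assert should_gather(last, now=last + timedelta(minutes=5)) is False
assert should_gather(last, now=last + timedelta(hours=4)) is True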

View File

@@ -179,3 +179,4 @@ else:
os.environ['SDB_NOTIFY_HOST'] = os.popen('ip route').read().split(' ')[2]
WEBSOCKET_ORIGIN_WHITELIST = ['https://localhost:8043', 'https://localhost:3000']
AWX_CALLBACK_PROFILE = True

View File

@@ -44,7 +44,7 @@ JOBOUTPUT_ROOT = '/var/lib/awx/job_status/'
SCHEDULE_METADATA_LOCATION = '/var/lib/awx/.tower_cycle'
# Ansible base virtualenv paths and enablement
BASE_VENV_PATH = "/var/lib/awx/venv"
BASE_VENV_PATH = os.path.realpath("/var/lib/awx/venv")
ANSIBLE_VENV_PATH = os.path.join(BASE_VENV_PATH, "ansible")
# Tower base virtualenv paths and enablement

View File

@@ -72,7 +72,7 @@ function AddEditCredentialsController (
vm.form.credential_type._displayValue = credentialType.get('name');
vm.isTestable = (isEditable && credentialType.get('kind') === 'external');
if (credential.get('related.input_sources.results.length' > 0)) {
if (credential.get('related.input_sources.results').length > 0) {
vm.form.credential_type._disabled = true;
}

View File

@@ -40,7 +40,6 @@ function CredentialsResolve (
return $q.all(promises)
.then(models => {
const typeId = models.credential.get('credential_type');
const orgId = models.credential.get('organization');
Rest.setUrl(GetBasePath('credentials'));
const params = { target_input_sources__target_credential: id };
@@ -48,7 +47,9 @@ function CredentialsResolve (
const dependents = {
credentialType: new CredentialType('get', typeId),
organization: new Organization('get', orgId),
organization: new Organization('get', {
resource: models.credential.get('summary_fields.organization')
}),
credentialInputSources: models.credential.extend('GET', 'input_sources'),
sourceCredentials: sourceCredentialsPromise
};

View File

@@ -25,15 +25,15 @@ function ListJobsController (
vm.strings = strings;
let newJobs = [];
// smart-search
const name = 'jobs';
const iterator = 'job';
let paginateQuerySet = {};
let launchModalOpen = false;
let refreshAfterLaunchClose = false;
let pendingRefresh = false;
let refreshTimerRunning = false;
let newJobsTimerRunning = false;
vm.searchBasePath = SearchBasePath;
@@ -104,23 +104,53 @@ function ListJobsController (
$scope.$emit('updateCount', vm.job_dataset.count, 'jobs');
});
$scope.$on('ws-jobs', () => {
if (!launchModalOpen) {
if (!refreshTimerRunning) {
refreshJobs();
} else {
pendingRefresh = true;
const canAddRowsDynamically = () => {
const orderByValue = _.get($state.params, 'job_search.order_by');
const pageValue = _.get($state.params, 'job_search.page');
const idInValue = _.get($state.params, 'job_search.id__in');
return (!idInValue && (!pageValue || pageValue === '1')
&& (orderByValue === '-finished' || orderByValue === '-started'));
};
const updateJobRow = (msg) => {
// Loop across the jobs currently shown and update the row
// if it exists
for (let i = 0; i < vm.jobs.length; i++) {
if (vm.jobs[i].id === msg.unified_job_id) {
// Update the job status.
vm.jobs[i].status = msg.status;
if (msg.finished) {
vm.jobs[i].finished = msg.finished;
const orderByValue = _.get($state.params, 'job_search.order_by');
if (orderByValue === '-finished') {
// Attempt to sort the rows in the list by their finish
// timestamp in descending order
vm.jobs.sort((a, b) =>
(!b.finished) - (!a.finished)
|| new Date(b.finished) - new Date(a.finished));
}
}
break;
}
} else {
refreshAfterLaunchClose = true;
}
};
$scope.$on('ws-jobs', (e, msg) => {
if (msg.status === 'pending' && canAddRowsDynamically()) {
newJobs.push(msg.unified_job_id);
if (!launchModalOpen && !newJobsTimerRunning) {
fetchNewJobs();
}
} else if (!newJobs.includes(msg.unified_job_id)) {
updateJobRow(msg);
}
});
$scope.$on('launchModalOpen', (evt, isOpen) => {
evt.stopPropagation();
if (!isOpen && refreshAfterLaunchClose) {
refreshAfterLaunchClose = false;
refreshJobs();
if (!isOpen && newJobs.length > 0) {
fetchNewJobs();
}
launchModalOpen = isOpen;
});
@@ -289,22 +319,49 @@ function ListJobsController (
});
};
function refreshJobs () {
qs.search(SearchBasePath, $state.params.job_search, { 'X-WS-Session-Quiet': true })
const fetchNewJobs = () => {
newJobsTimerRunning = true;
const newJobIdsFilter = newJobs.join(',');
newJobs = [];
const newJobsSearchParams = Object.assign({}, $state.params.job_search);
newJobsSearchParams.count_disabled = 1;
newJobsSearchParams.id__in = newJobIdsFilter;
delete newJobsSearchParams.page_size;
const stringifiedSearchParams = qs.encodeQueryset(newJobsSearchParams, false);
Rest.setUrl(`${vm.searchBasePath}${stringifiedSearchParams}`);
Rest.get()
.then(({ data }) => {
vm.jobs = data.results;
vm.job_dataset = data;
vm.job_dataset.count += data.results.length;
const pageSize = parseInt($state.params.job_search.page_size, 10) || 20;
const joinedJobs = data.results.concat(vm.jobs);
vm.jobs = joinedJobs.length > pageSize
? joinedJobs.slice(0, pageSize)
: joinedJobs;
$timeout(() => {
if (canAddRowsDynamically()) {
if (newJobs.length > 0 && !launchModalOpen) {
fetchNewJobs();
} else {
newJobsTimerRunning = false;
}
} else {
// Bail out - one of [order_by, page, id__in] params has changed since we
// received these new job messages
newJobs = [];
newJobsTimerRunning = false;
}
}, 5000);
})
.catch(({ data, status }) => {
ProcessErrors($scope, data, status, null, {
hdr: strings.get('error.HEADER'),
msg: strings.get('error.CALL', {
path: `${vm.searchBasePath}${stringifiedSearchParams}`,
status
})
});
});
pendingRefresh = false;
refreshTimerRunning = true;
$timeout(() => {
if (pendingRefresh) {
refreshJobs();
} else {
refreshTimerRunning = false;
}
}, 5000);
}
};
vm.isCollapsed = true;

View File

@@ -37,7 +37,7 @@ export const OUTPUT_ELEMENT_LAST = '#atStdoutMenuLast';
export const OUTPUT_MAX_BUFFER_LENGTH = 1000;
export const OUTPUT_MAX_LAG = 120;
export const OUTPUT_NO_COUNT_JOB_TYPES = ['ad_hoc_command', 'system_job', 'inventory_update'];
export const OUTPUT_ORDER_BY = 'counter';
export const OUTPUT_ORDER_BY = 'start_line';
export const OUTPUT_PAGE_CACHE = true;
export const OUTPUT_PAGE_LIMIT = 5;
export const OUTPUT_PAGE_SIZE = 50;

View File

@@ -113,11 +113,6 @@ function projectsListController (
// And we found the affected project
$log.debug(`Received event for project: ${project.name}`);
$log.debug(`Status changed to: ${data.status}`);
if (data.status === 'successful' || data.status === 'failed' || data.status === 'canceled') {
reloadList();
} else {
project.scm_update_tooltip = vm.strings.get('update.UPDATE_RUNNING');
}
project.status = data.status;
buildTooltips(project);
}

View File

@@ -153,7 +153,10 @@ function TemplatesStrings (BaseString) {
TIMED_OUT: t.s('APPROVAL TIMED OUT'),
TIMEOUT: t.s('Timeout'),
APPROVED: t.s('APPROVED'),
DENIED: t.s('DENIED')
DENIED: t.s('DENIED'),
CONVERGENCE: t.s('Convergence'),
ALL: t.s('All'),
ANY: t.s('Any'),
};
}

View File

@@ -24,7 +24,6 @@ function ListTemplatesController(
qs,
GetBasePath,
ngToast,
$timeout
) {
const vm = this || {};
const [jobTemplate, workflowTemplate] = resolvedModels;
@@ -32,10 +31,6 @@ function ListTemplatesController(
const choices = workflowTemplate.options('actions.GET.type.choices')
.concat(jobTemplate.options('actions.GET.type.choices'));
let launchModalOpen = false;
let refreshAfterLaunchClose = false;
let pendingRefresh = false;
let refreshTimerRunning = false;
let paginateQuerySet = {};
vm.strings = strings;
@@ -120,25 +115,39 @@ function ListTemplatesController(
setToolbarSort();
}, true);
$scope.$on(`ws-jobs`, () => {
if (!launchModalOpen) {
if (!refreshTimerRunning) {
refreshTemplates();
} else {
pendingRefresh = true;
}
} else {
refreshAfterLaunchClose = true;
}
});
$scope.$on(`ws-jobs`, (e, msg) => {
if (msg.unified_job_template_id && vm.templates) {
const template = vm.templates.find((t) => t.id === msg.unified_job_template_id);
if (template) {
if (msg.status === 'pending') {
// This is a new job - add it to the front of the
// recent_jobs array
if (template.summary_fields.recent_jobs.length === 10) {
template.summary_fields.recent_jobs.pop();
}
$scope.$on('launchModalOpen', (evt, isOpen) => {
evt.stopPropagation();
if (!isOpen && refreshAfterLaunchClose) {
refreshAfterLaunchClose = false;
refreshTemplates();
template.summary_fields.recent_jobs.unshift({
id: msg.unified_job_id,
status: msg.status,
type: msg.type
});
} else {
// This is an update to an existing job. Check to see
// if we have it in our array of recent_jobs
for (let i=0; i<template.summary_fields.recent_jobs.length; i++) {
const recentJob = template.summary_fields.recent_jobs[i];
if (recentJob.id === msg.unified_job_id) {
recentJob.status = msg.status;
if (msg.finished) {
recentJob.finished = msg.finished;
template.last_job_run = msg.finished;
}
break;
}
};
}
}
}
launchModalOpen = isOpen;
});
vm.isInvalid = (template) => {
@@ -265,15 +274,6 @@ function ListTemplatesController(
vm.templates = vm.dataset.results;
})
.finally(() => Wait('stop'));
pendingRefresh = false;
refreshTimerRunning = true;
$timeout(() => {
if (pendingRefresh) {
refreshTemplates();
} else {
refreshTimerRunning = false;
}
}, 5000);
}
function createErrorHandler(path, action) {
@@ -483,8 +483,7 @@ ListTemplatesController.$inject = [
'Wait',
'QuerySet',
'GetBasePath',
'ngToast',
'$timeout'
'ngToast'
];
export default ListTemplatesController;

View File

@@ -598,6 +598,11 @@ table, tbody {
}
.List-staticColumnLayout--groups {
display: grid;
grid-template-columns: @at-space @at-space-5x auto;
}
.List-staticColumnLayout--hostNestedGroups {
display: grid;
grid-template-columns: @at-space @at-space-5x @at-space-5x auto;
}

View File

@@ -1,4 +1,4 @@
const SUPPORTED_LOCALES = ['en', 'es', 'fr', 'ja', 'nl'];
const SUPPORTED_LOCALES = ['en', 'es', 'fr', 'ja', 'nl', 'zh'];
const DEFAULT_LOCALE = 'en';
const BASE_PATH = global.$basePath ? `${global.$basePath}languages/` : '/static/languages/';

View File

@@ -58,6 +58,10 @@ export default ['i18n', function(i18n) {
type: 'text',
reset: 'ANSIBLE_FACT_CACHE_TIMEOUT',
},
MAX_FORKS: {
type: 'text',
reset: 'MAX_FORKS',
},
PROJECT_UPDATE_VVV: {
type: 'toggleSwitch',
},

View File

@@ -79,6 +79,12 @@ export default ['i18n', function(i18n) {
AUTOMATION_ANALYTICS_URL: {
type: 'text',
reset: 'AUTOMATION_ANALYTICS_URL',
},
AUTOMATION_ANALYTICS_GATHER_INTERVAL: {
type: 'number',
integer: true,
min: 1800,
reset: 'AUTOMATION_ANALYTICS_GATHER_INTERVAL',
}
},

View File

@@ -39,7 +39,7 @@ export default
label: i18n._("Hosts")
},
{
url: "/#/hosts?host_search=has_active_failures:true",
url: "/#/hosts?host_search=last_job_host_summary__failed:true",
number: scope.data.hosts.failed,
label: i18n._("Failed Hosts"),
isFailureCount: true

View File

@@ -11,7 +11,7 @@ export default
templateUrl: templateUrl('home/dashboard/lists/job-templates/job-templates-list')
};
function link(scope, element, attr) {
function link(scope) {
scope.$watch("data", function(data) {
if (data) {
@@ -22,7 +22,7 @@ export default
scope.noJobTemplates = true;
}
}
});
}, true);
scope.canAddJobTemplate = false;
let url = GetBasePath('job_templates');

Some files were not shown because too many files have changed in this diff.