Compare commits


218 Commits
1.0.3 ... 1.0.4

Author SHA1 Message Date
Matthew Jones
8505783350 Merge remote-tracking branch 'tower/release_3.2.3' into devel
* tower/release_3.2.3:
  fix unicode bugs with log statements
  use --export option for ansible-inventory
  add support for new "BECOME" prompt in Ansible 2.5+ for adhoc commands
  enforce strings for secret password inputs on Credentials
  fix a bug for "users should be able to change type of unused credential"
  fix xss vulnerabilities - on host recent jobs popover - on schedule name tooltip
  fix a bug when testing UDP-based logging configuration
  bump templates form credential_types page limit
  Wait for Slack RTM API websocket connection to be established
  don't process artifacts from custom `set_stat` calls asynchronously
  don't overwrite env['ANSIBLE_LIBRARY'] when fact caching is enabled
  only allow facts to cache in the proper file system location
  replace our memcached-based fact cache implementation with local files
  add support for new "BECOME" prompt in Ansible 2.5+
  fix a bug in inventory generation for isolated nodes
  properly handle unicode for isolated job buffers
2018-02-20 12:22:25 -05:00
Ryan Petrello
76ff925b77 Merge pull request #1298 from sjenning/add-import-playbook
add import_playbook as a top-level playbook indicator
2018-02-20 09:54:23 -05:00
Seth Jennings
42ff1cfd67 add import_playbook as top-level playbook indicator 2018-02-19 16:03:08 -06:00
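AWX decides which files count as playbooks by looking for a recognizable top-level play entry; this change adds `import_playbook` to that set. A minimal sketch of such a heuristic (the key list and YAML handling here are assumptions for illustration, not AWX's exact code):

```python
# Treat a YAML file as a playbook if any top-level entry carries a
# play-like key; 'import_playbook' now counts as one.
import yaml

PLAYBOOK_KEYS = ('hosts', 'import_playbook', 'include')

def could_be_playbook(path):
    try:
        with open(path) as f:
            data = yaml.safe_load(f)
    except yaml.YAMLError:
        return False
    if not isinstance(data, list):
        return False
    return any(isinstance(entry, dict) and
               any(key in entry for key in PLAYBOOK_KEYS)
               for entry in data)
```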
Ryan Petrello
90bb43ce74 Merge pull request #1292 from ryanpetrello/fix-1291
don't require credentials to relaunch a job
2018-02-19 12:01:42 -05:00
Ryan Petrello
56e3d98e62 don't require credentials to relaunch a job
see: https://github.com/ansible/awx/issues/1291
2018-02-19 11:15:55 -05:00
Matthew Jones
7d51b3b6b6 Merge pull request #1116 from bmduffy/bugfix-pem-validation
[bugfix-pem-validation]
2018-02-19 07:53:19 -05:00
Brian Duffy
4270e3a17b [bugfix] updated pem validation unit tests 2018-02-18 15:11:42 +00:00
Brian Duffy
098f4eb198 [bugfix-pem-validation] pass flake8 2018-02-18 01:46:31 +00:00
Jake McDermott
ae1167ab15 Merge pull request #1282 from ansible/jakemcdermott-patch-1
fix last run job variable reference
2018-02-16 16:35:35 -05:00
Jake McDermott
5b5411fecd fix last run job variable reference 2018-02-16 16:32:13 -05:00
Brian Duffy
235213bd3b updated regex 2018-02-16 16:06:33 +00:00
Wayne Witzel III
2c71a27630 Merge pull request #1123 from wwitzel3/new-permissions
New RBAC Roles
2018-02-15 16:56:03 -05:00
Alan Rominger
1a6819cdea Merge pull request #630 from AlanCoding/text_type
Fix unicode bugs with log statements
2018-02-15 15:52:29 -05:00
AlanCoding
465e605464 fix unicode bugs with log statements 2018-02-15 15:26:58 -05:00
Alan Rominger
22f1a53266 Merge pull request #1233 from AlanCoding/no_turning_back
Raise 400 error on removal of credential on launch
2018-02-15 14:11:57 -05:00
Ryan Petrello
733b4b874e Merge pull request #1255 from ryanpetrello/license-compliance
changes to license compliance
2018-02-15 09:30:41 -05:00
AlanCoding
3d433350d3 raise 400 error on removal of credential on launch
Here, "removal" means providing a `credentials` list on launch
that lacks a credential type the job template has.
This ensures that every category of credential the job template
has will also exist on jobs run from that job template.
This restriction already existed, but this change makes the endpoint
fail instead of silently re-adding the credentials, making manual
launch congruent with saved launch configurations.
2018-02-15 08:16:03 -05:00
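The rule above can be sketched with plain dicts standing in for credential objects (names are illustrative, not AWX's serializer code):

```python
def validate_launch_credentials(template_creds, launch_creds):
    """Every credential category on the job template must survive launch,
    either as-is or replaced by a credential of the same type."""
    template_types = {c['type'] for c in template_creds}
    launch_types = {c['type'] for c in launch_creds}
    missing = template_types - launch_types
    if missing:
        # Fail with a 400-style error instead of silently re-adding
        # the template's credentials to the job.
        raise ValueError('credentials list lacks credential type(s): %s'
                         % ', '.join(sorted(missing)))
```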
Wayne Witzel III
30a5617825 Address PR feedback 2018-02-14 22:53:33 +00:00
Alan Rominger
5935c410e4 Merge pull request #629 from AlanCoding/export
Use --export option for ansible-inventory
2018-02-14 15:56:05 -05:00
Alan Rominger
c90cf7c5e2 Merge pull request #1253 from AlanCoding/group_vars
Use --export option for ansible-inventory
2018-02-14 15:52:00 -05:00
Ryan Petrello
218dfb680e changes to license compliance
now if a license is expired or over the managed node limit, it won't
prevent host creation or Job/JobTemplate launches

see: https://github.com/ansible/ansible-tower/issues/7860
2018-02-14 15:51:19 -05:00
AlanCoding
b01deb393e use --export option for ansible-inventory 2018-02-14 14:48:13 -05:00
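`--export` asks `ansible-inventory` to emit output optimized for re-import (group vars stay on groups rather than being flattened onto hosts). A usage sketch:

```python
# Dump an inventory source as export-friendly JSON.
import json
import subprocess

def dump_inventory(source):
    out = subprocess.check_output(
        ['ansible-inventory', '-i', source, '--list', '--export'])
    return json.loads(out)
```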
Chris Church
410111b8c8 Merge pull request #1241 from cclauss/six.string_types_in_mixins.py
six.string_types in mixins.py
2018-02-14 13:38:44 -05:00
AlanCoding
05e6eda453 use --export option for ansible-inventory 2018-02-14 12:34:41 -05:00
Bill Nottingham
4a4b44955b Merge pull request #1247 from pkoro/anchor-link-fix
Fix in anchor link
2018-02-14 10:37:43 -05:00
Ryan Petrello
9d82098162 Merge pull request #1249 from ryanpetrello/no-travis
we don't use travis for tests; remove .travis.yml
2018-02-14 10:15:48 -05:00
Ryan Petrello
1c62c142f1 we don't use travis for tests; remove .travis.yml 2018-02-14 10:07:17 -05:00
Paschalis Korosoglou
5215bbcbf6 Fix in anchor link 2018-02-14 16:05:58 +02:00
Matthew Jones
0d2daecf49 Merge pull request #1243 from matburt/fix_clustering_isolated
Fix isolated instance clustering implementation
2018-02-14 08:32:24 -05:00
cclauss
552b69592c six.string_types in mixins.py 2018-02-14 08:35:14 +01:00
Matthew Jones
ffe5a92eb9 Update isolated instance capacity calculation 2018-02-13 21:51:50 -05:00
Matthew Jones
925d9efecf Fixing up isolated node execution after cluster changes
* Rework queue detection to include control groups and isolated instances
* Fix up development tooling around isolated nodes
* Update unit tests
2018-02-13 21:51:38 -05:00
Jake McDermott
c1b6595a0b Merge pull request #1201 from jakemcdermott/item_copy_ui
api-backed copy ui
2018-02-13 17:42:00 -05:00
Jake McDermott
d4e46a35ce get exact match on ids 2018-02-13 17:15:59 -05:00
Jake McDermott
bf0683f7fe replace usage of all and spread 2018-02-13 17:15:56 -05:00
Jake McDermott
0ff94c63f2 use edit capability for showing copy on most views 2018-02-13 17:15:52 -05:00
Jake McDermott
16153daa14 add e2e test for inventory script copy 2018-02-13 17:15:48 -05:00
Jake McDermott
a680d188c0 implement model based copy for inventory scripts 2018-02-13 17:15:44 -05:00
Jake McDermott
d56f1a0120 add e2e test for credential copy 2018-02-13 17:15:41 -05:00
Jake McDermott
50d95ddc3f implement model-based credential copy 2018-02-13 17:15:37 -05:00
Jake McDermott
21a32f90ce add e2e test for notification template copy 2018-02-13 17:15:34 -05:00
Jake McDermott
09d3e6cd98 implement model-based copy for notification templates 2018-02-13 17:15:30 -05:00
Jake McDermott
29f1d695ae add NotificationTemplate model 2018-02-13 17:15:26 -05:00
Jake McDermott
e0f3e4feb7 add e2e test for inventory copy 2018-02-13 17:15:20 -05:00
Jake McDermott
e9ce9621f2 implement model-based copy for inventories 2018-02-13 17:15:16 -05:00
Jake McDermott
a02eda1bea add e2e test for project copy 2018-02-13 17:15:12 -05:00
Jake McDermott
779385ddb6 implement model based copy for projects 2018-02-13 17:15:05 -05:00
Jake McDermott
e5fd483d06 implement model-based copy for job templates and workflow templates 2018-02-13 17:15:01 -05:00
Jake McDermott
8679651d4c add e2e test for template copy and delete warnings 2018-02-13 17:14:57 -05:00
Jake McDermott
4c988fbc02 add WorkflowJobTemplate model 2018-02-13 17:14:48 -05:00
Jake McDermott
c40feb52b7 add base model unit test 2018-02-13 17:14:43 -05:00
Jake McDermott
78b975b2a9 add copy to base model 2018-02-13 17:14:38 -05:00
Jake McDermott
cfba11f8d7 slight cleanup of templates list controller
lint / fix all of the indentation issues
smaller functions
use a variable for any string a user sees
2018-02-13 17:14:30 -05:00
Jake McDermott
73fa8521d0 add e2e test for job and workflow template copy 2018-02-13 17:14:18 -05:00
Jake McDermott
894f0cf2c5 update current workflow copy implementation to be compatible with recent api changes 2018-02-13 17:13:54 -05:00
Chris Church
67ec811e8d Merge pull request #1186 from cclauss/execfile-file-reduce-StandardError
Miscellaneous Python 3 changes: execfile(), file(), reduce(), StandardError
2018-02-13 15:11:24 -05:00
Chris Church
31d0e55c2a Merge pull request #1175 from cclauss/unicode-to-six-u
Change unicode() --> six.text_type() for Python 3
2018-02-13 15:11:11 -05:00
Ryan Petrello
3a0f2ce2fe Merge pull request #628 from ryanpetrello/sudo-become-adhoc
add support for new "BECOME" prompt in Ansible 2.5+ for adhoc commands
2018-02-13 14:38:30 -05:00
Ryan Petrello
613d48cdbc add support for new "BECOME" prompt in Ansible 2.5+ for adhoc commands
see: https://github.com/ansible/ansible-tower/issues/7850
2018-02-13 14:26:27 -05:00
Alan Rominger
39362aab4b Merge pull request #1204 from AlanCoding/default_omission
Omit placeholder vars with survey password defaults
2018-02-13 12:58:11 -05:00
Alan Rominger
6cb3267ebe Merge pull request #1214 from AlanCoding/fix_schedule_qs
Change schedule queryset logic to avoid server error
2018-02-13 12:54:05 -05:00
Bill Nottingham
f8c66b826a Merge pull request #1217 from wenottingham/eat-your-celery-messages
Tweak celery-related messages.
2018-02-13 11:48:21 -05:00
Bill Nottingham
7b288ef98a Tweak celery-related messages. 2018-02-13 10:52:14 -05:00
AlanCoding
58a94be428 Omit placeholder vars with survey password defaults
WFJT nodes & schedules (launch configs) will accept POST/PATCH/PUT
with variables in extra_data that have $encrypted$ for their value
if a valid survey default exists.

In this case, the variable is simply removed from the extra_data.
This is done so that it does not affect the pre-existing value
substitution for $encrypted$ values from the config itself.
2018-02-13 09:07:59 -05:00
AlanCoding
960845883d change schedule qs logic, avoid server error 2018-02-13 08:32:00 -05:00
Ryan Petrello
eda53eb548 Merge pull request #627 from ryanpetrello/fix-7898
enforce strings for secret password inputs on Credentials
2018-02-12 17:11:02 -05:00
Ryan Petrello
82e41b40bb enforce strings for secret password inputs on Credentials
see: https://github.com/ansible/ansible-tower/issues/7898
2018-02-12 17:03:32 -05:00
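A sketch of the validation idea, using the six-based string check this codebase favors (the function name is illustrative):

```python
import six

def validate_secret_input(field_name, value):
    # Secret password inputs must be strings; other types cannot be
    # encrypted and round-tripped reliably.
    if not isinstance(value, six.string_types):
        raise ValueError('%s must be a string, got %s'
                         % (field_name, type(value).__name__))
```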
Alan Rominger
0268d575f8 Merge pull request #1193 from AlanCoding/no_sneaking_credential_in
Validation clause for WFJT node to follow credential prompt rule
2018-02-12 12:46:12 -05:00
Brian Duffy
6b5a6e9226 [bugfix-pem-validation] left a print statement in 2018-02-12 16:44:32 +00:00
Ryan Petrello
56d01cda6b Merge pull request #1205 from ryanpetrello/fix-pexpect-test
improve a bwrap test
2018-02-12 10:50:00 -05:00
Ryan Petrello
194c2dcf0b improve a bwrap test 2018-02-12 10:14:37 -05:00
Ryan Petrello
b38be89d1a Merge pull request #1203 from ryanpetrello/update-pexpect
upgrade to the latest pexpect
2018-02-12 09:49:26 -05:00
Ryan Petrello
2a168faf6a upgrade to the latest pexpect
see: https://github.com/ansible/awx/issues/417
2018-02-12 09:18:14 -05:00
Ryan Petrello
83b5377387 Merge pull request #1187 from ryanpetrello/file-your-vars-away-for-a-rainy-day
pass extra vars via file rather than via commandline
2018-02-12 08:48:19 -05:00
cclauss
2e623ad80c Change unicode() --> six.text_type() for Python 3 2018-02-11 21:09:12 +01:00
Ryan Petrello
7e42c54868 Merge pull request #1184 from cclauss/basestring-to-six.string_types
basestring to six.string_types for Python 3
2018-02-10 09:49:16 -05:00
Bill Nottingham
aa5bd9f5bf Pass extra vars via file rather than via the command line, including custom creds.
The extra vars file lives in the playbook's private runtime
directory and will be reaped along with the rest of the directory.

Adjust assorted unit tests as necessary.
2018-02-10 09:27:24 -05:00
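The mechanism is roughly: serialize the extra vars into the job's private directory and hand ansible-playbook a file reference instead of inline values (which would be visible in `ps` output). A sketch with illustrative paths:

```python
import json
import os
import stat

def build_extra_vars_args(private_dir, extra_vars):
    # Write the vars inside the private runtime directory; the whole
    # directory is reaped after the run, taking this file with it.
    path = os.path.join(private_dir, 'extra_vars.json')
    with open(path, 'w') as f:
        json.dump(extra_vars, f)
    os.chmod(path, stat.S_IRUSR | stat.S_IWUSR)  # owner-only: may hold secrets
    # ansible-playbook reads vars from a file when -e is given @<path>.
    return ['-e', '@%s' % path]
```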
Wayne Witzel III
13e777f01b Rename migration files 2018-02-10 02:52:26 +00:00
Wayne Witzel III
819b318fe5 Add Org Execute 2018-02-10 02:52:26 +00:00
Wayne Witzel III
9e7bd55579 Add Notification Admin 2018-02-10 02:52:26 +00:00
Wayne Witzel III
fbece6bdde Updating and adding tests for new RBAC roles 2018-02-10 02:52:26 +00:00
Wayne Witzel III
9fdd00785f Add new RBAC role migrations 2018-02-10 02:52:26 +00:00
Wayne Witzel III
b478740f28 Add Workflow Admin 2018-02-10 02:52:25 +00:00
Wayne Witzel III
109841c350 Add Credential Admin role 2018-02-10 02:52:25 +00:00
Wayne Witzel III
6c951aa883 Add Inventory Admin role 2018-02-10 02:52:25 +00:00
Wayne Witzel III
e7e83afd00 Add Project Admin role 2018-02-10 02:52:25 +00:00
Brian Duffy
7d956a3b68 [bugfix-pem-validation] update from code review 2018-02-10 01:08:29 +00:00
AlanCoding
02ac139d5c validation clause for WFJT node to follow cred prompt rule 2018-02-09 16:17:21 -05:00
Jake McDermott
605a2c7e01 Merge pull request #1189 from jakemcdermott/fix-multivault-select
fix recent multi-vault select breakage
2018-02-09 13:47:17 -05:00
Jake McDermott
484caf29b6 fix recent multi-vault select breakage 2018-02-09 12:52:16 -05:00
Jake McDermott
b2b519e48d Merge pull request #1096 from mabashian/169-v1
UI support for prompting on job template schedules
2018-02-09 11:34:25 -05:00
Jake McDermott
e8e6f50573 Merge branch 'devel' into 169-v1 2018-02-09 11:32:40 -05:00
cclauss
260aec543e Misc Python 3 changes: execfile(), file(), reduce(), StandardError 2018-02-09 17:17:05 +01:00
Marliana Lara
7c95cd008f Merge pull request #1152 from marshmalien/feat/ui_clustering_bugs
Fix UI bugs related to UI Clustering
2018-02-09 11:13:59 -05:00
Ryan Petrello
0ff11ac026 Merge pull request #1185 from ryanpetrello/stop-it-uwsgi
fix celery pid restart issues
2018-02-09 11:07:01 -05:00
Ryan Petrello
605c5e3276 fix celery pid restart issues 2018-02-09 11:03:00 -05:00
cclauss
c371b869dc basestring to six.string_types for Python 3 2018-02-09 16:28:36 +01:00
Shane McDonald
476dbe58c5 Merge pull request #1183 from ryanpetrello/swagger
normalize dates in the Swagger output to minimize diffs
2018-02-09 10:18:19 -05:00
Ryan Petrello
3c43aaef21 normalize dates in the Swagger output to minimize diffs 2018-02-09 10:16:27 -05:00
Ryan Petrello
76d5c02e07 Merge pull request #1181 from ryanpetrello/swagger
move swagger doc metadata out of the awx repo
2018-02-09 10:09:03 -05:00
Ryan Petrello
fe02abe630 move swagger doc metadata out of the awx repo 2018-02-09 09:45:23 -05:00
Ryan Petrello
ce9cb24995 Merge pull request #1171 from cclauss/from-six-import-xrange
from six.moves import xrange for Python 3
2018-02-09 09:02:38 -05:00
Jake McDermott
6cb6c61e5c Merge pull request #1176 from jakemcdermott/stabilize-xss
use project details view to check permissions list
2018-02-08 17:32:39 -05:00
Jake McDermott
67e5d083b8 use project details view to check permissions list 2018-02-08 17:26:54 -05:00
Ryan Petrello
5932c54126 Merge pull request #1165 from ryanpetrello/remove_new_in
remove the `new_in_<version>` in API doc gen
2018-02-08 17:07:50 -05:00
cclauss
e1a8b69736 from six.moves import xrange for Python 3 2018-02-08 22:41:33 +01:00
Ryan Petrello
7472026cca remove the new_in_<version> in API doc gen
see: https://github.com/ansible/awx/issues/73
2018-02-08 16:21:22 -05:00
Jake McDermott
8475bdfdc4 Merge pull request #1170 from shanemcd/fix_standalone_docker_wait_fors
Fix wait_fors in standalone Docker installs
2018-02-08 16:08:31 -05:00
Ryan Petrello
bd2f1568fb Merge pull request #626 from ryanpetrello/release_3.2.3
fix a bug for "users should be able to change type of unused credential"
2018-02-08 15:59:22 -05:00
Alan Rominger
b3dcfc8c18 Merge pull request #903 from ansible/item_copy
Implement item copy feature
2018-02-08 15:51:16 -05:00
Ryan Petrello
72715df751 fix a bug for "users should be able to change type of unused credential"
see: https://github.com/ansible/ansible-tower/issues/7516
related: https://github.com/ansible/tower/pull/441
2018-02-08 15:44:14 -05:00
Shane McDonald
6b3ca32827 Fix wait_fors in standalone Docker installs 2018-02-08 15:08:44 -05:00
Ryan Petrello
1ccdb305e3 Merge pull request #1164 from cclauss/use-new-style-exceptions
Modernize Python 2 code to get ready for Python 3
2018-02-08 14:10:25 -05:00
Ryan Petrello
033bec693b Merge pull request #1166 from ryanpetrello/fix-system-job-stdout
properly handle STDOUT_MAX_BYTES_DISPLAY for system jobs
2018-02-08 13:55:59 -05:00
Ryan Petrello
f2c5859fde properly handle STDOUT_MAX_BYTES_DISPLAY for system jobs
see: https://github.com/ansible/ansible-tower/issues/7890
2018-02-08 11:37:05 -05:00
cclauss
e18838a4b7 Modernize Python 2 code to get ready for Python 3 2018-02-08 17:26:22 +01:00
Shane McDonald
48300da443 Merge pull request #1163 from ryanpetrello/swagger
add indentation to swagger docs
2018-02-08 10:52:47 -05:00
Ryan Petrello
5b9dc41015 add indentation to swagger docs
this will make it easier to spot changes as our APIs change
2018-02-08 10:51:42 -05:00
Alan Rominger
01c6463b1b Merge pull request #1162 from AlanCoding/remove_cred_sf
Remove credential from node and schedule summary fields
2018-02-08 10:37:46 -05:00
Alan Rominger
181399df7a Merge pull request #1159 from AlanCoding/reschedule_msg
Verbose error messages for failure to re-schedule
2018-02-08 10:28:11 -05:00
Ryan Petrello
9bc0a0743b Merge pull request #1161 from ryanpetrello/zone-names
update zoneinfo endpoint to be a list of dicts
2018-02-08 09:48:11 -05:00
Ryan Petrello
c1d0768e37 Merge pull request #1160 from ryanpetrello/fix-old-rrule-dtstart
add a few schedule RRULE parsing improvements
2018-02-08 09:47:59 -05:00
Marliana Lara
d743faf33e Fix UI bugs related to instance groups views
* Fix bug where capacity_adjustment is reset to "1.00" when an instance is toggled
* Hook up websockets for instance group jobs and instance jobs
* Add Wait spinner to Capacity_Adjuster, Instance association modal, and Instance group delete
* Add updateDataset event listener to update instance and instanceGroups list after smartSearch query
2018-02-08 09:33:24 -05:00
AlanCoding
0f66892d06 remove credential from node and schedule summary fields 2018-02-08 09:22:55 -05:00
Ryan Petrello
c866d85b8c update zoneinfo endpoint to be a list of dicts 2018-02-08 09:12:26 -05:00
Ryan Petrello
3c799b007e don't allow rrule values that contain both COUNT and UNTIL
see: https://github.com/ansible/ansible-tower/issues/7887
2018-02-08 08:59:52 -05:00
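Both schedule guards (this commit and the DTSTART one below) are simple string-level checks; a sketch, where the DTSTART year threshold is an assumption for illustration:

```python
import re

def validate_rrule(rrule):
    # RFC 5545 forbids COUNT and UNTIL in the same recurrence rule.
    if 'COUNT=' in rrule and 'UNTIL=' in rrule:
        raise ValueError('RRULE may not contain both COUNT and UNTIL')
    # A DTSTART far in the past makes occurrence expansion expensive.
    match = re.search(r'DTSTART:(\d{4})', rrule)
    if match and int(match.group(1)) < 1940:
        raise ValueError('DTSTART is too far in the past to expand cheaply')
```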
Ryan Petrello
887f16023a improve detection of expensive DTSTART RRULE values 2018-02-08 08:54:30 -05:00
AlanCoding
87b59903a5 verbose error messages for failure to re-schedule 2018-02-08 08:46:56 -05:00
Bill Nottingham
e982f6ed06 Merge pull request #1154 from wenottingham/namespaces-the-final-frontier
Have bubblewrap mount a new /proc in the wrapped environment.
2018-02-07 17:24:38 -05:00
Ryan Petrello
fb5428dd63 Merge pull request #1151 from ansible/jakemcdermott-patch-1-1
always return schema from get_default_schema
2018-02-07 16:56:48 -05:00
Alan Rominger
b38aa3dfb6 Merge pull request #1153 from AlanCoding/fix_wfjt_scheduling
fix bug scheduling WFJT without prompts
2018-02-07 15:49:13 -05:00
Bill Nottingham
c1a0e2cd16 Have bubblewrap mount a new /proc in the wrapped environment.
Since we're running with a new pid namespace, we should have
a new /proc that is in that namespace. Otherwise tools inside the
sandbox would still see the host's processes through the old /proc.
2018-02-07 15:47:03 -05:00
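A sketch of the invocation shape: alongside `--unshare-pid`, bubblewrap's `--proc` mounts a fresh procfs so the sandbox sees its own pid namespace (the mount set AWX actually uses is longer than this):

```python
def wrap_args(cmd):
    # --unshare-pid gives the sandbox its own pid namespace;
    # --proc mounts a new /proc that belongs to that namespace.
    return ['bwrap', '--unshare-pid', '--proc', '/proc',
            '--dev', '/dev', '--bind', '/', '/'] + list(cmd)
```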
AlanCoding
fe69a23a4e fix bug scheduling WFJT without prompts 2018-02-07 14:34:25 -05:00
Jake McDermott
90f555d684 always return schema from get_default_schema 2018-02-07 13:42:01 -05:00
Matthew Jones
4002f2071d Adding instance group policy unit tests
also remove async call for applying topology change
2018-02-07 11:14:53 -05:00
Ryan Petrello
244dfa1c92 Merge pull request #1145 from ryanpetrello/swagger
fix a bad swagger-related import that breaks the build
2018-02-07 09:12:28 -05:00
Ryan Petrello
1adb4cefec fix a bad swagger-related import that breaks the build 2018-02-07 08:56:59 -05:00
Bill Nottingham
4abcbf949a Merge pull request #1142 from geerlingguy/fix-some-text
Fix grammar for tasks - replace 'state' with 'stage'.
2018-02-06 19:28:20 -05:00
Jeff Geerling
19f0b9ba92 Fix grammar for tasks - replace 'state' with 'stage'. 2018-02-06 16:57:59 -06:00
Ryan Petrello
b1c4c75360 Merge pull request #1141 from ryanpetrello/swagger
a bit of extra Swagger doc tinkering
2018-02-06 14:33:24 -05:00
Ryan Petrello
cc3659d375 fix a busted swagger import 2018-02-06 13:43:31 -05:00
Ryan Petrello
b1695fe107 add instructions for generating Swagger/OpenAPI docs 2018-02-06 13:37:33 -05:00
Jake McDermott
8cd0870253 Merge pull request #1135 from chrismeyersfsu/tests-recent_jobs_xss
xss test for per-host recent jobs popup
2018-02-06 11:51:05 -05:00
Ryan Petrello
84dc40d141 Merge pull request #1124 from ryanpetrello/swagger
add support for building swagger/OpenAPI JSON
2018-02-06 11:12:36 -05:00
Ryan Petrello
8b976031cb use VERSION_TARGET for Swagger doc generation 2018-02-06 10:48:51 -05:00
Chris Meyers
aaf87c0c04 xss test for per-host recent jobs popup 2018-02-06 10:37:00 -05:00
Ryan Petrello
7ff9f0b7d1 build example Swagger request and response bodies from our API tests 2018-02-06 10:36:25 -05:00
Ryan Petrello
527594285f more Swagger template markup 2018-02-06 10:12:58 -05:00
Ryan Petrello
07dfab648c add some tests to prove that OpenAPI JSON compilation works 2018-02-06 10:12:58 -05:00
Ryan Petrello
10974159b5 add support for marking Swagger paths deprecated 2018-02-06 10:12:58 -05:00
Ryan Petrello
ac7c5f8648 clean up API markdown docs 2018-02-06 10:12:57 -05:00
Ryan Petrello
57c22c20b2 add support for building swagger/OpenAPI JSON
to build, run `make swagger`; a file named `swagger.json` will be
written to the current working directory
2018-02-06 10:12:57 -05:00
Matthew Jones
c61efc0af8 Add information on enabled flag 2018-02-05 15:44:26 -05:00
Ryan Petrello
772fcc9149 Merge pull request #1097 from rbywater/feature/preferipv4
Add ability to select to prefer IPv4 addresses for ansible_ssh_host
2018-02-05 14:57:10 -05:00
Matthew Jones
8e94a9e599 Adding capacity docs
Updating capacity for callback jobs to include parent process impact
2018-02-05 09:49:01 -05:00
Shane McDonald
1e9b0c2786 Merge pull request #1130 from shanemcd/fix-etcd-template
Fix variable reference in k8s etcd template
2018-02-05 09:18:20 -05:00
Richard Bywater
5e5790e7d1 Use correct source_vars syntax 2018-02-05 12:45:52 +13:00
Richard Bywater
9f8b9b8d7f Fix unit test 2018-02-05 08:55:10 +13:00
Richard Bywater
6d69087db8 Add prefer_ipv4 to whitelist and add unit test for config value 2018-02-05 08:55:10 +13:00
Richard Bywater
a737663dde Add ability to select to prefer IPv4 addresses for ansible_ssh_host
Currently Cloudforms can return a mix of IPv4 and IPv6 addresses in the
ipaddresses field, and this mix comes in a "random" order (that is, the
first entry may be IPv4 sometimes but IPv6 other times). If you wish to
always use IPv4 for the ansible_ssh_host value, this is problematic.

This change adds a new prefer_ipv4 flag which looks for the first
IPv4 address in the ipaddresses list and uses that instead of just the
first entry.
2018-02-05 08:55:10 +13:00
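A sketch of the selection logic described above (field handling simplified; the helper name is illustrative):

```python
import ipaddress

def pick_ssh_host(addresses, prefer_ipv4=False):
    # With prefer_ipv4, scan the unordered list for the first IPv4
    # entry; otherwise fall back to the first address, as before.
    if prefer_ipv4:
        for addr in addresses:
            try:
                if ipaddress.ip_address(addr).version == 4:
                    return addr
            except ValueError:
                continue  # not a parseable IP; skip it
    return addresses[0] if addresses else None
```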
Shane McDonald
dce934577b Fix variable reference in k8s etcd template 2018-02-03 10:29:53 -05:00
Jake McDermott
3d421cc595 Merge pull request #1078 from jakemcdermott/saml-ldap-updates
update configuration views for multiple LDAP servers, SAML 2FA, and SAML attribute mapping
2018-02-02 12:15:44 -05:00
Ryan Petrello
93c8cc9f8e Merge pull request #696 from jladdjr/awx_349_custom_cred_write_multiple_files
Feature: Multi-file support for Credential Types
2018-02-02 11:39:11 -05:00
Chris Meyers
1808559586 Merge pull request #1102 from chrismeyersfsu/tests-job_schedules_xss
add xss test for jobs schedules
2018-02-02 11:29:42 -05:00
Jim Ladd
d558299b1f Add test for injecting multiple files 2018-02-02 11:07:13 -05:00
Bill Nottingham
ef5b040f70 Merge pull request #1121 from jeis2497052/devel
Propose small spelling changes
2018-02-02 10:55:23 -05:00
John Eismeier
026cbeb018 Propose small spelling changes 2018-02-02 10:49:55 -05:00
Matthew Jones
6163cc6b5c Merge pull request #1058 from ansible/scalable_clustering
Implement Container Cluster-based dynamic scaling
2018-02-02 09:22:06 -05:00
Brian Duffy
68057560e5 [bugfix-pem-validation] added unit test to simulate concatenated (catted) PEM data
Signed-off-by: Brian Duffy <bmduffy@gmail.com>
2018-02-02 01:20:31 +00:00
Brian Duffy
047ff7b55f [bugfix-pem-validation]
Signed-off-by: Brian Duffy <bmduffy@gmail.com>
2018-02-01 23:50:02 +00:00
Marliana Lara
d4a461e5b4 Switch from Array.map() to Array.forEach() 2018-02-01 16:57:10 -05:00
Marliana Lara
f9265ee329 Create an InstancePolicyList directive to replace the pre-existing
modal implementation

* Remove Instance-List-Policy controller
* Replace let with const when values aren't being reassigned
* Update CapacityAdjuster directive to use replace:true
* Assign fewer element-specific values
* Add more error handling
2018-02-01 16:57:10 -05:00
Marliana Lara
fa70d108d7 Apply UI feedback changes
* Remove input slider css mixin
* Remove unused dependencies
* Improve error handling by plugging in the ProcessErrors factory
2018-02-01 16:57:10 -05:00
Marliana Lara
e07f441e32 Add Instance enable/disable toggle to list 2018-02-01 16:57:10 -05:00
Marliana Lara
70786c53a7 Add capacity adjuster directive 2018-02-01 16:57:10 -05:00
Marliana Lara
342958ece3 Add stringToNumber directive 2018-02-01 16:57:09 -05:00
Marliana Lara
368101812c Add Instance and InstanceGroup models 2018-02-01 16:57:09 -05:00
Matthew Jones
70bf78e29f Apply capacity algorithm changes
* This also adds fields to the instance view for tracking cpu and
  memory usage as well as information on what the capacity ranges are
* Also adds a flag for enabling/disabling instances which removes them
  from all queues and has them stop processing new work
* The capacity is now based almost exclusively on a value relative
  to forks
* capacity_adjustment allows you to commit an instance to a certain
  number of forks, CPU-focused or memory-focused
* Each job run adds a single fork of overhead (that's the reasoning
  behind the +1)
2018-02-01 16:57:09 -05:00
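The fork-relative capacity idea from the commit above, sketched with assumed constants (AWX's tuned values differ):

```python
def compute_capacity(cpus, mem_gb, capacity_adjustment,
                     forks_per_cpu=4, mem_gb_per_fork=0.1):
    cpu_capacity = int(cpus * forks_per_cpu)
    mem_capacity = int(mem_gb / mem_gb_per_fork)
    low, high = sorted((cpu_capacity, mem_capacity))
    # capacity_adjustment slides between the conservative (0.0) and
    # generous (1.0) of the cpu- vs memory-derived fork counts.
    return int(low + capacity_adjustment * (high - low))

def job_impact(forks):
    # each job run costs its fork count plus one fork of overhead
    return forks + 1
```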
Matthew Jones
6a85fc38dd Add scalable cluster kubernetes support 2018-02-01 16:57:09 -05:00
Matthew Jones
6e9930a45f Use on_commit hook for triggering ig policy
* also apply console handlers to loggers for the dev environment
2018-02-01 16:56:43 -05:00
Matthew Jones
d9e774c4b6 Updates for automatic triggering of policies
* Switch policy router queue to not be "tower" so that we don't
  fall into a chicken/egg scenario
* Show fixed policy list in serializer so a user can determine if
  an instance is manually managed
* Change IG membership mixin to not directly handle applying topology
  changes. Instead it just makes sure the policy instance list is
  accurate
* Add create/delete hooks for instances and groups to trigger policy
  re-evaluation
* Update policy algorithm for fairer distribution
* Fix an issue where CELERY_ROUTES wasn't renamed after celery/django
  upgrade
* Update unit tests to be more explicit
* Update count calculations used by algorithm to only consider
  non-manual instances
* Adding unit tests and fixture
* Don't propagate logging messages from awx.main.tasks and
  awx.main.scheduler
* Use advisory lock to prevent policy eval conflicts
* Allow updating instance groups from view
2018-02-01 16:56:16 -05:00
Matthew Jones
56abfa732e Adding initial instance group policies
and policy evaluation planner
2018-02-01 16:56:15 -05:00
Matthew Jones
c819560d39 Add automatic deprovisioning support, only enabled for openshift
* Implement a config watcher for service restarts
* If the configmap bind point changes then restart all services
2018-02-01 16:51:40 -05:00
Chris Meyers
0e97dc4b84 Beat and celery clustering fixes
* use embedded beat rather than standalone
* dynamically set celeryd hostname at runtime
* add embedded beat flag to celery startup
* Embedded beat mode routes will piggyback on the celery worker setup
signal
2018-02-01 16:47:33 -05:00
Matthew Jones
624289bed7 Add support for directly managing instance groups
* Associating/Disassociating an instance with a group
* Triggering a topology rebuild on that change
* Force rabbitmq cleanup of offline nodes
* Automatically check for dependent service startup
* Fetch and set hostname for celery so it doesn't clobber other
  celeries
* Rely on celery init signals to dynamically set listen queues
* Removing old total_capacity instance manager property
2018-02-01 16:46:44 -05:00
Matthew Jones
6ede1dfbea Update openshift installer to support rabbitmq autoscale
* Switch rabbitmq container out for one that supports autoscale
* Add etcd pod to support autoscale negotiation
2018-02-01 16:38:10 -05:00
Chris Meyers
c9ff3e99b8 celeryd attach to queues dynamically
* Based on the tower topology (Instance and InstanceGroup
relationships), have celery dynamically listen to queues on boot
* Add celery task capable of "refreshing" what queues each celeryd
worker listens to. This will be used to support changes in the topology.
* Cleaned up some celery task definitions.
* Converged wrongly targeted job launch/finish messages to 'tower'
queue, rather than a 1-off queue.
* Dynamically route celery tasks destined for the local node
* separate beat process

add support for separate beat process
2018-02-01 16:37:33 -05:00
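Celery's worker-setup signal makes this kind of boot-time queue attachment straightforward; a minimal sketch, where queues_for_hostname is a stand-in for the Instance/InstanceGroup lookup:

```python
from celery.signals import celeryd_after_setup

def queues_for_hostname(hostname):
    # Stand-in for a lookup against the Instance/InstanceGroup topology.
    return ['tower', hostname]

@celeryd_after_setup.connect
def attach_instance_queues(sender, instance, **kwargs):
    # sender is the worker's nodename; instance is the Worker object.
    for queue in queues_for_hostname(sender):
        instance.app.amqp.queues.select_add(queue)
```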
John Mitchell
7e400413db Merge pull request #625 from jlmitch5/fixXSS
fix xss vulnerabilities
2018-02-01 11:49:35 -05:00
Chris Meyers
290a296f9f add xss test for jobs schedules
* Test for tooltip regression on job schedules list entries
2018-02-01 10:55:13 -05:00
mabashian
e57d200d6e Implemented generic prompt modal for launching and saving launch configurations. Added UI support for prompting on job template schedules. 2018-01-31 15:40:23 -05:00
Jim Ladd
4c1dddcaf9 Respond to PR feedback 2018-01-31 11:22:01 -05:00
John Mitchell
28596b7d5e fix xss vulnerabilities
- on host recent jobs popover
- on schedule name tooltip
2018-01-30 16:30:00 -05:00
Jake McDermott
a2e274d1f9 Merge pull request #623 from jakemcdermott/fix-ansible-tower-7871
bump templates form credential_types page limit
2018-01-30 14:48:36 -05:00
Ryan Petrello
d96cc51431 Merge pull request #624 from ryanpetrello/release_3.2.3
fix a bug when testing UDP-based logging configuration
2018-01-30 10:27:39 -05:00
Jake McDermott
4cd6a6e566 add fields for saml + 2fa 2018-01-30 00:28:13 -05:00
Jake McDermott
ed138fccf6 add forms + select for additional ldap servers 2018-01-30 00:28:02 -05:00
Jake McDermott
44d223b6c9 add fields for team and organization saml attribute mappings 2018-01-30 00:27:51 -05:00
Ryan Petrello
982539f444 fix a bug when testing UDP-based logging configuration
see: https://github.com/ansible/ansible-tower/issues/7868
2018-01-29 12:05:51 -05:00
Jake McDermott
4c79e6912e bump templates form credential_types page limit 2018-01-28 21:50:30 -05:00
Jim Ladd
4b13bcdce2 Update tests for custom credentials 2018-01-28 21:02:48 -05:00
Jim Ladd
18178c83b3 Validate single and multi-file injection 2018-01-28 21:02:47 -05:00
Jim Ladd
7aa1ae69b3 Add backwards compatibility for injecting single file 2018-01-28 20:50:44 -05:00
Jim Ladd
286a70f2ca Add support for multi-file injection in custom creds 2018-01-28 20:50:43 -05:00
Matthew Jones
42098bfa6d Merge pull request #621 from ryanpetrello/set_stat_workflow_race_condition
don't process artifacts from custom `set_stat` calls asynchronously
2018-01-24 10:27:19 -05:00
Wayne Witzel III
b205630490 Merge pull request #622 from wwitzel3/release_3.2.3
Wait for Slack RTM API websocket connection to be established
2018-01-24 08:59:45 -05:00
Wayne Witzel III
aa469d730e Wait for Slack RTM API websocket connection to be established 2018-01-24 13:48:42 +00:00
Ryan Petrello
d57470ce49 don't process artifacts from custom set_stat calls asynchronously
previously, we persisted custom artifacts to the database on
`Job.artifacts` via the callback receiver.  when the callback receiver
is backed up processing events, this can result in race conditions for
workflows where a playbook calls `set_stat()`, but the artifact data is
not persisted in the database before the next job in the workflow starts

see: https://github.com/ansible/ansible-tower/issues/7831
2018-01-23 17:09:23 -05:00
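A sketch of the fix's shape: persist `playbook_on_stats` artifacts inline in the job's own process, so the write is committed before the workflow advances (event and field names are illustrative):

```python
def handle_stats_event(job, event):
    if event.get('event') != 'playbook_on_stats':
        return
    artifacts = event.get('event_data', {}).get('artifact_data')
    if artifacts:
        # Save synchronously instead of queueing the write through the
        # (possibly backlogged) callback receiver, closing the race with
        # the next node in the workflow.
        job.artifacts = artifacts
        job.save(update_fields=['artifacts'])
```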
Ryan Petrello
fa9c6287f7 Merge pull request #620 from ryanpetrello/fix-815
don't overwrite env['ANSIBLE_LIBRARY'] when fact caching is enabled
2018-01-15 13:55:42 -05:00
Ryan Petrello
2955842c44 don't overwrite env['ANSIBLE_LIBRARY'] when fact caching is enabled
see: https://github.com/ansible/awx/issues/815
see: https://github.com/ansible/ansible-tower/issues/7830
2018-01-15 13:39:46 -05:00
Ryan Petrello
64028dba66 Merge pull request #619 from ryanpetrello/file_based_tower_fact_cache
replace our memcached-based fact cache implementation with local files
2018-01-15 11:57:18 -05:00
Ryan Petrello
e1d50a43fd only allow facts to cache in the proper file system location 2018-01-15 11:45:49 -05:00
Ryan Petrello
983b192a45 replace our memcached-based fact cache implementation with local files
see: https://github.com/ansible/ansible-tower/issues/7840
2018-01-15 09:16:44 -05:00
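Together, these two commits amount to a per-host fact file plus a containment check; a sketch with assumed layout and names:

```python
import json
import os

def fact_path(cache_dir, hostname):
    path = os.path.realpath(os.path.join(cache_dir, hostname))
    # Refuse anything that resolves outside the cache directory, so a
    # crafted hostname like '../../etc/passwd' cannot escape it.
    if not path.startswith(os.path.realpath(cache_dir) + os.sep):
        raise ValueError('facts may only be cached under %s' % cache_dir)
    return path

def store_facts(cache_dir, hostname, facts):
    with open(fact_path(cache_dir, hostname), 'w') as f:
        json.dump(facts, f)
```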
Ryan Petrello
e0c04df1ee Merge pull request #618 from ryanpetrello/become_who_you_were_meant_to_be
add support for new "BECOME" prompt in Ansible 2.5+
2018-01-12 11:45:08 -05:00
Ryan Petrello
563f730268 add support for new "BECOME" prompt in Ansible 2.5+
see: https://github.com/ansible/ansible-tower/issues/7850
2018-01-12 10:40:40 -05:00
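AWX answers interactive prompts through pexpect, so supporting 2.5 is essentially one more pattern in the expect table. A sketch with placeholder passwords and playbook name:

```python
import re
import pexpect

# Map prompt patterns to the passwords that answer them; the BECOME
# pattern covers Ansible 2.5+'s generic privilege-escalation prompt.
expect_passwords = {
    re.compile(r'Enter passphrase for .*:\s*?$', re.M): 'ssh-passphrase',
    re.compile(r'BECOME password.*:\s*?$', re.M): 'become-password',
}

child = pexpect.spawn('ansible-playbook', ['site.yml', '--ask-become-pass'],
                      encoding='utf-8')
patterns = list(expect_passwords) + [pexpect.EOF]
while True:
    idx = child.expect(patterns)
    if patterns[idx] is pexpect.EOF:
        break
    child.sendline(expect_passwords[patterns[idx]])
```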
Ryan Petrello
89b9d7ac8b Merge pull request #617 from ryanpetrello/release_3.2.3
fix a bug in inventory generation for isolated nodes
2018-01-11 11:04:09 -05:00
Ryan Petrello
b8758044e0 fix a bug in inventory generation for isolated nodes
see: https://github.com/ansible/ansible-tower/issues/7849
related: https://github.com/ansible/awx/pull/551
2018-01-11 10:41:58 -05:00
Ryan Petrello
4c40791d06 properly handle unicode for isolated job buffers
from: https://docs.python.org/2/library/stringio.html#module-cStringIO
"Unlike the StringIO module, this module is not able to accept Unicode
strings that cannot be encoded as plain ASCII strings."

see: https://github.com/ansible/ansible-tower/issues/7846
2018-01-10 10:56:59 -05:00
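A Python 2 sketch of the failure mode quoted above; the fix is to buffer isolated job output in a unicode-safe StringIO rather than cStringIO:

```python
import cStringIO
import StringIO

buf = cStringIO.StringIO()
try:
    buf.write(u'caf\xe9')      # non-ASCII unicode: raises UnicodeEncodeError
except UnicodeEncodeError:
    buf = StringIO.StringIO()  # pure-Python StringIO accepts unicode
    buf.write(u'caf\xe9')
```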
Aaron Tan
a2fd78add4 Implement item copy feature
See the acceptance doc for implementation details.

Signed-off-by: Aaron Tan <jangsutsr@gmail.com>
2018-01-02 10:20:44 -05:00
355 changed files with 10924 additions and 3190 deletions

View File

@@ -1,31 +0,0 @@
sudo: false
language: python
python:
- '2.7'
env:
- TOXENV=api-lint
- TOXENV=api
- TOXENV=ui-lint
- TOXENV=ui
install:
- pip install tox
script:
- tox
# after_success:
# - TOXENV=coveralls tox
addons:
apt:
packages:
- swig
- libxmlsec1-dev
- postgresql-9.5
- libssl-dev
cache:
pip: true
directories:
- node_modules
- .tox
services:
- mongodb
# Enable when we stop using sqlite for API tests
# - postgresql

View File

@@ -24,6 +24,7 @@ Have questions about this document or anything not covered here? Come chat with
* [Start a shell](#start-the-shell)
* [Create a superuser](#create-a-superuser)
* [Load the data](#load-the-data)
* [Building API Documentation](#build-documentation)
* [Accessing the AWX web interface](#accessing-the-awx-web-interface)
* [Purging containers and images](#purging-containers-and-images)
* [What should I work on?](#what-should-i-work-on)
@@ -261,6 +262,20 @@ You can optionally load some demo data. This will create a demo project, invento
> This information will persist in the database running in the `tools_postgres_1` container, until the container is removed. You may periodically need to recreate
this container, and thus the database, if the database schema changes in an upstream commit.
##### Building API Documentation
AWX includes support for building [Swagger/OpenAPI
documentation](https://swagger.io). To build the documentation locally, run:
```bash
(container)/awx_devel$ make swagger
```
This will write a file named `swagger.json` that contains the API specification
in OpenAPI format. A variety of online tools are available for translating
this data into more consumable formats (such as HTML). http://editor.swagger.io
is an example of one such service.
### Accessing the AWX web interface
You can now log into the AWX web interface at [https://localhost:8043](https://localhost:8043), and access the API directly at [https://localhost:8043/api/](https://localhost:8043/api/).

View File

@@ -82,9 +82,9 @@ We currently support running AWX as a containerized application using Docker ima
The [installer](./installer) directory contains an [inventory](./installer/inventory) file, and a playbook, [install.yml](./installer/install.yml). You'll begin by setting variables in the inventory file according to the platform you wish to use, and then you'll start the image build and deployment process by running the playbook.
In the sections below, you'll find deployment details and instructions for each platform:
- [Docker and Docker Compose](#docker-and-docker-compose)
- [OpenShift](#openshift)
- [Kubernetes](#kubernetes).
- [Kubernetes](#kubernetes)
- [Docker or Docker Compose](#docker-or-docker-compose).
### Official vs Building Images

View File

@@ -23,7 +23,7 @@ COMPOSE_HOST ?= $(shell hostname)
VENV_BASE ?= /venv
SCL_PREFIX ?=
CELERY_SCHEDULE_FILE ?= /celerybeat-schedule
CELERY_SCHEDULE_FILE ?= /var/lib/awx/beat.db
DEV_DOCKER_TAG_BASE ?= gcr.io/ansible-tower-engineering
# Python packages to install only from source (not from binary wheels)
@@ -216,13 +216,11 @@ init:
. $(VENV_BASE)/awx/bin/activate; \
fi; \
$(MANAGEMENT_COMMAND) provision_instance --hostname=$(COMPOSE_HOST); \
$(MANAGEMENT_COMMAND) register_queue --queuename=tower --hostnames=$(COMPOSE_HOST);\
$(MANAGEMENT_COMMAND) register_queue --queuename=tower --instance_percent=100;\
if [ "$(AWX_GROUP_QUEUES)" == "tower,thepentagon" ]; then \
$(MANAGEMENT_COMMAND) provision_instance --hostname=isolated; \
$(MANAGEMENT_COMMAND) register_queue --queuename='thepentagon' --hostnames=isolated --controller=tower; \
$(MANAGEMENT_COMMAND) generate_isolated_key | ssh -o "StrictHostKeyChecking no" root@isolated 'cat > /root/.ssh/authorized_keys'; \
elif [ "$(AWX_GROUP_QUEUES)" != "tower" ]; then \
$(MANAGEMENT_COMMAND) register_queue --queuename=$(firstword $(subst $(comma), ,$(AWX_GROUP_QUEUES))) --hostnames=$(COMPOSE_HOST); \
fi;
# Refresh development environment after pulling new code.
@@ -299,7 +297,7 @@ uwsgi: collectstatic
@if [ "$(VENV_BASE)" ]; then \
. $(VENV_BASE)/awx/bin/activate; \
fi; \
uwsgi -b 32768 --socket 127.0.0.1:8050 --module=awx.wsgi:application --home=/venv/awx --chdir=/awx_devel/ --vacuum --processes=5 --harakiri=120 --master --no-orphans --py-autoreload 1 --max-requests=1000 --stats /tmp/stats.socket --master-fifo=/awxfifo --lazy-apps --logformat "%(addr) %(method) %(uri) - %(proto) %(status)" --hook-accepting1-once="exec:/bin/sh -c '[ -f /tmp/celery_pid ] && kill -1 `cat /tmp/celery_pid`'"
uwsgi -b 32768 --socket 127.0.0.1:8050 --module=awx.wsgi:application --home=/venv/awx --chdir=/awx_devel/ --vacuum --processes=5 --harakiri=120 --master --no-orphans --py-autoreload 1 --max-requests=1000 --stats /tmp/stats.socket --master-fifo=/awxfifo --lazy-apps --logformat "%(addr) %(method) %(uri) - %(proto) %(status)" --hook-accepting1-once="exec:/bin/sh -c '[ -f /tmp/celery_pid ] && kill -1 `cat /tmp/celery_pid` || true'"
daphne:
@if [ "$(VENV_BASE)" ]; then \
@@ -326,7 +324,7 @@ celeryd:
@if [ "$(VENV_BASE)" ]; then \
. $(VENV_BASE)/awx/bin/activate; \
fi; \
celery worker -A awx -l DEBUG -B -Ofair --autoscale=100,4 --schedule=$(CELERY_SCHEDULE_FILE) -Q tower_scheduler,tower_broadcast_all,$(COMPOSE_HOST),$(AWX_GROUP_QUEUES) -n celery@$(COMPOSE_HOST) --pidfile /tmp/celery_pid
celery worker -A awx -l DEBUG -B -Ofair --autoscale=100,4 --schedule=$(CELERY_SCHEDULE_FILE) -Q tower_broadcast_all -n celery@$(COMPOSE_HOST) --pidfile /tmp/celery_pid
# Run to start the zeromq callback receiver
receiver:
@@ -365,6 +363,12 @@ pyflakes: reports
pylint: reports
@(set -o pipefail && $@ | reports/$@.report)
swagger: reports
@if [ "$(VENV_BASE)" ]; then \
. $(VENV_BASE)/awx/bin/activate; \
fi; \
(set -o pipefail && py.test awx/conf/tests/functional awx/main/tests/functional/api awx/main/tests/docs --release=$(VERSION_TARGET) | tee reports/$@.report)
check: flake8 pep8 # pyflakes pylint
TEST_DIRS ?= awx/main/tests/unit awx/main/tests/functional awx/conf/tests awx/sso/tests

View File

@@ -5,6 +5,7 @@
import inspect
import logging
import time
import six
# Django
from django.conf import settings
@@ -26,6 +27,10 @@ from rest_framework import generics
from rest_framework.response import Response
from rest_framework import status
from rest_framework import views
from rest_framework.permissions import AllowAny
# cryptography
from cryptography.fernet import InvalidToken
# AWX
from awx.api.filters import FieldLookupBackend
@@ -33,9 +38,9 @@ from awx.main.models import * # noqa
from awx.main.access import access_registry
from awx.main.utils import * # noqa
from awx.main.utils.db import get_all_field_names
from awx.api.serializers import ResourceAccessListElementSerializer
from awx.api.serializers import ResourceAccessListElementSerializer, CopySerializer
from awx.api.versioning import URLPathVersioning, get_request_version
from awx.api.metadata import SublistAttachDetatchMetadata
from awx.api.metadata import SublistAttachDetatchMetadata, Metadata
__all__ = ['APIView', 'GenericAPIView', 'ListAPIView', 'SimpleListAPIView',
'ListCreateAPIView', 'SubListAPIView', 'SubListCreateAPIView',
@@ -47,7 +52,8 @@ __all__ = ['APIView', 'GenericAPIView', 'ListAPIView', 'SimpleListAPIView',
'ResourceAccessList',
'ParentMixin',
'DeleteLastUnattachLabelMixin',
'SubListAttachDetachAPIView',]
'SubListAttachDetachAPIView',
'CopyAPIView']
logger = logging.getLogger('awx.api.generics')
analytics_logger = logging.getLogger('awx.analytics.performance')
@@ -91,8 +97,17 @@ def get_view_description(cls, request, html=False):
return mark_safe(desc)
def get_default_schema():
if settings.SETTINGS_MODULE == 'awx.settings.development':
from awx.api.swagger import AutoSchema
return AutoSchema()
else:
return views.APIView.schema
class APIView(views.APIView):
schema = get_default_schema()
versioning_class = URLPathVersioning
def initialize_request(self, request, *args, **kwargs):
@@ -176,27 +191,14 @@ class APIView(views.APIView):
and in the browsable API.
"""
func = self.settings.VIEW_DESCRIPTION_FUNCTION
return func(self.__class__, self._request, html)
return func(self.__class__, getattr(self, '_request', None), html)
def get_description_context(self):
return {
'view': self,
'docstring': type(self).__doc__ or '',
'new_in_13': getattr(self, 'new_in_13', False),
'new_in_14': getattr(self, 'new_in_14', False),
'new_in_145': getattr(self, 'new_in_145', False),
'new_in_148': getattr(self, 'new_in_148', False),
'new_in_200': getattr(self, 'new_in_200', False),
'new_in_210': getattr(self, 'new_in_210', False),
'new_in_220': getattr(self, 'new_in_220', False),
'new_in_230': getattr(self, 'new_in_230', False),
'new_in_240': getattr(self, 'new_in_240', False),
'new_in_300': getattr(self, 'new_in_300', False),
'new_in_310': getattr(self, 'new_in_310', False),
'new_in_320': getattr(self, 'new_in_320', False),
'new_in_330': getattr(self, 'new_in_330', False),
'new_in_api_v2': getattr(self, 'new_in_api_v2', False),
'deprecated': getattr(self, 'deprecated', False),
'swagger_method': getattr(self.request, 'swagger_method', None),
}
def get_description(self, request, html=False):
@@ -214,7 +216,7 @@ class APIView(views.APIView):
context['deprecated'] = True
description = render_to_string(template_list, context)
if context.get('deprecated'):
if context.get('deprecated') and context.get('swagger_method') is None:
# render deprecation messages at the very top
description = '\n'.join([render_to_string('api/_deprecated.md', context), description])
return description
@@ -747,3 +749,152 @@ class ResourceAccessList(ParentMixin, ListAPIView):
for r in roles:
ancestors.update(set(r.ancestors.all()))
return User.objects.filter(roles__in=list(ancestors)).distinct()
def trigger_delayed_deep_copy(*args, **kwargs):
from awx.main.tasks import deep_copy_model_obj
connection.on_commit(lambda: deep_copy_model_obj.delay(*args, **kwargs))
class CopyAPIView(GenericAPIView):
serializer_class = CopySerializer
permission_classes = (AllowAny,)
copy_return_serializer_class = None
new_in_330 = True
new_in_api_v2 = True
def _get_copy_return_serializer(self, *args, **kwargs):
if not self.copy_return_serializer_class:
return self.get_serializer(*args, **kwargs)
serializer_class_store = self.serializer_class
self.serializer_class = self.copy_return_serializer_class
ret = self.get_serializer(*args, **kwargs)
self.serializer_class = serializer_class_store
return ret
@staticmethod
def _decrypt_model_field_if_needed(obj, field_name, field_val):
if field_name in getattr(type(obj), 'REENCRYPTION_BLACKLIST_AT_COPY', []):
return field_val
if isinstance(field_val, dict):
for sub_field in field_val:
if isinstance(sub_field, six.string_types) \
and isinstance(field_val[sub_field], six.string_types):
try:
field_val[sub_field] = decrypt_field(obj, field_name, sub_field)
except InvalidToken:
# Catching the corner case with v1 credential fields
field_val[sub_field] = decrypt_field(obj, sub_field)
elif isinstance(field_val, six.string_types):
field_val = decrypt_field(obj, field_name)
return field_val
def _build_create_dict(self, obj):
ret = {}
if self.copy_return_serializer_class:
all_fields = Metadata().get_serializer_info(
self._get_copy_return_serializer(), method='POST'
)
for field_name, field_info in all_fields.items():
if not hasattr(obj, field_name) or field_info.get('read_only', True):
continue
ret[field_name] = CopyAPIView._decrypt_model_field_if_needed(
obj, field_name, getattr(obj, field_name)
)
return ret
@staticmethod
def copy_model_obj(old_parent, new_parent, model, obj, creater, copy_name='', create_kwargs=None):
fields_to_preserve = set(getattr(model, 'FIELDS_TO_PRESERVE_AT_COPY', []))
fields_to_discard = set(getattr(model, 'FIELDS_TO_DISCARD_AT_COPY', []))
m2m_to_preserve = {}
o2m_to_preserve = {}
create_kwargs = create_kwargs or {}
for field_name in fields_to_discard:
create_kwargs.pop(field_name, None)
for field in model._meta.get_fields():
try:
field_val = getattr(obj, field.name)
except AttributeError:
continue
# Adjust copy blacklist fields here.
if field.name in fields_to_discard or field.name in [
'id', 'pk', 'polymorphic_ctype', 'unifiedjobtemplate_ptr', 'created_by', 'modified_by'
] or field.name.endswith('_role'):
create_kwargs.pop(field.name, None)
continue
if field.one_to_many:
if field.name in fields_to_preserve:
o2m_to_preserve[field.name] = field_val
elif field.many_to_many:
if field.name in fields_to_preserve and not old_parent:
m2m_to_preserve[field.name] = field_val
elif field.many_to_one and not field_val:
create_kwargs.pop(field.name, None)
elif field.many_to_one and field_val == old_parent:
create_kwargs[field.name] = new_parent
elif field.name == 'name' and not old_parent:
create_kwargs[field.name] = copy_name or field_val + ' copy'
elif field.name in fields_to_preserve:
create_kwargs[field.name] = CopyAPIView._decrypt_model_field_if_needed(
obj, field.name, field_val
)
new_obj = model.objects.create(**create_kwargs)
# Need to save separately because django-crum's get_current_user would
# not work properly in a non-request-response-cycle context.
new_obj.created_by = creater
new_obj.save()
for m2m in m2m_to_preserve:
for related_obj in m2m_to_preserve[m2m].all():
getattr(new_obj, m2m).add(related_obj)
if not old_parent:
sub_objects = []
for o2m in o2m_to_preserve:
for sub_obj in o2m_to_preserve[o2m].all():
sub_model = type(sub_obj)
sub_objects.append((sub_model.__module__, sub_model.__name__, sub_obj.pk))
return new_obj, sub_objects
ret = {obj: new_obj}
for o2m in o2m_to_preserve:
for sub_obj in o2m_to_preserve[o2m].all():
ret.update(CopyAPIView.copy_model_obj(obj, new_obj, type(sub_obj), sub_obj, creater))
return ret
def get(self, request, *args, **kwargs):
obj = self.get_object()
create_kwargs = self._build_create_dict(obj)
for key in create_kwargs:
create_kwargs[key] = getattr(create_kwargs[key], 'pk', None) or create_kwargs[key]
return Response({'can_copy': request.user.can_access(self.model, 'add', create_kwargs)})
def post(self, request, *args, **kwargs):
obj = self.get_object()
create_kwargs = self._build_create_dict(obj)
create_kwargs_check = {}
for key in create_kwargs:
create_kwargs_check[key] = getattr(create_kwargs[key], 'pk', None) or create_kwargs[key]
if not request.user.can_access(self.model, 'add', create_kwargs_check):
raise PermissionDenied()
serializer = self.get_serializer(data=request.data)
if not serializer.is_valid():
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
new_obj, sub_objs = CopyAPIView.copy_model_obj(
None, None, self.model, obj, request.user, create_kwargs=create_kwargs,
copy_name=serializer.validated_data.get('name', '')
)
if hasattr(new_obj, 'admin_role') and request.user not in new_obj.admin_role:
new_obj.admin_role.members.add(request.user)
if sub_objs:
permission_check_func = None
if hasattr(type(self), 'deep_copy_permission_check_func'):
permission_check_func = (
type(self).__module__, type(self).__name__, 'deep_copy_permission_check_func'
)
trigger_delayed_deep_copy(
self.model.__module__, self.model.__name__,
obj.pk, new_obj.pk, request.user.pk, sub_objs,
permission_check_func=permission_check_func
)
serializer = self._get_copy_return_serializer(new_obj)
return Response(serializer.data, status=status.HTTP_201_CREATED)

View File

@@ -190,23 +190,6 @@ class Metadata(metadata.SimpleMetadata):
finally:
delattr(view, '_request')
# Add version number in which view was added to Tower.
added_in_version = '1.2'
for version in ('3.2.0', '3.1.0', '3.0.0', '2.4.0', '2.3.0', '2.2.0',
'2.1.0', '2.0.0', '1.4.8', '1.4.5', '1.4', '1.3'):
if getattr(view, 'new_in_%s' % version.replace('.', ''), False):
added_in_version = version
break
metadata['added_in_version'] = added_in_version
# Add API version number in which view was added to Tower.
added_in_api_version = 'v1'
for version in ('v2',):
if getattr(view, 'new_in_api_%s' % version, False):
added_in_api_version = version
break
metadata['added_in_api_version'] = added_in_api_version
# Add type(s) handled by this view/serializer.
if hasattr(view, 'get_serializer'):
serializer = view.get_serializer()

View File

@@ -33,7 +33,7 @@ class OrderedDictLoader(yaml.SafeLoader):
key = self.construct_object(key_node, deep=deep)
try:
hash(key)
except TypeError, exc:
except TypeError as exc:
raise yaml.constructor.ConstructorError(
"while constructing a mapping", node.start_mark,
"found unacceptable key (%s)" % exc, key_node.start_mark

View File

@@ -5,6 +5,8 @@
from rest_framework import renderers
from rest_framework.request import override_method
import six
class BrowsableAPIRenderer(renderers.BrowsableAPIRenderer):
'''
@@ -69,8 +71,8 @@ class PlainTextRenderer(renderers.BaseRenderer):
format = 'txt'
def render(self, data, media_type=None, renderer_context=None):
if not isinstance(data, basestring):
data = unicode(data)
if not isinstance(data, six.string_types):
data = six.text_type(data)
return data.encode(self.charset)

View File

@@ -130,6 +130,22 @@ def reverse_gfk(content_object, request):
}
class CopySerializer(serializers.Serializer):
name = serializers.CharField()
def validate(self, attrs):
name = attrs.get('name')
view = self.context.get('view', None)
obj = view.get_object()
if name == obj.name:
raise serializers.ValidationError(_(
'The original object is already named {}, a copy from'
' it cannot have the same name.'.format(name)
))
return attrs
class BaseSerializerMetaclass(serializers.SerializerMetaclass):
'''
Custom metaclass to enable attribute inheritance from Meta objects on
@@ -1003,6 +1019,7 @@ class ProjectSerializer(UnifiedJobTemplateSerializer, ProjectOptionsSerializer):
notification_templates_error = self.reverse('api:project_notification_templates_error_list', kwargs={'pk': obj.pk}),
access_list = self.reverse('api:project_access_list', kwargs={'pk': obj.pk}),
object_roles = self.reverse('api:project_object_roles_list', kwargs={'pk': obj.pk}),
copy = self.reverse('api:project_copy', kwargs={'pk': obj.pk}),
))
if obj.organization:
res['organization'] = self.reverse('api:organization_detail',
@@ -1156,6 +1173,7 @@ class InventorySerializer(BaseSerializerWithVariables):
access_list = self.reverse('api:inventory_access_list', kwargs={'pk': obj.pk}),
object_roles = self.reverse('api:inventory_object_roles_list', kwargs={'pk': obj.pk}),
instance_groups = self.reverse('api:inventory_instance_groups_list', kwargs={'pk': obj.pk}),
copy = self.reverse('api:inventory_copy', kwargs={'pk': obj.pk}),
))
if obj.insights_credential:
res['insights_credential'] = self.reverse('api:credential_detail', kwargs={'pk': obj.insights_credential.pk})
@@ -1173,7 +1191,7 @@ class InventorySerializer(BaseSerializerWithVariables):
if host_filter:
try:
SmartFilter().query_from_string(host_filter)
except RuntimeError, e:
except RuntimeError as e:
raise models.base.ValidationError(e)
return host_filter
@@ -1513,6 +1531,7 @@ class CustomInventoryScriptSerializer(BaseSerializer):
res = super(CustomInventoryScriptSerializer, self).get_related(obj)
res.update(dict(
object_roles = self.reverse('api:inventory_script_object_roles_list', kwargs={'pk': obj.pk}),
copy = self.reverse('api:inventory_script_copy', kwargs={'pk': obj.pk}),
))
if obj.organization:
@@ -2070,6 +2089,7 @@ class CredentialSerializer(BaseSerializer):
object_roles = self.reverse('api:credential_object_roles_list', kwargs={'pk': obj.pk}),
owner_users = self.reverse('api:credential_owner_users_list', kwargs={'pk': obj.pk}),
owner_teams = self.reverse('api:credential_owner_teams_list', kwargs={'pk': obj.pk}),
copy = self.reverse('api:credential_copy', kwargs={'pk': obj.pk}),
))
# TODO: remove when API v1 is removed
@@ -2176,6 +2196,7 @@ class CredentialSerializer(BaseSerializer):
_('You cannot change the credential type of the credential, as it may break the functionality'
' of the resources using it.'),
)
return credential_type
@@ -2547,6 +2568,7 @@ class JobTemplateSerializer(JobTemplateMixin, UnifiedJobTemplateSerializer, JobO
labels = self.reverse('api:job_template_label_list', kwargs={'pk': obj.pk}),
object_roles = self.reverse('api:job_template_object_roles_list', kwargs={'pk': obj.pk}),
instance_groups = self.reverse('api:job_template_instance_groups_list', kwargs={'pk': obj.pk}),
copy = self.reverse('api:job_template_copy', kwargs={'pk': obj.pk}),
))
if obj.host_config_key:
res['callback'] = self.reverse('api:job_template_callback', kwargs={'pk': obj.pk})
@@ -2790,10 +2812,6 @@ class JobRelaunchSerializer(BaseSerializer):
def validate(self, attrs):
obj = self.context.get('obj')
if not obj.credential and not obj.vault_credential:
raise serializers.ValidationError(
dict(credential=[_("Neither credential nor vault credential provided.")])
)
if obj.project is None:
raise serializers.ValidationError(dict(errors=[_("Job Template Project is missing or undefined.")]))
if obj.inventory is None or obj.inventory.pending_deletion:
@@ -2968,7 +2986,14 @@ class SystemJobSerializer(UnifiedJobSerializer):
return res
def get_result_stdout(self, obj):
return obj.result_stdout
try:
return obj.result_stdout
except StdoutMaxBytesExceeded as e:
return _(
"Standard Output too large to display ({text_size} bytes), "
"only download supported for sizes over {supported_size} bytes").format(
text_size=e.total, supported_size=e.supported
)
class SystemJobCancelSerializer(SystemJobSerializer):
@@ -3107,6 +3132,12 @@ class LaunchConfigurationBaseSerializer(BaseSerializer):
ret['extra_data'] = obj.display_extra_data()
return ret
def get_summary_fields(self, obj):
summary_fields = super(LaunchConfigurationBaseSerializer, self).get_summary_fields(obj)
# Credential would be an empty dictionary in this case
summary_fields.pop('credential', None)
return summary_fields
def validate(self, attrs):
attrs = super(LaunchConfigurationBaseSerializer, self).validate(attrs)
@@ -3116,19 +3147,27 @@ class LaunchConfigurationBaseSerializer(BaseSerializer):
elif self.instance:
ujt = self.instance.unified_job_template
# Insert survey_passwords to track redacted variables
# Replace $encrypted$ submissions with db value if exists
# build additional field survey_passwords to track redacted variables
if 'extra_data' in attrs:
extra_data = parse_yaml_or_json(attrs.get('extra_data', {}))
if hasattr(ujt, 'survey_password_variables'):
# Prepare additional field survey_passwords for save
password_dict = {}
for key in ujt.survey_password_variables():
if key in extra_data:
password_dict[key] = REPLACE_STR
if not self.instance or password_dict != self.instance.survey_passwords:
attrs['survey_passwords'] = password_dict
attrs['survey_passwords'] = password_dict.copy()
# Force dict type (cannot preserve YAML formatting if passwords are involved)
if not isinstance(attrs['extra_data'], dict):
attrs['extra_data'] = parse_yaml_or_json(attrs['extra_data'])
# Encrypt the extra_data for save, only current password vars in JT survey
encrypt_dict(attrs['extra_data'], password_dict.keys())
# For any raw $encrypted$ string, either
# - replace with existing DB value
# - raise a validation error
# - remove key from extra_data if survey default is present
if self.instance:
db_extra_data = parse_yaml_or_json(self.instance.extra_data)
else:
@@ -3136,8 +3175,13 @@ class LaunchConfigurationBaseSerializer(BaseSerializer):
for key in password_dict.keys():
if attrs['extra_data'].get(key, None) == REPLACE_STR:
if key not in db_extra_data:
raise serializers.ValidationError(
_('Provided variable {} has no database value to replace with.').format(key))
element = ujt.pivot_spec(ujt.survey_spec)[key]
if 'default' in element and element['default']:
attrs['survey_passwords'].pop(key, None)
attrs['extra_data'].pop(key, None)
else:
raise serializers.ValidationError(
{"extra_data": _('Provided variable {} has no database value to replace with.').format(key)})
else:
attrs['extra_data'][key] = db_extra_data[key]
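Read as plain Python, the `$encrypted$` handling above reduces to the following standalone sketch; the function name and the simplified survey-default lookup are illustrative, not AWX's API:

```python
REPLACE_STR = '$encrypted$'

def merge_encrypted_submissions(extra_data, db_extra_data, survey_defaults):
    """Resolve each password variable submitted as the $encrypted$ sentinel:
    restore the stored value, fall back to a survey default, or reject."""
    for key, value in list(extra_data.items()):
        if value != REPLACE_STR:
            continue
        if key in db_extra_data:
            extra_data[key] = db_extra_data[key]   # reuse the saved secret
        elif survey_defaults.get(key):
            del extra_data[key]                    # the survey default applies
        else:
            raise ValueError(
                'Provided variable {} has no database value to replace with.'.format(key))
    return extra_data
```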
@@ -3220,6 +3264,9 @@ class WorkflowJobTemplateNodeSerializer(LaunchConfigurationBaseSerializer):
cred = deprecated_fields['credential']
attrs['credential'] = cred
if cred is not None:
if not ujt_obj.ask_credential_on_launch:
raise serializers.ValidationError({"credential": _(
"Related template is not configured to accept credentials on launch.")})
cred = Credential.objects.get(pk=cred)
view = self.context.get('view', None)
if (not view) or (not view.request) or (view.request.user not in cred.use_role):
@@ -3684,15 +3731,30 @@ class JobLaunchSerializer(BaseSerializer):
distinct_cred_kinds = []
for cred in accepted.get('credentials', []):
if cred.unique_hash() in distinct_cred_kinds:
errors['credentials'] = _('Cannot assign multiple %s credentials.' % cred.credential_type.name)
errors.setdefault('credentials', []).append(_(
'Cannot assign multiple {} credentials.'
).format(cred.unique_hash(display=True)))
distinct_cred_kinds.append(cred.unique_hash())
# Prohibit removing credentials from the JT list (unsupported for now)
template_credentials = template.credentials.all()
if 'credentials' in attrs:
removed_creds = set(template_credentials) - set(attrs['credentials'])
provided_mapping = Credential.unique_dict(attrs['credentials'])
for cred in removed_creds:
if cred.unique_hash() in provided_mapping.keys():
continue # User replaced credential with new of same type
errors.setdefault('credentials', []).append(_(
'Removing {} credential at launch time without replacement is not supported. '
'Provided list lacked credential(s): {}.'
).format(cred.unique_hash(display=True), ', '.join([str(c) for c in removed_creds])))
# verify that credentials (either provided or existing) don't
# require launch-time passwords that have not been provided
if 'credentials' in accepted:
launch_credentials = accepted['credentials']
else:
launch_credentials = template.credentials.all()
launch_credentials = template_credentials
passwords = attrs.get('credential_passwords', {}) # get from original attrs
passwords_lacking = []
for cred in launch_credentials:
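The per-kind bookkeeping above is easier to see outside diff form; in this sketch `unique_hash()` is stubbed with the credential-type name, which is an assumption:

```python
def find_duplicate_kinds(credentials):
    """Collect one error per credential kind that is supplied more than once."""
    errors, seen = [], set()
    for cred in credentials:
        kind = cred['credential_type']   # stand-in for Credential.unique_hash()
        if kind in seen:
            errors.append('Cannot assign multiple {} credentials.'.format(kind))
        seen.add(kind)
    return errors

print(find_duplicate_kinds([
    {'credential_type': 'ssh'},
    {'credential_type': 'ssh'},      # duplicate kind -> one error
    {'credential_type': 'vault'},
]))
```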
@@ -3782,6 +3844,7 @@ class NotificationTemplateSerializer(BaseSerializer):
res.update(dict(
test = self.reverse('api:notification_template_test', kwargs={'pk': obj.pk}),
notifications = self.reverse('api:notification_template_notification_list', kwargs={'pk': obj.pk}),
copy = self.reverse('api:notification_template_copy', kwargs={'pk': obj.pk}),
))
if obj.organization:
res['organization'] = self.reverse('api:organization_detail', kwargs={'pk': obj.organization.pk})
@@ -3887,6 +3950,7 @@ class SchedulePreviewSerializer(BaseSerializer):
# - BYYEARDAY
# - BYWEEKNO
# - Multiple DTSTART or RRULE elements
# - Can't contain both COUNT and UNTIL
# - COUNT > 999
def validate_rrule(self, value):
rrule_value = value
@@ -3921,6 +3985,8 @@ class SchedulePreviewSerializer(BaseSerializer):
raise serializers.ValidationError(_("BYYEARDAY not supported."))
if 'byweekno' in rrule_value.lower():
raise serializers.ValidationError(_("BYWEEKNO not supported."))
if 'COUNT' in rrule_value and 'UNTIL' in rrule_value:
raise serializers.ValidationError(_("RRULE may not contain both COUNT and UNTIL"))
if match_count:
count_val = match_count.groups()[0].strip().split("=")
if int(count_val[1]) > 999:
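A quick illustration of the new COUNT/UNTIL restriction, using the same plain substring test as the hunk rather than a full RRULE parser:

```python
def validate_rrule(rrule_value):
    # Same substring check as the serializer; a real RRULE parser is not
    # needed to enforce the mutual exclusion.
    if 'COUNT' in rrule_value and 'UNTIL' in rrule_value:
        raise ValueError('RRULE may not contain both COUNT and UNTIL')
    return rrule_value

validate_rrule('DTSTART:20180101T000000Z RRULE:FREQ=DAILY;COUNT=5')  # passes
# validate_rrule('... RRULE:FREQ=DAILY;COUNT=5;UNTIL=20190101T000000Z')  # raises
```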
@@ -3977,8 +4043,10 @@ class InstanceSerializer(BaseSerializer):
class Meta:
model = Instance
fields = ("id", "type", "url", "related", "uuid", "hostname", "created", "modified",
"version", "capacity", "consumed_capacity", "percent_capacity_remaining", "jobs_running")
read_only_fields = ('uuid', 'hostname', 'version')
fields = ("id", "type", "url", "related", "uuid", "hostname", "created", "modified", 'capacity_adjustment',
"version", "capacity", "consumed_capacity", "percent_capacity_remaining", "jobs_running",
"cpu", "memory", "cpu_capacity", "mem_capacity", "enabled")
def get_related(self, obj):
res = super(InstanceSerializer, self).get_related(obj)
@@ -4011,7 +4079,8 @@ class InstanceGroupSerializer(BaseSerializer):
model = InstanceGroup
fields = ("id", "type", "url", "related", "name", "created", "modified",
"capacity", "committed_capacity", "consumed_capacity",
"percent_capacity_remaining", "jobs_running", "instances", "controller")
"percent_capacity_remaining", "jobs_running", "instances", "controller",
"policy_instance_percentage", "policy_instance_minimum", "policy_instance_list")
def get_related(self, obj):
res = super(InstanceGroupSerializer, self).get_related(obj)

awx/api/swagger.py
View File

@@ -0,0 +1,103 @@
import json
import warnings
from coreapi.document import Object, Link
from rest_framework import exceptions
from rest_framework.permissions import AllowAny
from rest_framework.renderers import CoreJSONRenderer
from rest_framework.response import Response
from rest_framework.schemas import SchemaGenerator, AutoSchema as DRFAuthSchema
from rest_framework.views import APIView
from rest_framework_swagger import renderers
class AutoSchema(DRFAuthSchema):
def get_link(self, path, method, base_url):
link = super(AutoSchema, self).get_link(path, method, base_url)
try:
serializer = self.view.get_serializer()
except Exception:
serializer = None
warnings.warn('{}.get_serializer() raised an exception during '
'schema generation. Serializer fields will not be '
'generated for {} {}.'
.format(self.view.__class__.__name__, method, path))
link.__dict__['deprecated'] = getattr(self.view, 'deprecated', False)
# auto-generate a topic/tag for the serializer based on its model
if hasattr(self.view, 'swagger_topic'):
link.__dict__['topic'] = str(self.view.swagger_topic).title()
elif serializer and hasattr(serializer, 'Meta'):
link.__dict__['topic'] = str(
serializer.Meta.model._meta.verbose_name_plural
).title()
elif hasattr(self.view, 'model'):
link.__dict__['topic'] = str(self.view.model._meta.verbose_name_plural).title()
else:
warnings.warn('Could not determine a Swagger tag for path {}'.format(path))
return link
def get_description(self, path, method):
self.view._request = self.view.request
setattr(self.view.request, 'swagger_method', method)
description = super(AutoSchema, self).get_description(path, method)
return description
class SwaggerSchemaView(APIView):
_ignore_model_permissions = True
exclude_from_schema = True
permission_classes = [AllowAny]
renderer_classes = [
CoreJSONRenderer,
renderers.OpenAPIRenderer,
renderers.SwaggerUIRenderer
]
def get(self, request):
generator = SchemaGenerator(
title='Ansible Tower API',
patterns=None,
urlconf=None
)
schema = generator.get_schema(request=request)
# python core-api doesn't support the deprecation yet, so track it
# ourselves and return it in a response header
_deprecated = []
# By default, DRF OpenAPI serialization places all endpoints in
# a single node based on their root path (/api). Instead, we want to
# group them by topic/tag so that they're categorized in the rendered
# output
document = schema._data.pop('api')
for path, node in document.items():
if isinstance(node, Object):
for action in node.values():
topic = getattr(action, 'topic', None)
if topic:
schema._data.setdefault(topic, Object())
schema._data[topic]._data[path] = node
if isinstance(action, Object):
for link in action.links.values():
if link.deprecated:
_deprecated.append(link.url)
elif isinstance(node, Link):
topic = getattr(node, 'topic', None)
if topic:
schema._data.setdefault(topic, Object())
schema._data[topic]._data[path] = node
if not schema:
raise exceptions.ValidationError(
'The schema generator did not return a schema Document'
)
return Response(
schema,
headers={'X-Deprecated-Paths': json.dumps(_deprecated)}
)
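Given the tag resolution order in `get_link` above, a view can opt into a specific Swagger topic explicitly. A minimal sketch; the view name and endpoint are invented for illustration:

```python
from rest_framework.response import Response
from rest_framework.views import APIView

class PingView(APIView):
    # Checked first by AutoSchema.get_link, ahead of any serializer- or
    # model-derived tag; the value is title-cased in the rendered schema.
    swagger_topic = 'system configuration'
    deprecated = False   # deprecated paths are reported via X-Deprecated-Paths

    def get(self, request):
        return Response({'ping': 'pong'})
```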

View File

@@ -1,14 +0,0 @@
{% if not version_label_flag or version_label_flag == 'true' %}
{% if new_in_13 %}> _Added in AWX 1.3_{% endif %}
{% if new_in_14 %}> _Added in AWX 1.4_{% endif %}
{% if new_in_145 %}> _Added in Ansible Tower 1.4.5_{% endif %}
{% if new_in_148 %}> _Added in Ansible Tower 1.4.8_{% endif %}
{% if new_in_200 %}> _Added in Ansible Tower 2.0.0_{% endif %}
{% if new_in_220 %}> _Added in Ansible Tower 2.2.0_{% endif %}
{% if new_in_230 %}> _Added in Ansible Tower 2.3.0_{% endif %}
{% if new_in_240 %}> _Added in Ansible Tower 2.4.0_{% endif %}
{% if new_in_300 %}> _Added in Ansible Tower 3.0.0_{% endif %}
{% if new_in_310 %}> _New in Ansible Tower 3.1.0_{% endif %}
{% if new_in_320 %}> _New in Ansible Tower 3.2.0_{% endif %}
{% if new_in_330 %}> _New in Ansible Tower 3.3.0_{% endif %}
{% endif %}

View File

@@ -0,0 +1,3 @@
Relaunch an Ad Hoc Command:
Make a POST request to this resource to relaunch the ad hoc command. If any passwords or variables are required, they should be passed in via POST data. To determine what values are required to relaunch this ad hoc command, you may make a GET request to this endpoint.

View File

@@ -1,4 +1,5 @@
Site configuration settings and general information.
{% ifmeth GET %}
# Site configuration settings and general information
Make a GET request to this resource to retrieve the configuration containing
the following fields (some fields may not be visible to all users):
@@ -11,6 +12,10 @@ the following fields (some fields may not be visible to all users):
* `license_info`: Information about the current license.
* `version`: Version of Ansible Tower package installed.
* `eula`: The current End-User License Agreement
{% endifmeth %}
{% ifmeth POST %}
# Install or update an existing license
(_New in Ansible Tower 2.0.0_) Make a POST request to this resource as a super
user to install or update the existing license. The license data itself can
@@ -18,3 +23,11 @@ be POSTed as a normal json data structure.
(_New in Ansible Tower 2.1.1_) The POST must include a `eula_accepted` boolean
element indicating acceptance of the End-User License Agreement.
{% endifmeth %}
{% ifmeth DELETE %}
# Delete an existing license
(_New in Ansible Tower 2.0.0_) Make a DELETE request to this resource as a super
user to delete the existing license
{% endifmeth %}
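The POST contract above is small enough to show end to end. A hedged sketch using `requests`; the host, filename, and credentials are placeholders:

```python
import json
import requests

with open('license.json') as f:           # placeholder license file
    license_data = json.load(f)
license_data['eula_accepted'] = True      # required since Tower 2.1.1

resp = requests.post(
    'https://tower.example.com/api/v2/config/',   # placeholder host
    json=license_data,
    auth=('admin', 'password'),                   # must be a superuser
)
resp.raise_for_status()
```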

View File

@@ -1,3 +1 @@
{{ docstring }}
{% include "api/_new_in_awx.md" %}

View File

@@ -1,3 +1,5 @@
{% ifmeth POST %}
# Generate an Auth Token
Make a POST request to this resource with `username` and `password` fields to
obtain an authentication token to use for subsequent requests.
@@ -32,6 +34,10 @@ agent that originally obtained it.
Each request that uses the token for authentication will refresh its expiration
timestamp and keep it from expiring. A token only expires when it is not used
for the configured timeout interval (default 1800 seconds).
{% endifmeth %}
A DELETE request with the token set will cause the token to be invalidated and
no further requests can be made with it.
{% ifmeth DELETE %}
# Delete an Auth Token
A DELETE request with the token header set will cause the token to be
invalidated and no further requests can be made with it.
{% endifmeth %}

View File

@@ -1,9 +1,13 @@
{% ifmeth GET %}
# Retrieve {{ model_verbose_name|title }} Variable Data:
Make a GET request to this resource to retrieve all variables defined for this
Make a GET request to this resource to retrieve all variables defined for a
{{ model_verbose_name }}.
{% endifmeth %}
{% ifmeth PUT PATCH %}
# Update {{ model_verbose_name|title }} Variable Data:
Make a PUT request to this resource to update variables defined for this
Make a PUT or PATCH request to this resource to update variables defined for a
{{ model_verbose_name }}.
{% endifmeth %}

View File

@@ -38,5 +38,3 @@ Data about failed and successful hosts by inventory will be given as:
"id": 2,
"name": "Test Inventory"
},
{% include "api/_new_in_awx.md" %}

View File

@@ -1,3 +1,5 @@
# View Statistics for Job Runs
Make a GET request to this resource to retrieve aggregate statistics about job runs suitable for graphing.
## Parameters and Filtering
@@ -33,5 +35,3 @@ Data will be returned in the following format:
Each element contains an epoch timestamp represented in seconds and a numerical value indicating
the number of events during that time period
{% include "api/_new_in_awx.md" %}

View File

@@ -1,3 +1 @@
Make a GET request to this resource to retrieve aggregate statistics for Tower.
{% include "api/_new_in_awx.md" %}

View File

@@ -1,4 +1,4 @@
# List All {{ model_verbose_name_plural|title }} for this {{ parent_model_verbose_name|title }}:
# List All {{ model_verbose_name_plural|title }} for {{ parent_model_verbose_name|title|anora }}:
Make a GET request to this resource to retrieve a list of all
{{ model_verbose_name_plural }} directly or indirectly belonging to this

View File

@@ -1,9 +1,7 @@
# List Potential Child Groups for this {{ parent_model_verbose_name|title }}:
# List Potential Child Groups for {{ parent_model_verbose_name|title|anora }}:
Make a GET request to this resource to retrieve a list of
{{ model_verbose_name_plural }} available to be added as children of the
current {{ parent_model_verbose_name }}.
{% include "api/_list_common.md" %}
{% include "api/_new_in_awx.md" %}

View File

@@ -1,4 +1,4 @@
# List All {{ model_verbose_name_plural|title }} for this {{ parent_model_verbose_name|title }}:
# List All {{ model_verbose_name_plural|title }} for {{ parent_model_verbose_name|title|anora }}:
Make a GET request to this resource to retrieve a list of all
{{ model_verbose_name_plural }} of which the selected

View File

@@ -1,3 +1,5 @@
# List Fact Scans for a Specific Host Scan
Make a GET request to this resource to retrieve system tracking data for a particular scan
You may filter by datetime:
@@ -7,5 +9,3 @@ You may filter by datetime:
and module
`?datetime=2015-06-01&module=ansible`
{% include "api/_new_in_awx.md" %}

View File

@@ -1,3 +1,5 @@
# List Fact Scans for a Host by Module and Date
Make a GET request to this resource to retrieve system tracking scans by module and date/time
You may filter scan runs using the `from` and `to` properties:
@@ -7,5 +9,3 @@ You may filter scan runs using the `from` and `to` properties:
You may also filter by module
`?module=packages`
{% include "api/_new_in_awx.md" %}

View File

@@ -0,0 +1 @@
# List Red Hat Insights for a Host

View File

@@ -29,5 +29,3 @@ Response code from this action will be:
- 202 if some inventory source updates were successful, but some failed
- 400 if all of the inventory source updates failed
- 400 if there are no inventory sources in the inventory
{% include "api/_new_in_awx.md" %}

View File

@@ -1,7 +1,9 @@
# List Root {{ model_verbose_name_plural|title }} for this {{ parent_model_verbose_name|title }}:
{% ifmeth GET %}
# List Root {{ model_verbose_name_plural|title }} for {{ parent_model_verbose_name|title|anora }}:
Make a GET request to this resource to retrieve a list of root (top-level)
{{ model_verbose_name_plural }} associated with this
{{ parent_model_verbose_name }}.
{% include "api/_list_common.md" %}
{% endifmeth %}

View File

@@ -9,5 +9,3 @@ cancelled. The response will include the following field:
Make a POST request to this resource to cancel a pending or running inventory
update. The response status code will be 202 if successful, or 405 if the
update cannot be canceled.
{% include "api/_new_in_awx.md" %}

View File

@@ -9,5 +9,3 @@ from its inventory source. The response will include the following field:
Make a POST request to this resource to update the inventory source. If
successful, the response status code will be 202. If the inventory source is
not defined or cannot be updated, a 405 status code will be returned.
{% include "api/_new_in_awx.md" %}

View File

@@ -1,4 +1,4 @@
# Group Tree for this {{ model_verbose_name|title }}:
# Group Tree for {{ model_verbose_name|title|anora }}:
Make a GET request to this resource to retrieve a hierarchical view of groups
associated with the selected {{ model_verbose_name }}.
@@ -11,5 +11,3 @@ also containing a list of its children.
Each group data structure includes the following fields:
{% include "api/_result_fields_common.md" %}
{% include "api/_new_in_awx.md" %}

View File

@@ -1,10 +1,15 @@
# Cancel Job
{% ifmeth GET %}
# Determine if a Job can be cancelled
Make a GET request to this resource to determine if the job can be cancelled.
The response will include the following field:
* `can_cancel`: Indicates whether this job can be canceled (boolean, read-only)
{% endifmeth %}
{% ifmeth POST %}
# Cancel a Job
Make a POST request to this resource to cancel a pending or running job. The
response status code will be 202 if successful, or 405 if the job cannot be
canceled.
{% endifmeth %}

View File

@@ -23,5 +23,3 @@ Will show only failed plays. Alternatively `false` may be used.
?play__icontains=test
Will filter plays matching the substring `test`
{% include "api/_new_in_awx.md" %}

View File

@@ -25,5 +25,3 @@ Will show only failed plays. Alternatively `false` may be used.
?task__icontains=test
Will filter tasks matching the substring `test`
{% include "api/_new_in_awx.md" %}

View File

@@ -1,3 +1,3 @@
Relaunch a job:
Relaunch a Job:
Make a POST request to this resource to launch a job. If any passwords or variables are required, they should be passed in via POST data. To determine what values are required to launch a job based on this job template, you may make a GET request to this endpoint.

View File

@@ -1,4 +1,5 @@
# Start Job
{% ifmeth GET %}
# Determine if a Job can be started
Make a GET request to this resource to determine if the job can be started and
whether any passwords are required to start the job. The response will include
@@ -7,10 +8,14 @@ the following fields:
* `can_start`: Flag indicating if this job can be started (boolean, read-only)
* `passwords_needed_to_start`: Password names required to start the job (array,
read-only)
{% endifmeth %}
{% ifmeth POST %}
# Start a Job
Make a POST request to this resource to start the job. If any passwords are
required, they must be passed via POST data.
If successful, the response status code will be 202. If any required passwords
are not provided, a 400 status code will be returned. If the job cannot be
started, a 405 status code will be returned.
{% endifmeth %}

View File

@@ -1,13 +1,7 @@
{% with 'false' as version_label_flag %}
{% include "api/sub_list_create_api_view.md" %}
{% endwith %}
Labels not associated with any other resources are deleted. A label can become disassociated from a resource as a result of 3 events:
1. A label is explicitly disassociated from a related job template
2. A job with labels is deleted
3. A cleanup job deletes a job with labels
{% with 'true' as version_label_flag %}
{% include "api/_new_in_awx.md" %}
{% endwith %}

View File

@@ -1,8 +1,8 @@
{% ifmeth GET %}
# List {{ model_verbose_name_plural|title }}:
Make a GET request to this resource to retrieve the list of
{{ model_verbose_name_plural }}.
{% include "api/_list_common.md" %}
{% include "api/_new_in_awx.md" %}
{% endifmeth %}

View File

@@ -1,6 +1,6 @@
{% include "api/list_api_view.md" %}
# Create {{ model_verbose_name_plural|title }}:
# Create {{ model_verbose_name|title|anora }}:
Make a POST request to this resource with the following {{ model_verbose_name }}
fields to create a new {{ model_verbose_name }}:
@@ -8,5 +8,3 @@ fields to create a new {{ model_verbose_name }}:
{% with write_only=1 %}
{% include "api/_result_fields_common.md" with serializer_fields=serializer_create_fields %}
{% endwith %}
{% include "api/_new_in_awx.md" %}

View File

@@ -1,4 +1,4 @@
# Retrieve {{ model_verbose_name|title }} Playbooks:
Make GET request to this resource to retrieve a list of playbooks available
for this {{ model_verbose_name }}.
for {{ model_verbose_name|anora }}.

View File

@@ -9,5 +9,3 @@ cancelled. The response will include the following field:
Make a POST request to this resource to cancel a pending or running project
update. The response status code will be 202 if successful, or 405 if the
update cannot be canceled.
{% include "api/_new_in_awx.md" %}

View File

@@ -8,5 +8,3 @@ from its SCM source. The response will include the following field:
Make a POST request to this resource to update the project. If the project
cannot be updated, a 405 status code will be returned.
{% include "api/_new_in_awx.md" %}

View File

@@ -2,11 +2,9 @@
### Note: starting from api v2, this resource object can be accessed via its named URL.
{% endif %}
# Retrieve {{ model_verbose_name|title }}:
# Retrieve {{ model_verbose_name|title|anora }}:
Make GET request to this resource to retrieve a single {{ model_verbose_name }}
record containing the following fields:
{% include "api/_result_fields_common.md" %}
{% include "api/_new_in_awx.md" %}

View File

@@ -2,15 +2,17 @@
### Note: starting from api v2, this resource object can be accessed via its named URL.
{% endif %}
# Retrieve {{ model_verbose_name|title }}:
{% ifmeth GET %}
# Retrieve {{ model_verbose_name|title|anora }}:
Make GET request to this resource to retrieve a single {{ model_verbose_name }}
record containing the following fields:
{% include "api/_result_fields_common.md" %}
{% endifmeth %}
# Delete {{ model_verbose_name|title }}:
{% ifmeth DELETE %}
# Delete {{ model_verbose_name|title|anora }}:
Make a DELETE request to this resource to delete this {{ model_verbose_name }}.
{% include "api/_new_in_awx.md" %}
{% endifmeth %}

View File

@@ -2,14 +2,17 @@
### Note: starting from api v2, this resource object can be accessed via its named URL.
{% endif %}
# Retrieve {{ model_verbose_name|title }}:
{% ifmeth GET %}
# Retrieve {{ model_verbose_name|title|anora }}:
Make GET request to this resource to retrieve a single {{ model_verbose_name }}
record containing the following fields:
{% include "api/_result_fields_common.md" %}
{% endifmeth %}
# Update {{ model_verbose_name|title }}:
{% ifmeth PUT PATCH %}
# Update {{ model_verbose_name|title|anora }}:
Make a PUT or PATCH request to this resource to update this
{{ model_verbose_name }}. The following fields may be modified:
@@ -17,9 +20,12 @@ Make a PUT or PATCH request to this resource to update this
{% with write_only=1 %}
{% include "api/_result_fields_common.md" with serializer_fields=serializer_update_fields %}
{% endwith %}
{% endifmeth %}
{% ifmeth PUT %}
For a PUT request, include **all** fields in the request.
{% endifmeth %}
{% ifmeth PATCH %}
For a PATCH request, include only the fields that are being modified.
{% include "api/_new_in_awx.md" %}
{% endifmeth %}

View File

@@ -2,14 +2,17 @@
### Note: starting from api v2, this resource object can be accessed via its named URL.
{% endif %}
# Retrieve {{ model_verbose_name|title }}:
{% ifmeth GET %}
# Retrieve {{ model_verbose_name|title|anora }}:
Make GET request to this resource to retrieve a single {{ model_verbose_name }}
record containing the following fields:
{% include "api/_result_fields_common.md" %}
{% endifmeth %}
# Update {{ model_verbose_name|title }}:
{% ifmeth PUT PATCH %}
# Update {{ model_verbose_name|title|anora }}:
Make a PUT or PATCH request to this resource to update this
{{ model_verbose_name }}. The following fields may be modified:
@@ -17,13 +20,18 @@ Make a PUT or PATCH request to this resource to update this
{% with write_only=1 %}
{% include "api/_result_fields_common.md" with serializer_fields=serializer_update_fields %}
{% endwith %}
{% endifmeth %}
{% ifmeth PUT %}
For a PUT request, include **all** fields in the request.
{% endifmeth %}
{% ifmeth PATCH %}
For a PATCH request, include only the fields that are being modified.
{% endifmeth %}
# Delete {{ model_verbose_name|title }}:
{% ifmeth DELETE %}
# Delete {{ model_verbose_name|title|anora }}:
Make a DELETE request to this resource to delete this {{ model_verbose_name }}.
{% include "api/_new_in_awx.md" %}
{% endifmeth %}

View File

@@ -0,0 +1 @@
# Test Logging Configuration

View File

@@ -1,9 +1,9 @@
# List {{ model_verbose_name_plural|title }} for this {{ parent_model_verbose_name|title }}:
{% ifmeth GET %}
# List {{ model_verbose_name_plural|title }} for {{ parent_model_verbose_name|title|anora }}:
Make a GET request to this resource to retrieve a list of
{{ model_verbose_name_plural }} associated with the selected
{{ parent_model_verbose_name }}.
{% include "api/_list_common.md" %}
{% include "api/_new_in_awx.md" %}
{% endifmeth %}

View File

@@ -1,6 +1,6 @@
{% include "api/sub_list_api_view.md" %}
# Create {{ model_verbose_name_plural|title }} for this {{ parent_model_verbose_name|title }}:
# Create {{ model_verbose_name|title|anora }} for {{ parent_model_verbose_name|title|anora }}:
Make a POST request to this resource with the following {{ model_verbose_name }}
fields to create a new {{ model_verbose_name }} associated with this
@@ -25,7 +25,7 @@ delete the associated {{ model_verbose_name }}.
}
{% else %}
# Add {{ model_verbose_name_plural|title }} for this {{ parent_model_verbose_name|title }}:
# Add {{ model_verbose_name_plural|title }} for {{ parent_model_verbose_name|title|anora }}:
Make a POST request to this resource with only an `id` field to associate an
existing {{ model_verbose_name }} with this {{ parent_model_verbose_name }}.
@@ -37,5 +37,3 @@ remove the {{ model_verbose_name }} from this {{ parent_model_verbose_name }}
{% if model_verbose_name != "label" %} without deleting the {{ model_verbose_name }}{% endif %}.
{% endif %}
{% endif %}
{% include "api/_new_in_awx.md" %}

View File

@@ -1,12 +1,16 @@
# List Roles for this Team:
# List Roles for a Team:
{% ifmeth GET %}
Make a GET request to this resource to retrieve a list of roles associated with the selected team.
{% include "api/_list_common.md" %}
{% endifmeth %}
{% ifmeth POST %}
# Associate Roles with this Team:
Make a POST request to this resource to add or remove a role from this team. The following fields may be modified:
* `id`: The Role ID to add to the team. (int, required)
* `disassociate`: Provide if you want to remove the role. (any value, optional)
{% endifmeth %}
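The same two-field contract is used for both team and user role lists. A sketch of the association and disassociation calls; host, IDs, and auth are placeholders:

```python
import requests

BASE = 'https://tower.example.com/api/v2'   # placeholder host
AUTH = ('admin', 'password')                # placeholder credentials

# Grant role 42 to team 7
requests.post('{}/teams/7/roles/'.format(BASE), json={'id': 42}, auth=AUTH)

# Revoke it again; any value for `disassociate` triggers removal
requests.post('{}/teams/7/roles/'.format(BASE),
              json={'id': 42, 'disassociate': True}, auth=AUTH)
```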

View File

@@ -25,5 +25,3 @@ dark background.
Files over {{ settings.STDOUT_MAX_BYTES_DISPLAY|filesizeformat }} (configurable)
will not display in the browser. Use the `txt_download` or `ansi_download`
formats to download the file directly to view it.
{% include "api/_new_in_awx.md" %}

View File

@@ -1,3 +1,5 @@
# Retrieve Information about the current User
Make a GET request to retrieve user information about the current user.
One result should be returned containing the following fields:

View File

@@ -1,12 +1,16 @@
# List Roles for this User:
# List Roles for a User:
{% ifmeth GET %}
Make a GET request to this resource to retrieve a list of roles associated with the selected user.
{% include "api/_list_common.md" %}
{% endifmeth %}
{% ifmeth POST %}
# Associate Roles with this User:
Make a POST request to this resource to add or remove a role from this user. The following fields may be modified:
* `id`: The Role ID to add to the user. (int, required)
* `disassociate`: Provide if you want to remove the role. (any value, optional)
{% endifmeth %}

View File

@@ -11,6 +11,7 @@ from awx.api.views import (
CredentialObjectRolesList,
CredentialOwnerUsersList,
CredentialOwnerTeamsList,
CredentialCopy,
)
@@ -22,6 +23,7 @@ urls = [
url(r'^(?P<pk>[0-9]+)/object_roles/$', CredentialObjectRolesList.as_view(), name='credential_object_roles_list'),
url(r'^(?P<pk>[0-9]+)/owner_users/$', CredentialOwnerUsersList.as_view(), name='credential_owner_users_list'),
url(r'^(?P<pk>[0-9]+)/owner_teams/$', CredentialOwnerTeamsList.as_view(), name='credential_owner_teams_list'),
url(r'^(?P<pk>[0-9]+)/copy/$', CredentialCopy.as_view(), name='credential_copy'),
]
__all__ = ['urls']

View File

@@ -20,6 +20,7 @@ from awx.api.views import (
InventoryAccessList,
InventoryObjectRolesList,
InventoryInstanceGroupsList,
InventoryCopy,
)
@@ -40,6 +41,7 @@ urls = [
url(r'^(?P<pk>[0-9]+)/access_list/$', InventoryAccessList.as_view(), name='inventory_access_list'),
url(r'^(?P<pk>[0-9]+)/object_roles/$', InventoryObjectRolesList.as_view(), name='inventory_object_roles_list'),
url(r'^(?P<pk>[0-9]+)/instance_groups/$', InventoryInstanceGroupsList.as_view(), name='inventory_instance_groups_list'),
url(r'^(?P<pk>[0-9]+)/copy/$', InventoryCopy.as_view(), name='inventory_copy'),
]
__all__ = ['urls']

View File

@@ -7,6 +7,7 @@ from awx.api.views import (
InventoryScriptList,
InventoryScriptDetail,
InventoryScriptObjectRolesList,
InventoryScriptCopy,
)
@@ -14,6 +15,7 @@ urls = [
url(r'^$', InventoryScriptList.as_view(), name='inventory_script_list'),
url(r'^(?P<pk>[0-9]+)/$', InventoryScriptDetail.as_view(), name='inventory_script_detail'),
url(r'^(?P<pk>[0-9]+)/object_roles/$', InventoryScriptObjectRolesList.as_view(), name='inventory_script_object_roles_list'),
url(r'^(?P<pk>[0-9]+)/copy/$', InventoryScriptCopy.as_view(), name='inventory_script_copy'),
]
__all__ = ['urls']

View File

@@ -19,6 +19,7 @@ from awx.api.views import (
JobTemplateAccessList,
JobTemplateObjectRolesList,
JobTemplateLabelList,
JobTemplateCopy,
)
@@ -41,6 +42,7 @@ urls = [
url(r'^(?P<pk>[0-9]+)/access_list/$', JobTemplateAccessList.as_view(), name='job_template_access_list'),
url(r'^(?P<pk>[0-9]+)/object_roles/$', JobTemplateObjectRolesList.as_view(), name='job_template_object_roles_list'),
url(r'^(?P<pk>[0-9]+)/labels/$', JobTemplateLabelList.as_view(), name='job_template_label_list'),
url(r'^(?P<pk>[0-9]+)/copy/$', JobTemplateCopy.as_view(), name='job_template_copy'),
]
__all__ = ['urls']

View File

@@ -8,6 +8,7 @@ from awx.api.views import (
NotificationTemplateDetail,
NotificationTemplateTest,
NotificationTemplateNotificationList,
NotificationTemplateCopy,
)
@@ -16,6 +17,7 @@ urls = [
url(r'^(?P<pk>[0-9]+)/$', NotificationTemplateDetail.as_view(), name='notification_template_detail'),
url(r'^(?P<pk>[0-9]+)/test/$', NotificationTemplateTest.as_view(), name='notification_template_test'),
url(r'^(?P<pk>[0-9]+)/notifications/$', NotificationTemplateNotificationList.as_view(), name='notification_template_notification_list'),
url(r'^(?P<pk>[0-9]+)/copy/$', NotificationTemplateCopy.as_view(), name='notification_template_copy'),
]
__all__ = ['urls']

View File

@@ -19,10 +19,11 @@ from awx.api.views import (
ProjectNotificationTemplatesSuccessList,
ProjectObjectRolesList,
ProjectAccessList,
ProjectCopy,
)
urls = [
url(r'^$', ProjectList.as_view(), name='project_list'),
url(r'^(?P<pk>[0-9]+)/$', ProjectDetail.as_view(), name='project_detail'),
url(r'^(?P<pk>[0-9]+)/playbooks/$', ProjectPlaybooks.as_view(), name='project_playbooks'),
@@ -39,6 +40,7 @@ urls = [
name='project_notification_templates_success_list'),
url(r'^(?P<pk>[0-9]+)/object_roles/$', ProjectObjectRolesList.as_view(), name='project_object_roles_list'),
url(r'^(?P<pk>[0-9]+)/access_list/$', ProjectAccessList.as_view(), name='project_access_list'),
url(r'^(?P<pk>[0-9]+)/copy/$', ProjectCopy.as_view(), name='project_copy'),
]
__all__ = ['urls']

View File

@@ -2,6 +2,7 @@
# All Rights Reserved.
from __future__ import absolute_import, unicode_literals
from django.conf import settings
from django.conf.urls import include, url
from awx.api.views import (
@@ -123,5 +124,10 @@ app_name = 'api'
urlpatterns = [
url(r'^$', ApiRootView.as_view(), name='api_root_view'),
url(r'^(?P<version>(v2))/', include(v2_urls)),
url(r'^(?P<version>(v1|v2))/', include(v1_urls))
url(r'^(?P<version>(v1|v2))/', include(v1_urls)),
]
if settings.SETTINGS_MODULE == 'awx.settings.development':
from awx.api.swagger import SwaggerSchemaView
urlpatterns += [
url(r'^swagger/$', SwaggerSchemaView.as_view(), name='swagger_view'),
]

File diff suppressed because it is too large

View File

@@ -1,6 +1,8 @@
# Django REST Framework
from rest_framework import serializers
import six
# Tower
from awx.api.fields import VerbatimField
from awx.api.serializers import BaseSerializer
@@ -45,12 +47,12 @@ class SettingFieldMixin(object):
"""Mixin to use a registered setting field class for API display/validation."""
def to_representation(self, obj):
if getattr(self, 'encrypted', False) and isinstance(obj, basestring) and obj:
if getattr(self, 'encrypted', False) and isinstance(obj, six.string_types) and obj:
return '$encrypted$'
return obj
def to_internal_value(self, value):
if getattr(self, 'encrypted', False) and isinstance(value, basestring) and value.startswith('$encrypted$'):
if getattr(self, 'encrypted', False) and isinstance(value, six.string_types) and value.startswith('$encrypted$'):
raise serializers.SkipField()
obj = super(SettingFieldMixin, self).to_internal_value(value)
return super(SettingFieldMixin, self).to_representation(obj)
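The `basestring` → `six.string_types` swap recurs throughout this compare; the compatibility it buys can be shown in a couple of lines of plain Python:

```python
import six

# On Python 2, six.string_types is (basestring,); on Python 3 it is (str,).
# The isinstance checks in this compare keep working under both interpreters:
assert isinstance(u'$encrypted$', six.string_types)
assert isinstance('plain str', six.string_types)
```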

View File

@@ -275,7 +275,7 @@ class SettingsWrapper(UserSettingsHolder):
setting_ids[setting.key] = setting.id
try:
value = decrypt_field(setting, 'value')
except ValueError, e:
except ValueError as e:
#TODO: Remove in Tower 3.3
logger.debug('encountered error decrypting field: %s - attempting fallback to old', e)
value = old_decrypt_field(setting, 'value')

View File

@@ -6,6 +6,8 @@ import glob
import os
import shutil
import six
# AWX
from awx.conf.registry import settings_registry
@@ -13,7 +15,7 @@ __all__ = ['comment_assignments', 'conf_to_dict']
def comment_assignments(patterns, assignment_names, dry_run=True, backup_suffix='.old'):
if isinstance(patterns, basestring):
if isinstance(patterns, six.string_types):
patterns = [patterns]
diffs = []
for pattern in patterns:
@@ -32,7 +34,7 @@ def comment_assignments(patterns, assignment_names, dry_run=True, backup_suffix=
def comment_assignments_in_file(filename, assignment_names, dry_run=True, backup_filename=None):
from redbaron import RedBaron, indent
if isinstance(assignment_names, basestring):
if isinstance(assignment_names, six.string_types):
assignment_names = [assignment_names]
else:
assignment_names = assignment_names[:]

View File

@@ -21,7 +21,7 @@ from awx.api.generics import * # noqa
from awx.api.permissions import IsSuperUser
from awx.api.versioning import reverse, get_request_version
from awx.main.utils import * # noqa
from awx.main.utils.handlers import BaseHTTPSHandler, LoggingConnectivityException
from awx.main.utils.handlers import BaseHTTPSHandler, UDPHandler, LoggingConnectivityException
from awx.main.tasks import handle_setting_changes
from awx.conf.license import get_licensed_features
from awx.conf.models import Setting
@@ -44,7 +44,6 @@ class SettingCategoryList(ListAPIView):
model = Setting # Not exactly, but needed for the view.
serializer_class = SettingCategorySerializer
filter_backends = []
new_in_310 = True
view_name = _('Setting Categories')
def get_queryset(self):
@@ -69,7 +68,6 @@ class SettingSingletonDetail(RetrieveUpdateDestroyAPIView):
model = Setting # Not exactly, but needed for the view.
serializer_class = SettingSingletonSerializer
filter_backends = []
new_in_310 = True
view_name = _('Setting Detail')
def get_queryset(self):
@@ -170,7 +168,6 @@ class SettingLoggingTest(GenericAPIView):
serializer_class = SettingSingletonSerializer
permission_classes = (IsSuperUser,)
filter_backends = []
new_in_320 = True
def post(self, request, *args, **kwargs):
defaults = dict()
@@ -202,7 +199,11 @@ class SettingLoggingTest(GenericAPIView):
for k, v in serializer.validated_data.items():
setattr(mock_settings, k, v)
mock_settings.LOG_AGGREGATOR_LEVEL = 'DEBUG'
BaseHTTPSHandler.perform_test(mock_settings)
if mock_settings.LOG_AGGREGATOR_PROTOCOL.upper() == 'UDP':
UDPHandler.perform_test(mock_settings)
return Response(status=status.HTTP_201_CREATED)
else:
BaseHTTPSHandler.perform_test(mock_settings)
except LoggingConnectivityException as e:
return Response({'error': str(e)}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
return Response(status=status.HTTP_200_OK)
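A minimal sketch of what a UDP connectivity "test" can realistically amount to; the socket details are assumed, not taken from `UDPHandler`:

```python
import socket

def perform_udp_test(host, port, payload=b'AWX logging connectivity test'):
    # UDP is fire-and-forget: a successful sendto() only means the datagram
    # left this machine, not that the aggregator received it, which is why
    # the view above answers 201 for UDP without waiting on a reply.
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    try:
        sock.sendto(payload, (host, port))
    finally:
        sock.close()
```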

View File

@@ -29,6 +29,8 @@ import threading
import uuid
import memcache
from six.moves import xrange
__all__ = ['event_context']

View File

@@ -25,4 +25,5 @@ import ansible
# Because of the way Ansible loads plugins, it's not possible to import
# ansible.plugins.callback.minimal when being loaded as the minimal plugin. Ugh.
execfile(os.path.join(os.path.dirname(ansible.__file__), 'plugins', 'callback', 'minimal.py'))
with open(os.path.join(os.path.dirname(ansible.__file__), 'plugins', 'callback', 'minimal.py')) as in_file:
exec(in_file.read())
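The `execfile` replacement above is the standard Python 3 migration. As a general, reusable pattern (nothing AWX-specific):

```python
def execfile_compat(path, namespace=None):
    # Python 3 removed execfile(); reading the source and exec()-ing the
    # compiled code is the drop-in equivalent used by the plugin above.
    namespace = {} if namespace is None else namespace
    with open(path) as in_file:
        exec(compile(in_file.read(), path, 'exec'), namespace)
    return namespace
```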

View File

@@ -18,7 +18,11 @@
from __future__ import (absolute_import, division, print_function)
# Python
import codecs
import contextlib
import json
import os
import stat
import sys
import uuid
from copy import copy
@@ -292,10 +296,22 @@ class BaseCallbackModule(CallbackBase):
failures=stats.failures,
ok=stats.ok,
processed=stats.processed,
skipped=stats.skipped,
artifact_data=stats.custom.get('_run', {}) if hasattr(stats, 'custom') else {}
skipped=stats.skipped
)
# write custom set_stat artifact data to the local disk so that it can
# be persisted by awx after the process exits
custom_artifact_data = stats.custom.get('_run', {}) if hasattr(stats, 'custom') else {}
if custom_artifact_data:
# create the directory for custom stats artifacts to live in (if it doesn't exist)
custom_artifacts_dir = os.path.join(os.getenv('AWX_PRIVATE_DATA_DIR'), 'artifacts')
os.makedirs(custom_artifacts_dir, mode=stat.S_IXUSR + stat.S_IWUSR + stat.S_IRUSR)
custom_artifacts_path = os.path.join(custom_artifacts_dir, 'custom')
with codecs.open(custom_artifacts_path, 'w', encoding='utf-8') as f:
os.chmod(custom_artifacts_path, stat.S_IRUSR | stat.S_IWUSR)
json.dump(custom_artifact_data, f)
with self.capture_event_data('playbook_on_stats', **event_data):
super(BaseCallbackModule, self).v2_playbook_on_stats(stats)

View File

@@ -7,7 +7,9 @@ from collections import OrderedDict
import json
import mock
import os
import shutil
import sys
import tempfile
import pytest
@@ -259,3 +261,26 @@ def test_callback_plugin_strips_task_environ_variables(executor, cache, playbook
assert len(cache)
for event in cache.values():
assert os.environ['PATH'] not in json.dumps(event)
@pytest.mark.parametrize('playbook', [
{'custom_set_stat.yml': '''
- name: custom set_stat calls should persist to the local disk so awx can save them
connection: local
hosts: all
tasks:
- set_stats:
data:
foo: "bar"
'''}, # noqa
])
def test_callback_plugin_saves_custom_stats(executor, cache, playbook):
try:
private_data_dir = tempfile.mkdtemp()
with mock.patch.dict(os.environ, {'AWX_PRIVATE_DATA_DIR': private_data_dir}):
executor.run()
artifacts_path = os.path.join(private_data_dir, 'artifacts', 'custom')
with open(artifacts_path, 'r') as f:
assert json.load(f) == {'foo': 'bar'}
finally:
shutil.rmtree(os.path.join(private_data_dir))

View File

@@ -308,7 +308,7 @@ class BaseAccess(object):
if check_expiration and validation_info.get('time_remaining', None) is None:
raise PermissionDenied(_("License is missing."))
if check_expiration and validation_info.get("grace_period_remaining") <= 0:
raise PermissionDenied(_("License has expired."))
logger.error(_("License has expired."))
free_instances = validation_info.get('free_instances', 0)
available_instances = validation_info.get('available_instances', 0)
@@ -316,11 +316,11 @@ class BaseAccess(object):
if add_host_name:
host_exists = Host.objects.filter(name=add_host_name).exists()
if not host_exists and free_instances == 0:
raise PermissionDenied(_("License count of %s instances has been reached.") % available_instances)
logger.error(_("License count of %s instances has been reached.") % available_instances)
elif not host_exists and free_instances < 0:
raise PermissionDenied(_("License count of %s instances has been exceeded.") % available_instances)
logger.error(_("License count of %s instances has been exceeded.") % available_instances)
elif not add_host_name and free_instances < 0:
raise PermissionDenied(_("Host count exceeds available instances."))
logger.error(_("Host count exceeds available instances."))
if feature is not None:
if "features" in validation_info and not validation_info["features"].get(feature, False):
@@ -424,6 +424,18 @@ class InstanceAccess(BaseAccess):
return Instance.objects.filter(
rampart_groups__in=self.user.get_queryset(InstanceGroup)).distinct()
def can_attach(self, obj, sub_obj, relationship, data,
skip_sub_obj_read_check=False):
if relationship == 'rampart_groups' and isinstance(sub_obj, InstanceGroup):
return self.user.is_superuser
return super(InstanceAccess, self).can_attach(obj, sub_obj, relationship, data, skip_sub_obj_read_check=skip_sub_obj_read_check)
def can_unattach(self, obj, sub_obj, relationship, data=None):
if relationship == 'rampart_groups' and isinstance(sub_obj, InstanceGroup):
return self.user.is_superuser
return super(InstanceAccess, self).can_unattach(obj, sub_obj, relationship, data=data)
def can_add(self, data):
return False
@@ -444,13 +456,13 @@ class InstanceGroupAccess(BaseAccess):
organization__in=Organization.accessible_pk_qs(self.user, 'admin_role'))
def can_add(self, data):
return False
return self.user.is_superuser
def can_change(self, obj, data):
return False
return self.user.is_superuser
def can_delete(self, obj):
return False
return self.user.is_superuser
class UserAccess(BaseAccess):
@@ -596,6 +608,7 @@ class InventoryAccess(BaseAccess):
I can see inventory when:
- I'm a superuser.
- I'm an org admin of the inventory's org.
- I'm an inventory admin of the inventory's org.
- I have read, write or admin permissions on it.
I can change inventory when:
- I'm a superuser.
@@ -629,9 +642,9 @@ class InventoryAccess(BaseAccess):
def can_add(self, data):
# If no data is specified, just checking for generic add permission?
if not data:
return Organization.accessible_objects(self.user, 'admin_role').exists()
return Organization.accessible_objects(self.user, 'inventory_admin_role').exists()
return self.check_related('organization', Organization, data)
return self.check_related('organization', Organization, data, role_field='inventory_admin_role')
@check_superuser
def can_change(self, obj, data):
@@ -647,7 +660,7 @@ class InventoryAccess(BaseAccess):
# Verify that the user has access to the new organization if moving an
# inventory to a new organization. Otherwise, just check for admin permission.
return (
self.check_related('organization', Organization, data, obj=obj,
self.check_related('organization', Organization, data, obj=obj, role_field='inventory_admin_role',
mandatory=org_admin_mandatory) and
self.user in obj.admin_role
)
@@ -933,8 +946,12 @@ class CredentialAccess(BaseAccess):
- I'm a superuser.
- It's a user credential and it's my credential.
- It's a user credential and I'm an admin of an organization where that
user is a member of admin of the organization.
user is a member.
- It's a user credential and I'm a credential_admin of an organization
where that user is a member.
- It's a team credential and I'm an admin of the team's organization.
- It's a team credential and I'm a credential admin of the team's
organization.
- It's a team credential and I'm a member of the team.
I can change/delete when:
- I'm a superuser.
@@ -968,7 +985,8 @@ class CredentialAccess(BaseAccess):
return check_user_access(self.user, Team, 'change', team_obj, None)
if data and data.get('organization', None):
organization_obj = get_object_from_data('organization', Organization, data)
return check_user_access(self.user, Organization, 'change', organization_obj, None)
return any([check_user_access(self.user, Organization, 'change', organization_obj, None),
self.user in organization_obj.credential_admin_role])
return False
@check_superuser
@@ -979,7 +997,7 @@ class CredentialAccess(BaseAccess):
def can_change(self, obj, data):
if not obj:
return False
return self.user in obj.admin_role and self.check_related('organization', Organization, data, obj=obj)
return self.user in obj.admin_role and self.check_related('organization', Organization, data, obj=obj, role_field='credential_admin_role')
def can_delete(self, obj):
# Unassociated credentials may be marked deleted by anyone, though we
@@ -1055,6 +1073,7 @@ class ProjectAccess(BaseAccess):
I can see projects when:
- I am a superuser.
- I am an admin in an organization associated with the project.
- I am a project admin in an organization associated with the project.
- I am a user in an organization associated with the project.
- I am on a team associated with the project.
- I have been explicitly granted permission to run/check jobs using the
@@ -1075,12 +1094,12 @@ class ProjectAccess(BaseAccess):
@check_superuser
def can_add(self, data):
if not data: # So the browseable API will work
return Organization.accessible_objects(self.user, 'admin_role').exists()
return self.check_related('organization', Organization, data, mandatory=True)
return Organization.accessible_objects(self.user, 'project_admin_role').exists()
return self.check_related('organization', Organization, data, role_field='project_admin_role', mandatory=True)
@check_superuser
def can_change(self, obj, data):
if not self.check_related('organization', Organization, data, obj=obj):
if not self.check_related('organization', Organization, data, obj=obj, role_field='project_admin_role'):
return False
return self.user in obj.admin_role
@@ -1162,6 +1181,7 @@ class JobTemplateAccess(BaseAccess):
a user can create a job template if
- they are a superuser
- an org admin of any org that the project is a member
- if they are a project_admin for any org that project is a member of
- if they have user or team
based permissions tying the project to the inventory source for the
given action as well as the 'create' deploy permission.
@@ -1420,7 +1440,7 @@ class JobAccess(BaseAccess):
elif not jt_access:
return False
org_access = obj.inventory and self.user in obj.inventory.organization.admin_role
org_access = obj.inventory and self.user in obj.inventory.organization.inventory_admin_role
project_access = obj.project is None or self.user in obj.project.admin_role
credential_access = all([self.user in cred.use_role for cred in obj.credentials.all()])
@@ -1713,13 +1733,14 @@ class WorkflowJobTemplateAccess(BaseAccess):
Users who are able to create deploy jobs can also run normal and check (dry run) jobs.
'''
if not data: # So the browseable API will work
return Organization.accessible_objects(self.user, 'admin_role').exists()
return Organization.accessible_objects(self.user, 'workflow_admin_role').exists()
# will check this if surveys are added to WFJT
if 'survey_enabled' in data and data['survey_enabled']:
self.check_license(feature='surveys')
return self.check_related('organization', Organization, data, mandatory=True)
return self.check_related('organization', Organization, data, role_field='workflow_admin_role',
mandatory=True)
def can_copy(self, obj):
if self.save_messages:
@@ -1746,7 +1767,8 @@ class WorkflowJobTemplateAccess(BaseAccess):
if missing_inventories:
self.messages['inventories_unable_to_copy'] = missing_inventories
return self.check_related('organization', Organization, {'reference_obj': obj}, mandatory=True)
return self.check_related('organization', Organization, {'reference_obj': obj}, role_field='workflow_admin_role',
mandatory=True)
def can_start(self, obj, validate_license=True):
if validate_license:
@@ -1771,7 +1793,8 @@ class WorkflowJobTemplateAccess(BaseAccess):
if self.user.is_superuser:
return True
return self.check_related('organization', Organization, data, obj=obj) and self.user in obj.admin_role
return (self.check_related('organization', Organization, data, role_field='workflow_admin_role', obj=obj) and
self.user in obj.admin_role)
def can_delete(self, obj):
is_delete_allowed = self.user.is_superuser or self.user in obj.admin_role
@@ -1812,7 +1835,7 @@ class WorkflowJobAccess(BaseAccess):
def can_delete(self, obj):
return (obj.workflow_job_template and
obj.workflow_job_template.organization and
self.user in obj.workflow_job_template.organization.admin_role)
self.user in obj.workflow_job_template.organization.workflow_admin_role)
def get_method_capability(self, method, obj, parent_obj):
if method == 'start':
@@ -2147,13 +2170,9 @@ class ScheduleAccess(BaseAccess):
prefetch_related = ('unified_job_template', 'credentials',)
def filtered_queryset(self):
qs = self.model.objects.all()
unified_pk_qs = UnifiedJobTemplate.accessible_pk_qs(self.user, 'read_role')
inv_src_qs = InventorySource.objects.filter(inventory_id=Inventory._accessible_pk_qs(Inventory, self.user, 'read_role'))
return qs.filter(
Q(unified_job_template_id__in=unified_pk_qs) |
Q(unified_job_template_id__in=inv_src_qs.values_list('pk', flat=True)))
return self.model.objects.filter(
unified_job_template__in=UnifiedJobTemplateAccess(self.user).filtered_queryset()
)
@check_superuser
def can_add(self, data):
@@ -2196,7 +2215,7 @@ class NotificationTemplateAccess(BaseAccess):
def filtered_queryset(self):
return self.model.objects.filter(
Q(organization__in=self.user.admin_of_organizations) |
Q(organization__in=Organization.accessible_objects(self.user, 'notification_admin_role')) |
Q(organization__in=self.user.auditor_of_organizations)
).distinct()
@@ -2204,22 +2223,22 @@ class NotificationTemplateAccess(BaseAccess):
if self.user.is_superuser or self.user.is_system_auditor:
return True
if obj.organization is not None:
if self.user in obj.organization.admin_role or self.user in obj.organization.auditor_role:
if self.user in obj.organization.notification_admin_role or self.user in obj.organization.auditor_role:
return True
return False
@check_superuser
def can_add(self, data):
if not data:
return Organization.accessible_objects(self.user, 'admin_role').exists()
return self.check_related('organization', Organization, data, mandatory=True)
return Organization.accessible_objects(self.user, 'notification_admin_role').exists()
return self.check_related('organization', Organization, data, role_field='notification_admin_role', mandatory=True)
@check_superuser
def can_change(self, obj, data):
if obj.organization is None:
# only superusers are allowed to edit orphan notification templates
return False
return self.check_related('organization', Organization, data, obj=obj, mandatory=True)
return self.check_related('organization', Organization, data, obj=obj, role_field='notification_admin_role', mandatory=True)
def can_admin(self, obj, data):
return self.can_change(obj, data)
@@ -2231,7 +2250,7 @@ class NotificationTemplateAccess(BaseAccess):
def can_start(self, obj, validate_license=True):
if obj.organization is None:
return False
return self.user in obj.organization.admin_role
return self.user in obj.organization.notification_admin_role
class NotificationAccess(BaseAccess):
@@ -2243,7 +2262,7 @@ class NotificationAccess(BaseAccess):
def filtered_queryset(self):
return self.model.objects.filter(
Q(notification_template__organization__in=self.user.admin_of_organizations) |
Q(notification_template__organization__in=Organization.accessible_objects(self.user, 'notification_admin_role')) |
Q(notification_template__organization__in=self.user.auditor_of_organizations)
).distinct()

View File

@@ -14,7 +14,7 @@ from django.conf import settings
import awx
from awx.main.expect import run
from awx.main.utils import OutputEventFilter
from awx.main.utils import OutputEventFilter, get_system_task_capacity
from awx.main.queue import CallbackQueueDispatcher
logger = logging.getLogger('awx.isolated.manager')
@@ -381,10 +381,14 @@ class IsolatedManager(object):
logger.error(err_template.format(instance.hostname, instance.version, awx_application_version))
instance.capacity = 0
else:
if instance.capacity == 0 and task_result['capacity']:
if instance.capacity == 0 and task_result['capacity_cpu']:
logger.warning('Isolated instance {} has re-joined.'.format(instance.hostname))
instance.capacity = int(task_result['capacity'])
instance.save(update_fields=['capacity', 'version', 'modified'])
instance.cpu_capacity = int(task_result['capacity_cpu'])
instance.mem_capacity = int(task_result['capacity_mem'])
instance.capacity = get_system_task_capacity(scale=instance.capacity_adjustment,
cpu_capacity=int(task_result['capacity_cpu']),
mem_capacity=int(task_result['capacity_mem']))
instance.save(update_fields=['cpu_capacity', 'mem_capacity', 'capacity', 'version', 'modified'])
@classmethod
def health_check(cls, instance_qs, awx_application_version):
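How `get_system_task_capacity` combines the two measurements is not shown in this compare. One plausible shape, with only the signature taken from the hunk above and the blend itself assumed:

```python
def get_system_task_capacity(scale, cpu_capacity, mem_capacity):
    """Blend CPU- and memory-derived fork counts into a single capacity.

    `scale` (the instance's capacity_adjustment) slides between the more
    conservative of the two figures (0.0) and the more optimistic one (1.0).
    The linear interpolation is an assumption for illustration.
    """
    low, high = sorted((mem_capacity, cpu_capacity))
    return int(low + (high - low) * float(scale))
```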
@@ -428,7 +432,7 @@ class IsolatedManager(object):
task_result = result['plays'][0]['tasks'][0]['hosts'][instance.hostname]
except (KeyError, IndexError):
task_result = {}
if 'capacity' in task_result:
if 'capacity_cpu' in task_result and 'capacity_mem' in task_result:
cls.update_capacity(instance, task_result, awx_application_version)
elif instance.capacity == 0:
logger.debug('Isolated instance {} previously marked as lost, could not re-join.'.format(

View File

@@ -47,7 +47,7 @@ def open_fifo_write(path, data):
This blocks the thread until an external process (such as ssh-agent)
reads data from the pipe.
'''
os.mkfifo(path, 0600)
os.mkfifo(path, 0o600)
thread.start_new_thread(lambda p, d: open(p, 'w').write(d), (path, data))

View File

@@ -74,7 +74,7 @@ class JSONField(upstream_JSONField):
class JSONBField(upstream_JSONBField):
def get_prep_lookup(self, lookup_type, value):
if isinstance(value, basestring) and value == "null":
if isinstance(value, six.string_types) and value == "null":
return 'null'
return super(JSONBField, self).get_prep_lookup(lookup_type, value)
@@ -356,7 +356,7 @@ class SmartFilterField(models.TextField):
value = urllib.unquote(value)
try:
SmartFilter().query_from_string(value)
except RuntimeError, e:
except RuntimeError as e:
raise models.base.ValidationError(e)
return super(SmartFilterField, self).get_prep_value(value)
@@ -506,6 +506,12 @@ class CredentialInputField(JSONSchemaField):
v != '$encrypted$',
model_instance.pk
]):
if not isinstance(getattr(model_instance, k), six.string_types):
raise django_exceptions.ValidationError(
_('secret values must be of type string, not {}').format(type(v).__name__),
code='invalid',
params={'value': v},
)
decrypted_values[k] = utils.decrypt_field(model_instance, k)
else:
decrypted_values[k] = v
@@ -695,11 +701,10 @@ class CredentialTypeInjectorField(JSONSchemaField):
'properties': {
'file': {
'type': 'object',
'properties': {
'template': {'type': 'string'},
'patternProperties': {
'^template(\.[a-zA-Z_]+[a-zA-Z0-9_]*)?$': {'type': 'string'},
},
'additionalProperties': False,
'required': ['template'],
},
'env': {
'type': 'object',
@@ -749,8 +754,22 @@ class CredentialTypeInjectorField(JSONSchemaField):
class TowerNamespace:
filename = None
valid_namespace['tower'] = TowerNamespace()
# ensure either single file or multi-file syntax is used (but not both)
template_names = [x for x in value.get('file', {}).keys() if x.startswith('template')]
if 'template' in template_names and len(template_names) > 1:
raise django_exceptions.ValidationError(
_('Must use multi-file syntax when injecting multiple files'),
code='invalid',
params={'value': value},
)
if 'template' not in template_names:
valid_namespace['tower'].filename = TowerNamespace()
for template_name in template_names:
template_name = template_name.split('.')[1]
setattr(valid_namespace['tower'].filename, template_name, 'EXAMPLE')
for type_, injector in value.items():
for key, tmpl in injector.items():
try:
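The mutual-exclusion rule above distinguishes two injector layouts. Illustrative configs; the field names follow the schema in this hunk, the values are made up:

```python
# Single-file syntax: exactly one rendered file, exposed as tower.filename
single = {
    'file': {'template': '[mycloud]\ntoken={{ api_token }}'},
    'env': {'MYCLOUD_INI': '{{ tower.filename }}'},
}

# Multi-file syntax: template.<name> keys, exposed as tower.filename.<name>
multi = {
    'file': {
        'template.cert': '{{ ssl_cert }}',
        'template.key': '{{ ssl_key }}',
    },
    'env': {
        'MYCLOUD_CERT': '{{ tower.filename.cert }}',
        'MYCLOUD_KEY': '{{ tower.filename.key }}',
    },
}
# Mixing a bare 'template' key with 'template.<name>' keys fails the
# validation shown above.
```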

View File

@@ -5,6 +5,8 @@
import datetime
import logging
import six
# Django
from django.core.management.base import BaseCommand
from django.utils.timezone import now
@@ -41,7 +43,7 @@ class Command(BaseCommand):
n_deleted_items = 0
pks_to_delete = set()
for asobj in ActivityStream.objects.iterator():
asobj_disp = '"%s" id: %s' % (unicode(asobj), asobj.id)
asobj_disp = '"%s" id: %s' % (six.text_type(asobj), asobj.id)
if asobj.timestamp >= self.cutoff:
if self.dry_run:
self.logger.info("would skip %s" % asobj_disp)

View File

@@ -5,6 +5,8 @@
import datetime
import logging
import six
# Django
from django.core.management.base import BaseCommand, CommandError
from django.db import transaction
@@ -66,7 +68,7 @@ class Command(BaseCommand):
jobs = Job.objects.filter(created__lt=self.cutoff)
for job in jobs.iterator():
job_display = '"%s" (%d host summaries, %d events)' % \
(unicode(job),
(six.text_type(job),
job.job_host_summaries.count(), job.job_events.count())
if job.status in ('pending', 'waiting', 'running'):
action_text = 'would skip' if self.dry_run else 'skipping'
@@ -87,7 +89,7 @@ class Command(BaseCommand):
ad_hoc_commands = AdHocCommand.objects.filter(created__lt=self.cutoff)
for ad_hoc_command in ad_hoc_commands.iterator():
ad_hoc_command_display = '"%s" (%d events)' % \
(unicode(ad_hoc_command),
(six.text_type(ad_hoc_command),
ad_hoc_command.ad_hoc_command_events.count())
if ad_hoc_command.status in ('pending', 'waiting', 'running'):
action_text = 'would skip' if self.dry_run else 'skipping'
@@ -107,7 +109,7 @@ class Command(BaseCommand):
skipped, deleted = 0, 0
project_updates = ProjectUpdate.objects.filter(created__lt=self.cutoff)
for pu in project_updates.iterator():
pu_display = '"%s" (type %s)' % (unicode(pu), unicode(pu.launch_type))
pu_display = '"%s" (type %s)' % (six.text_type(pu), six.text_type(pu.launch_type))
if pu.status in ('pending', 'waiting', 'running'):
action_text = 'would skip' if self.dry_run else 'skipping'
self.logger.debug('%s %s project update %s', action_text, pu.status, pu_display)
@@ -130,7 +132,7 @@ class Command(BaseCommand):
skipped, deleted = 0, 0
inventory_updates = InventoryUpdate.objects.filter(created__lt=self.cutoff)
for iu in inventory_updates.iterator():
iu_display = '"%s" (source %s)' % (unicode(iu), unicode(iu.source))
iu_display = '"%s" (source %s)' % (six.text_type(iu), six.text_type(iu.source))
if iu.status in ('pending', 'waiting', 'running'):
action_text = 'would skip' if self.dry_run else 'skipping'
self.logger.debug('%s %s inventory update %s', action_text, iu.status, iu_display)
@@ -153,7 +155,7 @@ class Command(BaseCommand):
skipped, deleted = 0, 0
system_jobs = SystemJob.objects.filter(created__lt=self.cutoff)
for sj in system_jobs.iterator():
sj_display = '"%s" (type %s)' % (unicode(sj), unicode(sj.job_type))
sj_display = '"%s" (type %s)' % (six.text_type(sj), six.text_type(sj.job_type))
if sj.status in ('pending', 'waiting', 'running'):
action_text = 'would skip' if self.dry_run else 'skipping'
self.logger.debug('%s %s system_job %s', action_text, sj.status, sj_display)
@@ -183,7 +185,7 @@ class Command(BaseCommand):
workflow_jobs = WorkflowJob.objects.filter(created__lt=self.cutoff)
for workflow_job in workflow_jobs.iterator():
workflow_job_display = '"{}" ({} nodes)'.format(
unicode(workflow_job),
six.text_type(workflow_job),
workflow_job.workflow_nodes.count())
if workflow_job.status in ('pending', 'waiting', 'running'):
action_text = 'would skip' if self.dry_run else 'skipping'
@@ -204,7 +206,7 @@ class Command(BaseCommand):
notifications = Notification.objects.filter(created__lt=self.cutoff)
for notification in notifications.iterator():
notification_display = '"{}" (started {}, {} type, {} sent)'.format(
unicode(notification), unicode(notification.created),
six.text_type(notification), six.text_type(notification.created),
notification.notification_type, notification.notifications_sent)
if notification.status in ('pending',):
action_text = 'would skip' if self.dry_run else 'skipping'
@@ -246,4 +248,3 @@ class Command(BaseCommand):
self.logger.log(99, '%s: %d would be deleted, %d would be skipped.', m.replace('_', ' '), deleted, skipped)
else:
self.logger.log(99, '%s: %d deleted, %d skipped.', m.replace('_', ' '), deleted, skipped)

View File

@@ -17,7 +17,7 @@ class Command(BaseCommand):
def handle(self, *args, **kwargs):
if getattr(settings, 'AWX_ISOLATED_PRIVATE_KEY', False):
print settings.AWX_ISOLATED_PUBLIC_KEY
print(settings.AWX_ISOLATED_PUBLIC_KEY)
return
key = rsa.generate_private_key(
@@ -41,4 +41,4 @@ class Command(BaseCommand):
) + " generated-by-awx@%s" % datetime.datetime.utcnow().isoformat()
)
pemfile.save()
print pemfile.value
print(pemfile.value)

View File

@@ -904,7 +904,6 @@ class Command(BaseCommand):
new_count = Host.objects.active_count()
if time_remaining <= 0 and not license_info.get('demo', False):
logger.error(LICENSE_EXPIRED_MESSAGE)
raise CommandError("License has expired!")
if free_instances < 0:
d = {
'new_count': new_count,
@@ -914,7 +913,6 @@ class Command(BaseCommand):
logger.error(DEMO_LICENSE_MESSAGE % d)
else:
logger.error(LICENSE_MESSAGE % d)
raise CommandError('License count exceeded!')
def mark_license_failure(self, save=True):
self.inventory_update.license_error = True

View File

@@ -17,6 +17,10 @@ class Command(BaseCommand):
help='Comma-Delimited Hosts to add to the Queue')
parser.add_argument('--controller', dest='controller', type=str,
default='', help='The controlling group (makes this an isolated group)')
parser.add_argument('--instance_percent', dest='instance_percent', type=int, default=0,
help='The percentage of active instances that will be assigned to this group')
parser.add_argument('--instance_minimum', dest='instance_minimum', type=int, default=0,
help='The minimum number of instances that will be retained for this group from available instances')
def handle(self, **options):
queuename = options.get('queuename')
@@ -38,7 +42,9 @@ class Command(BaseCommand):
changed = True
else:
print("Creating instance group {}".format(queuename))
ig = InstanceGroup(name=queuename)
ig = InstanceGroup(name=queuename,
policy_instance_percentage=options.get('instance_percent'),
policy_instance_minimum=options.get('instance_minimum'))
if control_ig:
ig.controller = control_ig
ig.save()
@@ -60,5 +66,7 @@ class Command(BaseCommand):
sys.exit(1)
else:
print("Instance already registered {}".format(instance[0].hostname))
ig.policy_instance_list = instance_list
ig.save()
if changed:
print('(changed: True)')
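Assuming this management command is registered under a name such as `register_queue` and that the hosts argument's dest is `hostnames` (neither appears in this hunk), the new policy options could be exercised like so:

from django.core.management import call_command

# hypothetical invocation; only the option names shown in the add_argument()
# calls above come from the source, the command and hostnames names are assumptions
call_command(
    'register_queue',
    queuename='tower',
    hostnames='awx-1,awx-2',
    instance_percent=50,   # hand this group 50% of active instances...
    instance_minimum=1,    # ...but never fewer than one
)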

View File

@@ -41,10 +41,9 @@ class Command(BaseCommand):
run.open_fifo_write(ssh_key_path, settings.AWX_ISOLATED_PRIVATE_KEY)
args = run.wrap_args_with_ssh_agent(args, ssh_key_path, ssh_auth_sock)
try:
print ' '.join(args)
print(' '.join(args))
subprocess.check_call(args)
except subprocess.CalledProcessError as e:
sys.exit(e.returncode)
finally:
shutil.rmtree(path)

View File

@@ -2,12 +2,9 @@
# All Rights Reserved.
import sys
from datetime import timedelta
import logging
from django.db import models
from django.utils.timezone import now
from django.db.models import Sum
from django.conf import settings
from awx.main.utils.filters import SmartFilter
@@ -93,11 +90,6 @@ class InstanceManager(models.Manager):
"""Return count of active Tower nodes for licensing."""
return self.all().count()
def total_capacity(self):
sumval = self.filter(modified__gte=now() - timedelta(seconds=settings.AWX_ACTIVE_NODE_TIME)) \
.aggregate(total_capacity=Sum('capacity'))['total_capacity']
return max(50, sumval)
def my_role(self):
# NOTE: TODO: Likely to repurpose this once standalone ramparts are a thing
return "tower"

View File

@@ -0,0 +1,62 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from decimal import Decimal
import awx.main.fields
class Migration(migrations.Migration):
dependencies = [
('main', '0019_v330_custom_virtualenv'),
]
operations = [
migrations.AddField(
model_name='instancegroup',
name='policy_instance_list',
field=awx.main.fields.JSONField(default=[], help_text='List of exact-match Instances that will always be automatically assigned to this group',
blank=True),
),
migrations.AddField(
model_name='instancegroup',
name='policy_instance_minimum',
field=models.IntegerField(default=0, help_text='Static minimum number of Instances to automatically assign to this group'),
),
migrations.AddField(
model_name='instancegroup',
name='policy_instance_percentage',
field=models.IntegerField(default=0, help_text='Percentage of Instances to automatically assign to this group'),
),
migrations.AddField(
model_name='instance',
name='capacity_adjustment',
field=models.DecimalField(decimal_places=2, default=Decimal('1.0'), max_digits=3),
),
migrations.AddField(
model_name='instance',
name='cpu',
field=models.IntegerField(default=0, editable=False)
),
migrations.AddField(
model_name='instance',
name='memory',
field=models.BigIntegerField(default=0, editable=False)
),
migrations.AddField(
model_name='instance',
name='cpu_capacity',
field=models.IntegerField(default=0, editable=False)
),
migrations.AddField(
model_name='instance',
name='mem_capacity',
field=models.IntegerField(default=0, editable=False)
),
migrations.AddField(
model_name='instance',
name='enabled',
field=models.BooleanField(default=True)
)
]

View File

@@ -0,0 +1,88 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.11.7 on 2018-02-01 16:32
from __future__ import unicode_literals
import awx.main.fields
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('main', '0020_v330_instancegroup_policies'),
]
operations = [
migrations.AddField(
model_name='organization',
name='execute_role',
field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=b'admin_role', related_name='+', to='main.Role'),
),
migrations.AddField(
model_name='organization',
name='credential_admin_role',
field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=b'admin_role', related_name='+', to='main.Role'),
),
migrations.AddField(
model_name='organization',
name='inventory_admin_role',
field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=b'admin_role', related_name='+', to='main.Role'),
),
migrations.AddField(
model_name='organization',
name='project_admin_role',
field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=b'admin_role', related_name='+', to='main.Role'),
),
migrations.AddField(
model_name='organization',
name='workflow_admin_role',
field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=b'admin_role', related_name='+', to='main.Role'),
),
migrations.AddField(
model_name='organization',
name='notification_admin_role',
field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=b'admin_role', related_name='+', to='main.Role'),
),
migrations.AlterField(
model_name='credential',
name='admin_role',
field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=[b'singleton:system_administrator', b'organization.credential_admin_role'], related_name='+', to='main.Role'),
),
migrations.AlterField(
model_name='inventory',
name='admin_role',
field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=b'organization.inventory_admin_role', related_name='+', to='main.Role'),
),
migrations.AlterField(
model_name='project',
name='admin_role',
field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=[b'organization.project_admin_role', b'singleton:system_administrator'], related_name='+', to='main.Role'),
),
migrations.AlterField(
model_name='workflowjobtemplate',
name='admin_role',
field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=[b'singleton:system_administrator', b'organization.workflow_admin_role'], related_name='+', to='main.Role'),
),
migrations.AlterField(
model_name='workflowjobtemplate',
name='execute_role',
field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=[b'admin_role', b'organization.execute_role'], related_name='+', to='main.Role'),
),
migrations.AlterField(
model_name='jobtemplate',
name='admin_role',
field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=[b'project.organization.project_admin_role', b'inventory.organization.inventory_admin_role'], related_name='+', to='main.Role'),
),
migrations.AlterField(
model_name='jobtemplate',
name='execute_role',
field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=[b'admin_role', b'project.organization.execute_role', b'inventory.organization.execute_role'], related_name='+', to='main.Role'),
),
migrations.AlterField(
model_name='organization',
name='member_role',
field=awx.main.fields.ImplicitRoleField(null=b'True', on_delete=django.db.models.deletion.CASCADE, parent_role=[b'admin_role', b'project_admin_role', b'inventory_admin_role', b'workflow_admin_role', b'notification_admin_role', b'execute_role'], related_name='+', to='main.Role'),
),
]

View File

@@ -0,0 +1,19 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
from awx.main.migrations import ActivityStreamDisabledMigration
from awx.main.migrations import _rbac as rbac
from awx.main.migrations import _migration_utils as migration_utils
class Migration(ActivityStreamDisabledMigration):
dependencies = [
('main', '0021_v330_declare_new_rbac_roles'),
]
operations = [
migrations.RunPython(migration_utils.set_current_apps_for_migrations),
migrations.RunPython(rbac.create_roles),
]

View File

@@ -3,6 +3,8 @@ import logging
from django.utils.timezone import now
from django.utils.text import slugify
import six
from awx.main.models.base import PERM_INVENTORY_SCAN, PERM_INVENTORY_DEPLOY
from awx.main import utils
@@ -13,7 +15,7 @@ logger = logging.getLogger('awx.main.migrations')
def _create_fact_scan_project(ContentType, Project, org):
ct = ContentType.objects.get_for_model(Project)
name = u"Tower Fact Scan - {}".format(org.name if org else "No Organization")
proj = Project(name=name,
proj = Project(name=name,
scm_url='https://github.com/ansible/awx-facts-playbooks',
scm_type='git',
scm_update_on_launch=True,
@@ -24,7 +26,7 @@ def _create_fact_scan_project(ContentType, Project, org):
polymorphic_ctype=ct)
proj.save()
slug_name = slugify(unicode(name)).replace(u'-', u'_')
slug_name = slugify(six.text_type(name)).replace(u'-', u'_')
proj.local_path = u'_%d__%s' % (int(proj.pk), slug_name)
proj.save()
@@ -51,10 +53,10 @@ def _migrate_scan_job_templates(apps):
Project = apps.get_model('main', 'Project')
project_no_org = None
# A scan job template with a custom project will retain the custom project.
JobTemplate.objects.filter(job_type=PERM_INVENTORY_SCAN, project__isnull=False).update(use_fact_cache=True, job_type=PERM_INVENTORY_DEPLOY)
# Scan jobs templates using Tower's default scan playbook will now point at
# the same playbook but in a github repo.
jts = _get_tower_scan_job_templates(JobTemplate)

View File

@@ -184,7 +184,7 @@ class AdHocCommand(UnifiedJob, JobNotificationMixin):
# NOTE: We sorta have to assume the host count matches and that forks default to 5
from awx.main.models.inventory import Host
count_hosts = Host.objects.filter(enabled=True, inventory__ad_hoc_commands__pk=self.pk).count()
return min(count_hosts, 5 if self.forks == 0 else self.forks) * 10
return min(count_hosts, 5 if self.forks == 0 else self.forks) + 1
def copy(self):
data = {}

View File
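The `task_impact` rewrites in this release (here, and for Job below; fixed-cost tasks like inventory updates, project updates, and system jobs drop similarly, e.g. 50 → 1 and 150 → 5) move from weighting each fork by 10 to one capacity unit per effective fork plus one. Concretely:

def task_impact(count_hosts, forks=0):
    # new accounting: one capacity unit per effective fork, plus one
    return min(count_hosts, 5 if forks == 0 else forks) + 1

print(task_impact(20))            # 6  (was min(20, 5) * 10 == 50)
print(task_impact(20, forks=10))  # 11 (was 100)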

@@ -262,7 +262,7 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin):
admin_role = ImplicitRoleField(
parent_role=[
'singleton:' + ROLE_SINGLETON_SYSTEM_ADMINISTRATOR,
'organization.admin_role',
'organization.credential_admin_role',
],
)
use_role = ImplicitRoleField(
@@ -594,7 +594,7 @@ class CredentialType(CommonModelNameNotUnique):
return
class TowerNamespace:
filename = None
pass
tower_namespace = TowerNamespace()
@@ -622,17 +622,25 @@ class CredentialType(CommonModelNameNotUnique):
if len(value):
namespace[field_name] = value
file_tmpl = self.injectors.get('file', {}).get('template')
if file_tmpl is not None:
# If a file template is provided, render the file and update the
# special `tower` template namespace so the filename can be
# referenced in other injectors
file_tmpls = self.injectors.get('file', {})
# If any file templates are provided, render the files and update the
# special `tower` template namespace so the filename can be
# referenced in other injectors
for file_label, file_tmpl in file_tmpls.items():
data = Template(file_tmpl).render(**namespace)
_, path = tempfile.mkstemp(dir=private_data_dir)
with open(path, 'w') as f:
f.write(data)
os.chmod(path, stat.S_IRUSR | stat.S_IWUSR)
namespace['tower'].filename = path
# determine if filename indicates single file or many
if file_label.find('.') == -1:
tower_namespace.filename = path
else:
if not hasattr(tower_namespace, 'filename'):
tower_namespace.filename = TowerNamespace()
file_label = file_label.split('.')[1]
setattr(tower_namespace.filename, file_label, path)
for env_var, tmpl in self.injectors.get('env', {}).items():
if env_var.startswith('ANSIBLE_') or env_var in self.ENV_BLACKLIST:
@@ -646,11 +654,21 @@ class CredentialType(CommonModelNameNotUnique):
extra_vars[var_name] = Template(tmpl).render(**namespace)
safe_extra_vars[var_name] = Template(tmpl).render(**safe_namespace)
def build_extra_vars_file(vars, private_dir):
handle, path = tempfile.mkstemp(dir = private_dir)
f = os.fdopen(handle, 'w')
f.write(json.dumps(vars))
f.close()
os.chmod(path, stat.S_IRUSR)
return path
if extra_vars:
args.extend(['-e', json.dumps(extra_vars)])
path = build_extra_vars_file(extra_vars, private_data_dir)
args.extend(['-e', '@%s' % path])
if safe_extra_vars:
safe_args.extend(['-e', json.dumps(safe_extra_vars)])
path = build_extra_vars_file(safe_extra_vars, private_data_dir)
safe_args.extend(['-e', '@%s' % path])
@CredentialType.default

View File
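The extra-vars hunk above stops passing rendered variables inline on the command line, so secrets no longer appear in the process table; only a file path does. A self-contained sketch of the helper's behavior:

import json
import os
import stat
import tempfile

def build_extra_vars_file(vars, private_dir):
    # same shape as the helper above: dump vars as JSON, owner-read-only
    handle, path = tempfile.mkstemp(dir=private_dir)
    with os.fdopen(handle, 'w') as f:
        json.dump(vars, f)
    os.chmod(path, stat.S_IRUSR)
    return path

path = build_extra_vars_file({'api_token': 's3cret'}, tempfile.mkdtemp())
print(['-e', '@%s' % path])  # what gets appended to the ansible-playbook args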

@@ -1,8 +1,10 @@
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved.
from django.db import models
from django.db.models.signals import post_save
from decimal import Decimal
from django.db import models, connection
from django.db.models.signals import post_save, post_delete
from django.dispatch import receiver
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
@@ -10,12 +12,15 @@ from django.utils.timezone import now, timedelta
from solo.models import SingletonModel
from awx import __version__ as awx_application_version
from awx.api.versioning import reverse
from awx.main.managers import InstanceManager, InstanceGroupManager
from awx.main.fields import JSONField
from awx.main.models.inventory import InventoryUpdate
from awx.main.models.jobs import Job
from awx.main.models.projects import ProjectUpdate
from awx.main.models.unified_jobs import UnifiedJob
from awx.main.utils import get_cpu_capacity, get_mem_capacity, get_system_task_capacity
__all__ = ('Instance', 'InstanceGroup', 'JobOrigin', 'TowerScheduleState',)
@@ -38,6 +43,30 @@ class Instance(models.Model):
default=100,
editable=False,
)
capacity_adjustment = models.DecimalField(
default=Decimal(1.0),
max_digits=3,
decimal_places=2,
)
enabled = models.BooleanField(
default=True
)
cpu = models.IntegerField(
default=0,
editable=False,
)
memory = models.BigIntegerField(
default=0,
editable=False,
)
cpu_capacity = models.IntegerField(
default=0,
editable=False,
)
mem_capacity = models.IntegerField(
default=0,
editable=False,
)
class Meta:
app_label = 'main'
@@ -63,6 +92,23 @@ class Instance(models.Model):
grace_period = settings.AWX_ISOLATED_PERIODIC_CHECK * 2
return self.modified < ref_time - timedelta(seconds=grace_period)
def is_controller(self):
return Instance.objects.filter(rampart_groups__controller__instances=self).exists()
def refresh_capacity(self):
cpu = get_cpu_capacity()
mem = get_mem_capacity()
self.capacity = get_system_task_capacity(self.capacity_adjustment)
self.cpu = cpu[0]
self.memory = mem[0]
self.cpu_capacity = cpu[1]
self.mem_capacity = mem[1]
self.version = awx_application_version
self.save(update_fields=['capacity', 'version', 'modified', 'cpu',
'memory', 'cpu_capacity', 'mem_capacity'])
class InstanceGroup(models.Model):
"""A model representing a Queue/Group of AWX Instances."""
@@ -85,6 +131,19 @@ class InstanceGroup(models.Model):
default=None,
null=True
)
policy_instance_percentage = models.IntegerField(
default=0,
help_text=_("Percentage of Instances to automatically assign to this group")
)
policy_instance_minimum = models.IntegerField(
default=0,
help_text=_("Static minimum number of Instances to automatically assign to this group")
)
policy_instance_list = JSONField(
default=[],
blank=True,
help_text=_("List of exact-match Instances that will always be automatically assigned to this group")
)
def get_absolute_url(self, request=None):
return reverse('api:instance_group_detail', kwargs={'pk': self.pk}, request=request)
@@ -119,6 +178,32 @@ class JobOrigin(models.Model):
app_label = 'main'
@receiver(post_save, sender=InstanceGroup)
def on_instance_group_saved(sender, instance, created=False, raw=False, **kwargs):
if created:
from awx.main.tasks import apply_cluster_membership_policies
connection.on_commit(lambda: apply_cluster_membership_policies.apply_async())
@receiver(post_save, sender=Instance)
def on_instance_saved(sender, instance, created=False, raw=False, **kwargs):
if created:
from awx.main.tasks import apply_cluster_membership_policies
connection.on_commit(lambda: apply_cluster_membership_policies.apply_async())
@receiver(post_delete, sender=InstanceGroup)
def on_instance_group_deleted(sender, instance, using, **kwargs):
from awx.main.tasks import apply_cluster_membership_policies
connection.on_commit(lambda: apply_cluster_membership_policies.apply_async())
@receiver(post_delete, sender=Instance)
def on_instance_deleted(sender, instance, using, **kwargs):
from awx.main.tasks import apply_cluster_membership_policies
connection.on_commit(lambda: apply_cluster_membership_policies.apply_async())
# Unfortunately, the signal can't just be connected against UnifiedJob; it
# turns out that creating a model's subclass doesn't fire the signal for the
# superclass model.

View File
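`get_system_task_capacity` (imported from awx.main.utils above) is not shown in this diff; one plausible reading of how `capacity_adjustment` blends the CPU- and memory-derived figures in `refresh_capacity` (an assumption, not the shipped formula) is:

from decimal import Decimal

def blended_capacity(cpu_capacity, mem_capacity, capacity_adjustment=Decimal('1.0')):
    # hypothetical: 0.0 pins capacity to the smaller figure, 1.0 to the larger;
    # the real arithmetic lives in awx.main.utils.get_system_task_capacity
    low, high = min(cpu_capacity, mem_capacity), max(cpu_capacity, mem_capacity)
    return int(low + (high - low) * float(capacity_adjustment))

print(blended_capacity(cpu_capacity=16, mem_capacity=42, capacity_adjustment=Decimal('0.5')))  # 29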

@@ -50,6 +50,7 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin):
an inventory source contains lists and hosts.
'''
FIELDS_TO_PRESERVE_AT_COPY = ['hosts', 'groups', 'instance_groups']
KIND_CHOICES = [
('', _('Hosts have a direct link to this inventory.')),
('smart', _('Hosts for inventory generated using the host_filter property.')),
@@ -131,7 +132,7 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin):
blank=True,
)
admin_role = ImplicitRoleField(
parent_role='organization.admin_role',
parent_role='organization.inventory_admin_role',
)
update_role = ImplicitRoleField(
parent_role='admin_role',
@@ -505,6 +506,10 @@ class Host(CommonModelNameNotUnique):
A managed node
'''
FIELDS_TO_PRESERVE_AT_COPY = [
'name', 'description', 'groups', 'inventory', 'enabled', 'instance_id', 'variables'
]
class Meta:
app_label = 'main'
unique_together = (("name", "inventory"),) # FIXME: Add ('instance_id', 'inventory') after migration.
@@ -692,6 +697,10 @@ class Group(CommonModelNameNotUnique):
groups.
'''
FIELDS_TO_PRESERVE_AT_COPY = [
'name', 'description', 'inventory', 'children', 'parents', 'hosts', 'variables'
]
class Meta:
app_label = 'main'
unique_together = (("name", "inventory"),)
@@ -1265,7 +1274,7 @@ class InventorySourceOptions(BaseModel):
source_vars_dict = VarsDictProperty('source_vars')
def clean_instance_filters(self):
instance_filters = unicode(self.instance_filters or '')
instance_filters = six.text_type(self.instance_filters or '')
if self.source == 'ec2':
invalid_filters = []
instance_filter_re = re.compile(r'^((tag:.+)|([a-z][a-z\.-]*[a-z]))=.*$')
@@ -1291,7 +1300,7 @@ class InventorySourceOptions(BaseModel):
return ''
def clean_group_by(self):
group_by = unicode(self.group_by or '')
group_by = six.text_type(self.group_by or '')
if self.source == 'ec2':
get_choices = getattr(self, 'get_%s_group_by_choices' % self.source)
valid_choices = [x[0] for x in get_choices()]
@@ -1602,7 +1611,7 @@ class InventoryUpdate(UnifiedJob, InventorySourceOptions, JobNotificationMixin,
@property
def task_impact(self):
return 50
return 1
# InventoryUpdate credential required
# Custom and SCM InventoryUpdate credential not required

View File

@@ -2,21 +2,22 @@
# All Rights Reserved.
# Python
import codecs
import datetime
import logging
import os
import time
import json
import base64
from urlparse import urljoin
import six
# Django
from django.conf import settings
from django.db import models
#from django.core.cache import cache
import memcache
from dateutil import parser
from dateutil.tz import tzutc
from django.utils.encoding import smart_str
from django.utils.timezone import now
from django.utils.translation import ugettext_lazy as _
from django.core.exceptions import ValidationError, FieldDoesNotExist
@@ -220,6 +221,10 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour
A job template is a reusable job definition for applying a project (with
playbook) to an inventory source with a given credential.
'''
FIELDS_TO_PRESERVE_AT_COPY = [
'labels', 'instance_groups', 'credentials', 'survey_spec'
]
FIELDS_TO_DISCARD_AT_COPY = ['vault_credential', 'credential']
SOFT_UNIQUE_TOGETHER = [('polymorphic_ctype', 'name')]
class Meta:
@@ -266,10 +271,10 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour
allows_field='credentials'
)
admin_role = ImplicitRoleField(
parent_role=['project.organization.admin_role', 'inventory.organization.admin_role']
parent_role=['project.organization.project_admin_role', 'inventory.organization.inventory_admin_role']
)
execute_role = ImplicitRoleField(
parent_role=['admin_role'],
parent_role=['admin_role', 'project.organization.execute_role', 'inventory.organization.execute_role'],
)
read_role = ImplicitRoleField(
parent_role=['project.organization.auditor_role', 'inventory.organization.auditor_role', 'execute_role', 'admin_role'],
@@ -620,10 +625,10 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskMana
# NOTE: We sorta have to assume the host count matches and that forks default to 5
from awx.main.models.inventory import Host
if self.launch_type == 'callback':
count_hosts = 1
count_hosts = 2
else:
count_hosts = Host.objects.filter(inventory__jobs__pk=self.pk).count()
return min(count_hosts, 5 if self.forks == 0 else self.forks) * 10
return min(count_hosts, 5 if self.forks == 0 else self.forks) + 1
@property
def successful_hosts(self):
@@ -734,86 +739,68 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskMana
def get_notification_friendly_name(self):
return "Job"
@property
def memcached_fact_key(self):
return '{}'.format(self.inventory.id)
def memcached_fact_host_key(self, host_name):
return '{}-{}'.format(self.inventory.id, base64.b64encode(host_name.encode('utf-8')))
def memcached_fact_modified_key(self, host_name):
return '{}-{}-modified'.format(self.inventory.id, base64.b64encode(host_name.encode('utf-8')))
def _get_inventory_hosts(self, only=['name', 'ansible_facts', 'modified',]):
def _get_inventory_hosts(self, only=['name', 'ansible_facts', 'ansible_facts_modified', 'modified',]):
if not self.inventory:
return []
return self.inventory.hosts.only(*only)
def _get_memcache_connection(self):
return memcache.Client([settings.CACHES['default']['LOCATION']], debug=0)
def start_job_fact_cache(self):
if not self.inventory:
return
cache = self._get_memcache_connection()
host_names = []
for host in self._get_inventory_hosts():
host_key = self.memcached_fact_host_key(host.name)
modified_key = self.memcached_fact_modified_key(host.name)
if cache.get(modified_key) is None:
if host.ansible_facts_modified:
host_modified = host.ansible_facts_modified.replace(tzinfo=tzutc()).isoformat()
else:
host_modified = datetime.datetime.now(tzutc()).isoformat()
cache.set(host_key, json.dumps(host.ansible_facts))
cache.set(modified_key, host_modified)
host_names.append(host.name)
cache.set(self.memcached_fact_key, host_names)
def finish_job_fact_cache(self):
if not self.inventory:
return
cache = self._get_memcache_connection()
def start_job_fact_cache(self, destination, modification_times, timeout=None):
destination = os.path.join(destination, 'facts')
os.makedirs(destination, mode=0o700)
hosts = self._get_inventory_hosts()
if timeout is None:
timeout = settings.ANSIBLE_FACT_CACHE_TIMEOUT
if timeout > 0:
# exclude hosts with fact data older than `settings.ANSIBLE_FACT_CACHE_TIMEOUT` seconds
timeout = now() - datetime.timedelta(seconds=timeout)
hosts = hosts.filter(ansible_facts_modified__gte=timeout)
for host in hosts:
host_key = self.memcached_fact_host_key(host.name)
modified_key = self.memcached_fact_modified_key(host.name)
modified = cache.get(modified_key)
if modified is None:
cache.delete(host_key)
filepath = os.sep.join(map(six.text_type, [destination, host.name]))
if not os.path.realpath(filepath).startswith(destination):
system_tracking_logger.error('facts for host {} could not be cached'.format(smart_str(host.name)))
continue
with codecs.open(filepath, 'w', encoding='utf-8') as f:
os.chmod(f.name, 0o600)
json.dump(host.ansible_facts, f)
# make note of the time we wrote the file so we can check if it changed later
modification_times[filepath] = os.path.getmtime(filepath)
# Save facts if cache is newer than DB
modified = parser.parse(modified, tzinfos=[tzutc()])
if not host.ansible_facts_modified or modified > host.ansible_facts_modified:
ansible_facts = cache.get(host_key)
try:
ansible_facts = json.loads(ansible_facts)
except Exception:
ansible_facts = None
if ansible_facts is None:
cache.delete(host_key)
continue
host.ansible_facts = ansible_facts
host.ansible_facts_modified = modified
if 'insights' in ansible_facts and 'system_id' in ansible_facts['insights']:
host.insights_system_id = ansible_facts['insights']['system_id']
host.save()
def finish_job_fact_cache(self, destination, modification_times):
destination = os.path.join(destination, 'facts')
for host in self._get_inventory_hosts():
filepath = os.sep.join(map(six.text_type, [destination, host.name]))
if not os.path.realpath(filepath).startswith(destination):
system_tracking_logger.error('facts for host {} could not be cached'.format(smart_str(host.name)))
continue
if os.path.exists(filepath):
# If the file changed since we wrote it pre-playbook run...
modified = os.path.getmtime(filepath)
if modified > modification_times.get(filepath, 0):
with codecs.open(filepath, 'r', encoding='utf-8') as f:
try:
ansible_facts = json.load(f)
except ValueError:
continue
host.ansible_facts = ansible_facts
host.ansible_facts_modified = now()
if 'insights' in ansible_facts and 'system_id' in ansible_facts['insights']:
host.insights_system_id = ansible_facts['insights']['system_id']
host.save()
system_tracking_logger.info(
'New fact for inventory {} host {}'.format(
smart_str(host.inventory.name), smart_str(host.name)),
extra=dict(inventory_id=host.inventory.id, host_name=host.name,
ansible_facts=host.ansible_facts,
ansible_facts_modified=host.ansible_facts_modified.isoformat(),
job_id=self.id))
else:
# if the file goes missing, ansible removed it (likely via clear_facts)
host.ansible_facts = {}
host.ansible_facts_modified = now()
system_tracking_logger.info(
'New fact for inventory {} host {}'.format(
smart_str(host.inventory.name), smart_str(host.name)),
extra=dict(inventory_id=host.inventory.id, host_name=host.name,
ansible_facts=host.ansible_facts,
ansible_facts_modified=host.ansible_facts_modified.isoformat(),
job_id=self.id))
'Facts cleared for inventory {} host {}'.format(
smart_str(host.inventory.name), smart_str(host.name)))
host.save()
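On disk, the replacement for the memcached cache is just one JSON document per host under `<private_data_dir>/facts`, presumably so Ansible's jsonfile fact-cache plugin can read and update it in place. For instance (paths illustrative):

import codecs
import json
import os
import tempfile

destination = os.path.join(tempfile.mkdtemp(), 'facts')
os.makedirs(destination)
# one file per inventory host, named for the host, holding its ansible_facts
with codecs.open(os.path.join(destination, 'web-1.example.org'), 'w', encoding='utf-8') as f:
    json.dump({'ansible_distribution': 'Fedora'}, f)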
# Add on aliases for the non-related-model fields
@@ -1190,7 +1177,7 @@ class SystemJob(UnifiedJob, SystemJobOptions, JobNotificationMixin):
@property
def task_impact(self):
return 150
return 5
@property
def preferred_instance_groups(self):

View File

@@ -3,6 +3,8 @@ import os
import json
from copy import copy, deepcopy
import six
# Django
from django.conf import settings
from django.db import models
@@ -161,7 +163,7 @@ class SurveyJobTemplateMixin(models.Model):
decrypted_default = default
if (
survey_element['type'] == "password" and
isinstance(decrypted_default, basestring) and
isinstance(decrypted_default, six.string_types) and
decrypted_default.startswith('$encrypted$')
):
decrypted_default = decrypt_value(get_encryption_key('value', pk=None), decrypted_default)
@@ -184,7 +186,7 @@ class SurveyJobTemplateMixin(models.Model):
if (survey_element['type'] == "password"):
password_value = data.get(survey_element['variable'])
if (
isinstance(password_value, basestring) and
isinstance(password_value, six.string_types) and
password_value == '$encrypted$'
):
if survey_element.get('default') is None and survey_element['required']:
@@ -197,7 +199,7 @@ class SurveyJobTemplateMixin(models.Model):
errors.append("'%s' value missing" % survey_element['variable'])
elif survey_element['type'] in ["textarea", "text", "password"]:
if survey_element['variable'] in data:
if type(data[survey_element['variable']]) not in (str, unicode):
if not isinstance(data[survey_element['variable']], six.string_types):
errors.append("Value %s for '%s' expected to be a string." % (data[survey_element['variable']],
survey_element['variable']))
return errors
@@ -241,7 +243,7 @@ class SurveyJobTemplateMixin(models.Model):
errors.append("'%s' value is expected to be a list." % survey_element['variable'])
else:
choice_list = copy(survey_element['choices'])
if isinstance(choice_list, basestring):
if isinstance(choice_list, six.string_types):
choice_list = choice_list.split('\n')
for val in data[survey_element['variable']]:
if val not in choice_list:
@@ -249,7 +251,7 @@ class SurveyJobTemplateMixin(models.Model):
choice_list))
elif survey_element['type'] == 'multiplechoice':
choice_list = copy(survey_element['choices'])
if isinstance(choice_list, basestring):
if isinstance(choice_list, six.string_types):
choice_list = choice_list.split('\n')
if survey_element['variable'] in data:
if data[survey_element['variable']] not in choice_list:
@@ -372,7 +374,7 @@ class SurveyJobMixin(models.Model):
extra_vars = json.loads(self.extra_vars)
for key in self.survey_passwords:
value = extra_vars.get(key)
if value and isinstance(value, basestring) and value.startswith('$encrypted$'):
if value and isinstance(value, six.string_types) and value.startswith('$encrypted$'):
extra_vars[key] = decrypt_value(get_encryption_key('value', pk=None), value)
return json.dumps(extra_vars)
else:

View File

@@ -14,6 +14,8 @@ from django.contrib.auth.models import User
from django.utils.timezone import now as tz_now
from django.utils.translation import ugettext_lazy as _
import six
# AWX
from awx.api.versioning import reverse
from awx.main.fields import AutoOneToOneField, ImplicitRoleField
@@ -43,11 +45,31 @@ class Organization(CommonModel, NotificationFieldsModel, ResourceMixin, CustomVi
admin_role = ImplicitRoleField(
parent_role='singleton:' + ROLE_SINGLETON_SYSTEM_ADMINISTRATOR,
)
execute_role = ImplicitRoleField(
parent_role='admin_role',
)
project_admin_role = ImplicitRoleField(
parent_role='admin_role',
)
inventory_admin_role = ImplicitRoleField(
parent_role='admin_role',
)
credential_admin_role = ImplicitRoleField(
parent_role='admin_role',
)
workflow_admin_role = ImplicitRoleField(
parent_role='admin_role',
)
notification_admin_role = ImplicitRoleField(
parent_role='admin_role',
)
auditor_role = ImplicitRoleField(
parent_role='singleton:' + ROLE_SINGLETON_SYSTEM_AUDITOR,
)
member_role = ImplicitRoleField(
parent_role='admin_role',
parent_role=['admin_role', 'execute_role', 'project_admin_role',
'inventory_admin_role', 'workflow_admin_role',
'notification_admin_role']
)
read_role = ImplicitRoleField(
parent_role=['member_role', 'auditor_role'],
@@ -159,7 +181,7 @@ class AuthToken(BaseModel):
def reason_long(reason):
for x in AuthToken.REASON_CHOICES:
if x[0] == reason:
return unicode(x[1])
return six.text_type(x[1])
return None
@classmethod

View File

@@ -15,6 +15,8 @@ from django.utils.text import slugify
from django.core.exceptions import ValidationError
from django.utils.timezone import now, make_aware, get_default_timezone
import six
# AWX
from awx.api.versioning import reverse
from awx.main.models.base import * # noqa
@@ -124,7 +126,7 @@ class ProjectOptions(models.Model):
def clean_scm_url(self):
if self.scm_type == 'insights':
self.scm_url = settings.INSIGHTS_URL_BASE
scm_url = unicode(self.scm_url or '')
scm_url = six.text_type(self.scm_url or '')
if not self.scm_type:
return ''
try:
@@ -135,7 +137,7 @@ class ProjectOptions(models.Model):
scm_url_parts = urlparse.urlsplit(scm_url)
if self.scm_type and not any(scm_url_parts):
raise ValidationError(_('SCM URL is required.'))
return unicode(self.scm_url or '')
return six.text_type(self.scm_url or '')
def clean_credential(self):
if not self.scm_type:
@@ -229,6 +231,8 @@ class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin, CustomVirtualEn
'''
SOFT_UNIQUE_TOGETHER = [('polymorphic_ctype', 'name', 'organization')]
FIELDS_TO_PRESERVE_AT_COPY = ['labels', 'instance_groups', 'credentials']
FIELDS_TO_DISCARD_AT_COPY = ['local_path']
class Meta:
app_label = 'main'
@@ -282,7 +286,7 @@ class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin, CustomVirtualEn
)
admin_role = ImplicitRoleField(parent_role=[
'organization.admin_role',
'organization.project_admin_role',
'singleton:' + ROLE_SINGLETON_SYSTEM_ADMINISTRATOR,
])
@@ -326,7 +330,7 @@ class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin, CustomVirtualEn
update_fields.append('scm_delete_on_next_update')
# Create auto-generated local path if project uses SCM.
if self.pk and self.scm_type and not self.local_path.startswith('_'):
slug_name = slugify(unicode(self.name)).replace(u'-', u'_')
slug_name = slugify(six.text_type(self.name)).replace(u'-', u'_')
self.local_path = u'_%d__%s' % (int(self.pk), slug_name)
if 'local_path' not in update_fields:
update_fields.append('local_path')
@@ -492,7 +496,7 @@ class ProjectUpdate(UnifiedJob, ProjectOptions, JobNotificationMixin, TaskManage
@property
def task_impact(self):
return 0 if self.job_type == 'run' else 20
return 0 if self.job_type == 'run' else 1
@property
def result_stdout(self):

View File

@@ -33,29 +33,42 @@ ROLE_SINGLETON_SYSTEM_ADMINISTRATOR='system_administrator'
ROLE_SINGLETON_SYSTEM_AUDITOR='system_auditor'
role_names = {
'system_administrator' : _('System Administrator'),
'system_auditor' : _('System Auditor'),
'adhoc_role' : _('Ad Hoc'),
'admin_role' : _('Admin'),
'auditor_role' : _('Auditor'),
'execute_role' : _('Execute'),
'member_role' : _('Member'),
'read_role' : _('Read'),
'update_role' : _('Update'),
'use_role' : _('Use'),
'system_administrator': _('System Administrator'),
'system_auditor': _('System Auditor'),
'adhoc_role': _('Ad Hoc'),
'admin_role': _('Admin'),
'project_admin_role': _('Project Admin'),
'inventory_admin_role': _('Inventory Admin'),
'credential_admin_role': _('Credential Admin'),
'workflow_admin_role': _('Workflow Admin'),
'notification_admin_role': _('Notification Admin'),
'auditor_role': _('Auditor'),
'execute_role': _('Execute'),
'member_role': _('Member'),
'read_role': _('Read'),
'update_role': _('Update'),
'use_role': _('Use'),
}
role_descriptions = {
'system_administrator' : _('Can manage all aspects of the system'),
'system_auditor' : _('Can view all settings on the system'),
'adhoc_role' : _('May run ad hoc commands on an inventory'),
'admin_role' : _('Can manage all aspects of the %s'),
'auditor_role' : _('Can view all settings for the %s'),
'execute_role' : _('May run the %s'),
'member_role' : _('User is a member of the %s'),
'read_role' : _('May view settings for the %s'),
'update_role' : _('May update project or inventory or group using the configured source update system'),
'use_role' : _('Can use the %s in a job template'),
'system_administrator': _('Can manage all aspects of the system'),
'system_auditor': _('Can view all settings on the system'),
'adhoc_role': _('May run ad hoc commands on an inventory'),
'admin_role': _('Can manage all aspects of the %s'),
'project_admin_role': _('Can manage all projects of the %s'),
'inventory_admin_role': _('Can manage all inventories of the %s'),
'credential_admin_role': _('Can manage all credentials of the %s'),
'workflow_admin_role': _('Can manage all workflows of the %s'),
'notification_admin_role': _('Can manage all notifications of the %s'),
'auditor_role': _('Can view all settings for the %s'),
'execute_role': {
'organization': _('May run any executable resources in the organization'),
'default': _('May run the %s'),
},
'member_role': _('User is a member of the %s'),
'read_role': _('May view settings for the %s'),
'update_role': _('May update project or inventory or group using the configured source update system'),
'use_role': _('Can use the %s in a job template'),
}
@@ -170,12 +183,22 @@ class Role(models.Model):
global role_descriptions
description = role_descriptions[self.role_field]
content_type = self.content_type
if '%s' in description and content_type:
model_name = None
if content_type:
model = content_type.model_class()
model_name = re.sub(r'([a-z])([A-Z])', r'\1 \2', model.__name__).lower()
description = description % model_name
return description
value = description
if type(description) == dict:
value = description.get(model_name)
if value is None:
value = description.get('default')
if '%s' in value and content_type:
value = value % model_name
return value
@staticmethod
def rebuild_role_ancestor_list(additions, removals):

View File
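The `get_description` change above lets `role_descriptions` entries be either a plain string or a dict keyed by model name with a 'default' fallback, so `execute_role` reads differently on an organization than elsewhere. A standalone rendition of that lookup:

role_descriptions = {
    'execute_role': {
        'organization': 'May run any executable resources in the organization',
        'default': 'May run the %s',
    },
}

def describe(role_field, model_name):
    description = role_descriptions[role_field]
    value = description  # plain-string entries pass straight through
    if isinstance(description, dict):
        value = description.get(model_name) or description.get('default')
    return value % model_name if '%s' in value else value

print(describe('execute_role', 'organization'))  # organization-specific wording
print(describe('execute_role', 'job template'))  # 'May run the job template'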

@@ -150,14 +150,13 @@ class Schedule(CommonModel, LaunchTimeConfig):
# > UTC time.
raise ValueError('RRULE UNTIL values must be specified in UTC')
try:
first_event = x[0]
if first_event < now() - datetime.timedelta(days=365 * 5):
# For older DTSTART values, if there are more than 1000 recurrences...
if len(x[:1001]) > 1000:
raise ValueError('RRULE values that yield more than 1000 events are not allowed.')
except IndexError:
pass
if 'MINUTELY' in rrule or 'HOURLY' in rrule:
try:
first_event = x[0]
if first_event < now() - datetime.timedelta(days=365 * 5):
raise ValueError('RRULE values with more than 1000 events are not allowed.')
except IndexError:
pass
return x
def __unicode__(self):

View File
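The schedule validation above bounds an otherwise unbounded expansion by slicing the rule to 1001 occurrences. With python-dateutil that probe looks like this (values illustrative):

from dateutil import rrule

# a MINUTELY rule with no COUNT/UNTIL expands forever; slicing caps the check
rule = rrule.rrulestr('DTSTART:20180101T000000Z\nRRULE:FREQ=MINUTELY')
print(len(rule[:1001]) > 1000)  # True -> rejected with ValueError above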

@@ -432,7 +432,7 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, Notificatio
copy_m2m_relationships(self, unified_jt, fields)
return unified_jt
def _accept_or_ignore_job_kwargs(self, _exclude_errors=None, **kwargs):
def _accept_or_ignore_job_kwargs(self, _exclude_errors=(), **kwargs):
'''
Override in subclass if template accepts _any_ prompted params
'''

View File

@@ -110,6 +110,13 @@ class WorkflowNodeBase(CreatedModifiedModel, LaunchTimeConfig):
class WorkflowJobTemplateNode(WorkflowNodeBase):
FIELDS_TO_PRESERVE_AT_COPY = [
'unified_job_template', 'workflow_job_template', 'success_nodes', 'failure_nodes',
'always_nodes', 'credentials', 'inventory', 'extra_data', 'survey_passwords',
'char_prompts'
]
REENCRYPTION_BLACKLIST_AT_COPY = ['extra_data', 'survey_passwords']
workflow_job_template = models.ForeignKey(
'WorkflowJobTemplate',
related_name='workflow_job_template_nodes',
@@ -283,6 +290,9 @@ class WorkflowJobOptions(BaseModel):
class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions, SurveyJobTemplateMixin, ResourceMixin):
SOFT_UNIQUE_TOGETHER = [('polymorphic_ctype', 'name', 'organization')]
FIELDS_TO_PRESERVE_AT_COPY = [
'labels', 'instance_groups', 'workflow_job_template_nodes', 'credentials', 'survey_spec'
]
class Meta:
app_label = 'main'
@@ -296,10 +306,11 @@ class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions, SurveyJobTempl
)
admin_role = ImplicitRoleField(parent_role=[
'singleton:' + ROLE_SINGLETON_SYSTEM_ADMINISTRATOR,
'organization.admin_role'
'organization.workflow_admin_role'
])
execute_role = ImplicitRoleField(parent_role=[
'admin_role'
'admin_role',
'organization.execute_role',
])
read_role = ImplicitRoleField(parent_role=[
'singleton:' + ROLE_SINGLETON_SYSTEM_AUDITOR,
@@ -353,7 +364,7 @@ class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions, SurveyJobTempl
workflow_job.copy_nodes_from_original(original=self)
return workflow_job
def _accept_or_ignore_job_kwargs(self, **kwargs):
def _accept_or_ignore_job_kwargs(self, _exclude_errors=(), **kwargs):
prompted_fields = {}
rejected_fields = {}
accepted_vars, rejected_vars, errors_dict = self.accept_or_ignore_variables(kwargs.get('extra_vars', {}))
@@ -394,11 +405,6 @@ class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions, SurveyJobTempl
node_list.append(node.pk)
return node_list
def user_copy(self, user):
new_wfjt = self.copy_unified_jt()
new_wfjt.copy_nodes_from_original(original=self, user=user)
return new_wfjt
class WorkflowJob(UnifiedJob, WorkflowJobOptions, SurveyJobMixin, JobNotificationMixin):
class Meta:

View File

@@ -1,6 +1,7 @@
# Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.
import time
import logging
from slackclient import SlackClient
@@ -9,6 +10,7 @@ from django.utils.translation import ugettext_lazy as _
from awx.main.notifications.base import AWXBaseEmailBackend
logger = logging.getLogger('awx.main.notifications.slack_backend')
WEBSOCKET_TIMEOUT = 30
class SlackBackend(AWXBaseEmailBackend):
@@ -30,7 +32,18 @@ class SlackBackend(AWXBaseEmailBackend):
if not self.connection.rtm_connect():
if not self.fail_silently:
raise Exception("Slack Notification Token is invalid")
return True
start = time.time()
time.clock()
elapsed = 0
while elapsed < WEBSOCKET_TIMEOUT:
events = self.connection.rtm_read()
if any(event['type'] == 'hello' for event in events):
return True
elapsed = time.time() - start
time.sleep(0.5)
raise RuntimeError("Slack Notification unable to establish websocket connection after {} seconds".format(WEBSOCKET_TIMEOUT))
def close(self):
if self.connection is None:

View File

@@ -5,6 +5,8 @@
import logging
import os
from six.moves import xrange
# Django
from django.conf import settings
@@ -46,6 +48,6 @@ class CallbackQueueDispatcher(object):
delivery_mode="persistent" if settings.PERSISTENT_CALLBACK_MESSAGES else "transient",
routing_key=self.connection_queue)
return
except Exception, e:
except Exception as e:
self.logger.info('Publish Job Event Exception: %r, retry=%d', e,
retry_count, exc_info=True)

View File

@@ -485,13 +485,13 @@ class TaskManager():
task.job_explanation += ' '.join((
'Task was marked as running in Tower but its ',
'controller management daemon was not present in',
'Celery, so it has been marked as failed.',
'the job queue, so it has been marked as failed.',
'Task may still be running, but contactability is unknown.'
))
else:
task.job_explanation += ' '.join((
'Task was marked as running in Tower but was not present in',
'Celery, so it has been marked as failed.',
'the job queue, so it has been marked as failed.',
))
try:
task.save(update_fields=['status', 'start_args', 'job_explanation'])

View File

@@ -21,12 +21,12 @@ class LogErrorsTask(Task):
super(LogErrorsTask, self).on_failure(exc, task_id, args, kwargs, einfo)
@shared_task
@shared_task(base=LogErrorsTask)
def run_job_launch(job_id):
TaskManager().schedule()
@shared_task
@shared_task(base=LogErrorsTask)
def run_job_complete(job_id):
TaskManager().schedule()
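Only the tail of `LogErrorsTask` is visible above; the pattern being applied is a Celery base task whose `on_failure` logs before deferring to the default handler. A sketch under that assumption:

import logging
from celery import Task, shared_task

logger = logging.getLogger('awx.main.tasks')

class LogErrorsTask(Task):
    def on_failure(self, exc, task_id, args, kwargs, einfo):
        # record the failure, then fall through to Celery's default handling
        logger.error('Task {} failed: {}'.format(self.name, exc))
        super(LogErrorsTask, self).on_failure(exc, task_id, args, kwargs, einfo)

@shared_task(base=LogErrorsTask)
def noisy_example():
    raise RuntimeError('logged by LogErrorsTask.on_failure')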

View File

@@ -16,6 +16,8 @@ from django.dispatch import receiver
from crum import get_current_request, get_current_user
from crum.signals import current_user_getter
import six
# AWX
from awx.main.models import * # noqa
from awx.api.serializers import * # noqa
@@ -88,7 +90,7 @@ def emit_update_inventory_computed_fields(sender, **kwargs):
elif sender == Group.inventory_sources.through:
sender_name = 'group.inventory_sources'
else:
sender_name = unicode(sender._meta.verbose_name)
sender_name = six.text_type(sender._meta.verbose_name)
if kwargs['signal'] == post_save:
if sender == Job:
return
@@ -118,7 +120,7 @@ def emit_update_inventory_on_created_or_deleted(sender, **kwargs):
pass
else:
return
sender_name = unicode(sender._meta.verbose_name)
sender_name = six.text_type(sender._meta.verbose_name)
logger.debug("%s created or deleted, updating inventory computed fields: %r %r",
sender_name, sender, kwargs)
try:
@@ -577,5 +579,5 @@ def delete_inventory_for_org(sender, instance, **kwargs):
for inventory in inventories:
try:
inventory.schedule_deletion(user_id=getattr(user, 'id', None))
except RuntimeError, e:
except RuntimeError as e:
logger.debug(e)

Some files were not shown because too many files have changed in this diff.