Compare commits

...

720 Commits
1.0.2 ... 1.0.4

Author SHA1 Message Date
Matthew Jones
8505783350 Merge remote-tracking branch 'tower/release_3.2.3' into devel
* tower/release_3.2.3:
  fix unicode bugs with log statements
  use --export option for ansible-inventory
  add support for new "BECOME" prompt in Ansible 2.5+ for adhoc commands
  enforce strings for secret password inputs on Credentials
  fix a bug for "users should be able to change type of unused credential"
  fix xss vulnerabilities - on host recent jobs popover - on schedule name tooltip
  fix a bug when testing UDP-based logging configuration
  bump templates form credential_types page limit
  Wait for Slack RTM API websocket connection to be established
  don't process artifacts from custom `set_stat` calls asynchronously
  don't overwrite env['ANSIBLE_LIBRARY'] when fact caching is enabled
  only allow facts to cache in the proper file system location
  replace our memcached-based fact cache implementation with local files
  add support for new "BECOME" prompt in Ansible 2.5+
  fix a bug in inventory generation for isolated nodes
  properly handle unicode for isolated job buffers
2018-02-20 12:22:25 -05:00
Ryan Petrello
76ff925b77 Merge pull request #1298 from sjenning/add-import-playbook
add import_playbook as a top-level playbook indicator
2018-02-20 09:54:23 -05:00
Seth Jennings
42ff1cfd67 add import_playbook as top-level playbook indicator 2018-02-19 16:03:08 -06:00
Ryan Petrello
90bb43ce74 Merge pull request #1292 from ryanpetrello/fix-1291
don't require credentials to relaunch a job
2018-02-19 12:01:42 -05:00
Ryan Petrello
56e3d98e62 don't require credentials to relaunch a job
see: https://github.com/ansible/awx/issues/1291
2018-02-19 11:15:55 -05:00
Matthew Jones
7d51b3b6b6 Merge pull request #1116 from bmduffy/bugfix-pem-validation
[bugfix-pem-validation]
2018-02-19 07:53:19 -05:00
Brian Duffy
4270e3a17b [bugfix] updated pem validation unit tests 2018-02-18 15:11:42 +00:00
Brian Duffy
098f4eb198 [bugfix-pem-validation] pass flake8 2018-02-18 01:46:31 +00:00
Jake McDermott
ae1167ab15 Merge pull request #1282 from ansible/jakemcdermott-patch-1
fix last run job variable reference
2018-02-16 16:35:35 -05:00
Jake McDermott
5b5411fecd fix last run job variable reference 2018-02-16 16:32:13 -05:00
Brian Duffy
235213bd3b updated regex 2018-02-16 16:06:33 +00:00
Wayne Witzel III
2c71a27630 Merge pull request #1123 from wwitzel3/new-permissions
New RBAC Roles
2018-02-15 16:56:03 -05:00
Alan Rominger
1a6819cdea Merge pull request #630 from AlanCoding/text_type
Fix unicode bugs with log statements
2018-02-15 15:52:29 -05:00
AlanCoding
465e605464 fix unicode bugs with log statements 2018-02-15 15:26:58 -05:00
Alan Rominger
22f1a53266 Merge pull request #1233 from AlanCoding/no_turning_back
Raise 400 error on removal of credential on launch
2018-02-15 14:11:57 -05:00
Ryan Petrello
733b4b874e Merge pull request #1255 from ryanpetrello/license-compliance
changes to license compliance
2018-02-15 09:30:41 -05:00
AlanCoding
3d433350d3 raise 400 error on removal of credential on launch
Removal is defined as providing a `credentials` list on launch
that lacks a type of credential that the job template has.
This assures that every category of credential the job template
has will also exist on jobs run from that job template.
This restriction already existed, but this makes the endpoint
fail instead of re-adding the credentials.
This change makes manual launch congruent with saved launch
configurations.
2018-02-15 08:16:03 -05:00
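
The rule above can be sketched in a few lines; this is a hedged illustration with hypothetical names (plain dicts stand in for AWX's credential models, and a `ValueError` stands in for the HTTP 400 response):

```python
def validate_launch_credentials(template_creds, launch_creds):
    """Reject a launch whose `credentials` list drops a credential
    type that the job template already has."""
    template_types = {c["credential_type"] for c in template_creds}
    launch_types = {c["credential_type"] for c in launch_creds}
    missing = template_types - launch_types
    if missing:
        # AWX would return a 400 response here instead of raising
        raise ValueError(
            "Removing credential types on launch is prohibited: %s"
            % sorted(missing)
        )

# The template has a machine credential; the launch payload supplies
# only a vault credential, so the request is rejected.
template = [{"id": 1, "credential_type": "ssh"}]
launch = [{"id": 7, "credential_type": "vault"}]
validate_launch_credentials(template, launch)  # raises ValueError
```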
Wayne Witzel III
30a5617825 Address PR feedback 2018-02-14 22:53:33 +00:00
Alan Rominger
5935c410e4 Merge pull request #629 from AlanCoding/export
Use --export option for ansible-inventory
2018-02-14 15:56:05 -05:00
Alan Rominger
c90cf7c5e2 Merge pull request #1253 from AlanCoding/group_vars
Use --export option for ansible-inventory
2018-02-14 15:52:00 -05:00
Ryan Petrello
218dfb680e changes to license compliance
now if a license is expired or over the managed node limit, it won't
prevent host creation or Job/JobTemplate launches

see: https://github.com/ansible/ansible-tower/issues/7860
2018-02-14 15:51:19 -05:00
AlanCoding
b01deb393e use --export option for ansible-inventory 2018-02-14 14:48:13 -05:00
Chris Church
410111b8c8 Merge pull request #1241 from cclauss/six.string_types_in_mixins.py
six.string_types in mixins.py
2018-02-14 13:38:44 -05:00
AlanCoding
05e6eda453 use --export option for ansible-inventory 2018-02-14 12:34:41 -05:00
Bill Nottingham
4a4b44955b Merge pull request #1247 from pkoro/anchor-link-fix
Fix in anchor link
2018-02-14 10:37:43 -05:00
Ryan Petrello
9d82098162 Merge pull request #1249 from ryanpetrello/no-travis
we don't use travis for tests; remove .travis.yml
2018-02-14 10:15:48 -05:00
Ryan Petrello
1c62c142f1 we don't use travis for tests; remove .travis.yml 2018-02-14 10:07:17 -05:00
Paschalis Korosoglou
5215bbcbf6 Fix in anchor link 2018-02-14 16:05:58 +02:00
Matthew Jones
0d2daecf49 Merge pull request #1243 from matburt/fix_clustering_isolated
Fix isolated instance clustering implementation
2018-02-14 08:32:24 -05:00
cclauss
552b69592c six.string_types in mixins.py 2018-02-14 08:35:14 +01:00
Matthew Jones
ffe5a92eb9 Update isolated instance capacity calculation 2018-02-13 21:51:50 -05:00
Matthew Jones
925d9efecf Fixing up isolated node execution after cluster changes
* Rework queue detection to include control groups and isolated instances
* Fix up development tooling around isolated nodes
* Update unit tests
2018-02-13 21:51:38 -05:00
Jake McDermott
c1b6595a0b Merge pull request #1201 from jakemcdermott/item_copy_ui
api-backed copy ui
2018-02-13 17:42:00 -05:00
Jake McDermott
d4e46a35ce get exact match on ids 2018-02-13 17:15:59 -05:00
Jake McDermott
bf0683f7fe replace usage of all and spread 2018-02-13 17:15:56 -05:00
Jake McDermott
0ff94c63f2 use edit capability for showing copy on most views 2018-02-13 17:15:52 -05:00
Jake McDermott
16153daa14 add e2e test for inventory script copy 2018-02-13 17:15:48 -05:00
Jake McDermott
a680d188c0 implement model based copy for inventory scripts 2018-02-13 17:15:44 -05:00
Jake McDermott
d56f1a0120 add e2e test for credential copy 2018-02-13 17:15:41 -05:00
Jake McDermott
50d95ddc3f implement model-based credential copy 2018-02-13 17:15:37 -05:00
Jake McDermott
21a32f90ce add e2e test for notification template copy 2018-02-13 17:15:34 -05:00
Jake McDermott
09d3e6cd98 implement model-based copy for notification templates 2018-02-13 17:15:30 -05:00
Jake McDermott
29f1d695ae add NotificationTemplate model 2018-02-13 17:15:26 -05:00
Jake McDermott
e0f3e4feb7 add e2e test for inventory copy 2018-02-13 17:15:20 -05:00
Jake McDermott
e9ce9621f2 implement model-based copy for inventories 2018-02-13 17:15:16 -05:00
Jake McDermott
a02eda1bea add e2e test for project copy 2018-02-13 17:15:12 -05:00
Jake McDermott
779385ddb6 implement model based copy for projects 2018-02-13 17:15:05 -05:00
Jake McDermott
e5fd483d06 implement model-based copy for job templates and workflow templates 2018-02-13 17:15:01 -05:00
Jake McDermott
8679651d4c add e2e test for template copy and delete warnings 2018-02-13 17:14:57 -05:00
Jake McDermott
4c988fbc02 add WorkflowJobTemplate model 2018-02-13 17:14:48 -05:00
Jake McDermott
c40feb52b7 add base model unit test 2018-02-13 17:14:43 -05:00
Jake McDermott
78b975b2a9 add copy to base model 2018-02-13 17:14:38 -05:00
Jake McDermott
cfba11f8d7 slight cleanup of templates list controller
lint / fix all of the indentation issues
smaller functions
use a variable for any string a user sees
2018-02-13 17:14:30 -05:00
Jake McDermott
73fa8521d0 add e2e test for job and workflow template copy 2018-02-13 17:14:18 -05:00
Jake McDermott
894f0cf2c5 update current workflow copy implementation to be compatible with recent api changes 2018-02-13 17:13:54 -05:00
Chris Church
67ec811e8d Merge pull request #1186 from cclauss/execfile-file-reduce-StandardError
Miscellaneous Python 3 changes: execfile(), file(), reduce(), StandardError
2018-02-13 15:11:24 -05:00
Chris Church
31d0e55c2a Merge pull request #1175 from cclauss/unicode-to-six-u
Change unicode() --> six.text_type() for Python 3
2018-02-13 15:11:11 -05:00
Ryan Petrello
3a0f2ce2fe Merge pull request #628 from ryanpetrello/sudo-become-adhoc
add support for new "BECOME" prompt in Ansible 2.5+ for adhoc commands
2018-02-13 14:38:30 -05:00
Ryan Petrello
613d48cdbc add support for new "BECOME" prompt in Ansible 2.5+ for adhoc commands
see: https://github.com/ansible/ansible-tower/issues/7850
2018-02-13 14:26:27 -05:00
Alan Rominger
39362aab4b Merge pull request #1204 from AlanCoding/default_omission
Omit placeholder vars with survey password defaults
2018-02-13 12:58:11 -05:00
Alan Rominger
6cb3267ebe Merge pull request #1214 from AlanCoding/fix_schedule_qs
Change schedule queryset logic to avoid server error
2018-02-13 12:54:05 -05:00
Bill Nottingham
f8c66b826a Merge pull request #1217 from wenottingham/eat-your-celery-messages
Tweak celery-related messages.
2018-02-13 11:48:21 -05:00
Bill Nottingham
7b288ef98a Tweak celery-related messages. 2018-02-13 10:52:14 -05:00
AlanCoding
58a94be428 Omit placeholder vars with survey password defaults
WFJT nodes & schedules (launch configs) will accept POST/PATCH/PUT
with variables in extra_data that have $encrypted$ for their value
if a valid survey default exists.

In this case, the variable is simply removed from the extra_data.
This is done so that it does not affect pre-existing value
substitution for $encrypted$ values from the config itself
2018-02-13 09:07:59 -05:00
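
A minimal sketch of the behavior described above, assuming a survey spec shaped like AWX's (a `spec` list of questions); the function name is illustrative:

```python
ENCRYPTED = "$encrypted$"

def strip_encrypted_placeholders(extra_data, survey_spec):
    """Drop $encrypted$ placeholders from extra_data when the survey
    defines a password default, so the stored launch config falls back
    to the survey default instead of a literal placeholder value."""
    password_defaults = {
        q["variable"]
        for q in survey_spec.get("spec", [])
        if q.get("type") == "password" and q.get("default")
    }
    return {
        k: v for k, v in extra_data.items()
        if not (v == ENCRYPTED and k in password_defaults)
    }

survey = {"spec": [{"variable": "secret", "type": "password", "default": "x"}]}
print(strip_encrypted_placeholders({"secret": ENCRYPTED, "region": "us"}, survey))
# -> {'region': 'us'}
```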
AlanCoding
960845883d change schedule qs logic, avoid server error 2018-02-13 08:32:00 -05:00
Ryan Petrello
eda53eb548 Merge pull request #627 from ryanpetrello/fix-7898
enforce strings for secret password inputs on Credentials
2018-02-12 17:11:02 -05:00
Ryan Petrello
82e41b40bb enforce strings for secret password inputs on Credentials
see: https://github.com/ansible/ansible-tower/issues/7898
2018-02-12 17:03:32 -05:00
Alan Rominger
0268d575f8 Merge pull request #1193 from AlanCoding/no_sneaking_credential_in
Validation clause for WFJT node to follow credential prompt rule
2018-02-12 12:46:12 -05:00
Brian Duffy
6b5a6e9226 [bugfix-pem-validation] left a print statement in 2018-02-12 16:44:32 +00:00
Ryan Petrello
56d01cda6b Merge pull request #1205 from ryanpetrello/fix-pexpect-test
improve a bwrap test
2018-02-12 10:50:00 -05:00
Ryan Petrello
194c2dcf0b improve a bwrap test 2018-02-12 10:14:37 -05:00
Ryan Petrello
b38be89d1a Merge pull request #1203 from ryanpetrello/update-pexpect
upgrade to the latest pexpect
2018-02-12 09:49:26 -05:00
Ryan Petrello
2a168faf6a upgrade to the latest pexpect
see: https://github.com/ansible/awx/issues/417
2018-02-12 09:18:14 -05:00
Ryan Petrello
83b5377387 Merge pull request #1187 from ryanpetrello/file-your-vars-away-for-a-rainy-day
pass extra vars via file rather than via commandline
2018-02-12 08:48:19 -05:00
cclauss
2e623ad80c Change unicode() --> six.text_type() for Python 3 2018-02-11 21:09:12 +01:00
Ryan Petrello
7e42c54868 Merge pull request #1184 from cclauss/basestring-to-six.string_types
basestring to six.string_types for Python 3
2018-02-10 09:49:16 -05:00
Bill Nottingham
aa5bd9f5bf Pass extra vars via file rather than via commandline, including custom creds.
The extra vars file created lives in the playbook private runtime
directory, and will be reaped along with the rest of the directory.

Adjust assorted unit tests as necessary.
2018-02-10 09:27:24 -05:00
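
The mechanism reads roughly like the following sketch (paths and the playbook name are illustrative, not AWX's actual task-runner code); `-e @<path>` is the standard ansible-playbook syntax for reading vars from a file:

```python
import json
import os
import shutil
import subprocess
import tempfile

# Private runtime directory; the vars file lives and dies with it.
private_dir = tempfile.mkdtemp(prefix="awx_run_")
try:
    extra_vars_path = os.path.join(private_dir, "extra_vars.json")
    with open(extra_vars_path, "w") as f:
        json.dump({"greeting": "hello"}, f)
    os.chmod(extra_vars_path, 0o600)  # keep vars away from other users
    # Unlike `-e key=value`, a file reference never shows secrets in `ps`.
    subprocess.call(["ansible-playbook", "site.yml",
                     "-e", "@%s" % extra_vars_path])
finally:
    shutil.rmtree(private_dir)  # reaped along with the rest of the directory
```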
Wayne Witzel III
13e777f01b Rename migration files 2018-02-10 02:52:26 +00:00
Wayne Witzel III
819b318fe5 Add Org Execute 2018-02-10 02:52:26 +00:00
Wayne Witzel III
9e7bd55579 Add Notification Admin 2018-02-10 02:52:26 +00:00
Wayne Witzel III
fbece6bdde Updating and adding tests for new RBAC roles 2018-02-10 02:52:26 +00:00
Wayne Witzel III
9fdd00785f Add new RBAC role migrations 2018-02-10 02:52:26 +00:00
Wayne Witzel III
b478740f28 Add Workflow Admin 2018-02-10 02:52:25 +00:00
Wayne Witzel III
109841c350 Add Credential Admin role 2018-02-10 02:52:25 +00:00
Wayne Witzel III
6c951aa883 Add Inventory Admin role 2018-02-10 02:52:25 +00:00
Wayne Witzel III
e7e83afd00 Add Project Admin role 2018-02-10 02:52:25 +00:00
Brian Duffy
7d956a3b68 [bugfix-pem-validation] update from code review 2018-02-10 01:08:29 +00:00
AlanCoding
02ac139d5c validation clause for WFJT node to follow cred prompt rule 2018-02-09 16:17:21 -05:00
Jake McDermott
605a2c7e01 Merge pull request #1189 from jakemcdermott/fix-multivault-select
fix recent multi-vault select breakage
2018-02-09 13:47:17 -05:00
Jake McDermott
484caf29b6 fix recent multi-vault select breakage 2018-02-09 12:52:16 -05:00
Jake McDermott
b2b519e48d Merge pull request #1096 from mabashian/169-v1
UI support for prompting on job template schedules
2018-02-09 11:34:25 -05:00
Jake McDermott
e8e6f50573 Merge branch 'devel' into 169-v1 2018-02-09 11:32:40 -05:00
cclauss
260aec543e Misc Python 3 changes: execfile(), file(), reduce(), StandardError 2018-02-09 17:17:05 +01:00
Marliana Lara
7c95cd008f Merge pull request #1152 from marshmalien/feat/ui_clustering_bugs
Fix UI bugs related to UI Clustering
2018-02-09 11:13:59 -05:00
Ryan Petrello
0ff11ac026 Merge pull request #1185 from ryanpetrello/stop-it-uwsgi
fix celery pid restart issues
2018-02-09 11:07:01 -05:00
Ryan Petrello
605c5e3276 fix celery pid restart issues 2018-02-09 11:03:00 -05:00
cclauss
c371b869dc basestring to six.string_types for Python 3 2018-02-09 16:28:36 +01:00
Shane McDonald
476dbe58c5 Merge pull request #1183 from ryanpetrello/swagger
normalize dates in the Swagger output to minimize diffs
2018-02-09 10:18:19 -05:00
Ryan Petrello
3c43aaef21 normalize dates in the Swagger output to minimize diffs 2018-02-09 10:16:27 -05:00
Ryan Petrello
76d5c02e07 Merge pull request #1181 from ryanpetrello/swagger
move swagger doc metadata out of the awx repo
2018-02-09 10:09:03 -05:00
Ryan Petrello
fe02abe630 move swagger doc metadata out of the awx repo 2018-02-09 09:45:23 -05:00
Ryan Petrello
ce9cb24995 Merge pull request #1171 from cclauss/from-six-import-xrange
from six.moves import xrange for Python 3
2018-02-09 09:02:38 -05:00
Jake McDermott
6cb6c61e5c Merge pull request #1176 from jakemcdermott/stabilize-xss
use project details view to check permissions list
2018-02-08 17:32:39 -05:00
Jake McDermott
67e5d083b8 use project details view to check permissions list 2018-02-08 17:26:54 -05:00
Ryan Petrello
5932c54126 Merge pull request #1165 from ryanpetrello/remove_new_in
remove the `new_in_<version>` in API doc gen
2018-02-08 17:07:50 -05:00
cclauss
e1a8b69736 from six.moves import xrange for Python 3 2018-02-08 22:41:33 +01:00
Ryan Petrello
7472026cca remove the new_in_<version> in API doc gen
see: https://github.com/ansible/awx/issues/73
2018-02-08 16:21:22 -05:00
Jake McDermott
8475bdfdc4 Merge pull request #1170 from shanemcd/fix_standalone_docker_wait_fors
Fix wait_fors in standalone Docker installs
2018-02-08 16:08:31 -05:00
Ryan Petrello
bd2f1568fb Merge pull request #626 from ryanpetrello/release_3.2.3
fix a bug for "users should be able to change type of unused credential"
2018-02-08 15:59:22 -05:00
Alan Rominger
b3dcfc8c18 Merge pull request #903 from ansible/item_copy
Implement item copy feature
2018-02-08 15:51:16 -05:00
Ryan Petrello
72715df751 fix a bug for "users should be able to change type of unused credential"
see: https://github.com/ansible/ansible-tower/issues/7516
related: https://github.com/ansible/tower/pull/441
2018-02-08 15:44:14 -05:00
Shane McDonald
6b3ca32827 Fix wait_fors in standalone Docker installs 2018-02-08 15:08:44 -05:00
Ryan Petrello
1ccdb305e3 Merge pull request #1164 from cclauss/use-new-style-exceptions
Modernize Python 2 code to get ready for Python 3
2018-02-08 14:10:25 -05:00
Ryan Petrello
033bec693b Merge pull request #1166 from ryanpetrello/fix-system-job-stdout
properly handle STDOUT_MAX_BYTES_DISPLAY for system jobs
2018-02-08 13:55:59 -05:00
Ryan Petrello
f2c5859fde properly handle STDOUT_MAX_BYTES_DISPLAY for system jobs
see: https://github.com/ansible/ansible-tower/issues/7890
2018-02-08 11:37:05 -05:00
cclauss
e18838a4b7 Modernize Python 2 code to get ready for Python 3 2018-02-08 17:26:22 +01:00
Shane McDonald
48300da443 Merge pull request #1163 from ryanpetrello/swagger
add indentation to swagger docs
2018-02-08 10:52:47 -05:00
Ryan Petrello
5b9dc41015 add indentation to swagger docs
this will make it easier to spot changes as our APIs change
2018-02-08 10:51:42 -05:00
Alan Rominger
01c6463b1b Merge pull request #1162 from AlanCoding/remove_cred_sf
Remove credential from node and schedule summary fields
2018-02-08 10:37:46 -05:00
Alan Rominger
181399df7a Merge pull request #1159 from AlanCoding/reschedule_msg
Verbose error messages for failure to re-schedule
2018-02-08 10:28:11 -05:00
Ryan Petrello
9bc0a0743b Merge pull request #1161 from ryanpetrello/zone-names
update zoneinfo endpoint to be a list of dicts
2018-02-08 09:48:11 -05:00
Ryan Petrello
c1d0768e37 Merge pull request #1160 from ryanpetrello/fix-old-rrule-dtstart
add a few schedule RRULE parsing improvements
2018-02-08 09:47:59 -05:00
Marliana Lara
d743faf33e Fix UI bugs related to instance groups views
* Fix bug where capacity_adjustment is set to "1.00" when an instance is toggled
* Hook up websockets for instance group jobs and instance jobs
* Add Wait spinner to Capacity_Adjuster, Instance association modal, and Instance group delete
* Add updateDataset event listener to update instance and instanceGroups list after smartSearch query
2018-02-08 09:33:24 -05:00
AlanCoding
0f66892d06 remove credential from node and schedule summary fields 2018-02-08 09:22:55 -05:00
Ryan Petrello
c866d85b8c update zoneinfo endpoint to be a list of dicts 2018-02-08 09:12:26 -05:00
Ryan Petrello
3c799b007e don't allow rrule values that contain both COUNT and UNTIL
see: https://github.com/ansible/ansible-tower/issues/7887
2018-02-08 08:59:52 -05:00
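
RFC 5545 states that UNTIL and COUNT "MUST NOT occur in the same 'recur'", so the check can be as simple as this sketch (a string-level test, not AWX's actual validator):

```python
def validate_rrule(rrule):
    rule = rrule.upper()
    if "COUNT=" in rule and "UNTIL=" in rule:
        raise ValueError("RRULE may not contain both COUNT and UNTIL")

validate_rrule("DTSTART:20180201T120000Z RRULE:FREQ=DAILY;COUNT=5")  # ok
# Adding UNTIL=... to the same rule would raise ValueError.
```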
Ryan Petrello
887f16023a improve detection of expensive DTSTART RRULE values 2018-02-08 08:54:30 -05:00
AlanCoding
87b59903a5 verbose error messages for failure to re-schedule 2018-02-08 08:46:56 -05:00
Bill Nottingham
e982f6ed06 Merge pull request #1154 from wenottingham/namespaces-the-final-frontier
Have bubblewrap mount a new /proc in the wrapped environment.
2018-02-07 17:24:38 -05:00
Ryan Petrello
fb5428dd63 Merge pull request #1151 from ansible/jakemcdermott-patch-1-1
always return schema from get_default_schema
2018-02-07 16:56:48 -05:00
Alan Rominger
b38aa3dfb6 Merge pull request #1153 from AlanCoding/fix_wfjt_scheduling
fix bug scheduling WFJT without prompts
2018-02-07 15:49:13 -05:00
Bill Nottingham
c1a0e2cd16 Have bubblewrap mount a new /proc in the wrapped environment.
Since we're running with a new pid namespace, we should have
a new /proc that is in that namespace. Otherwise the wrapped
processes see the host's /proc, which doesn't match their namespace.
2018-02-07 15:47:03 -05:00
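
A hedged sketch of what that looks like at the bubblewrap level; `--unshare-pid` and `--proc` are real bwrap flags, while the bind mounts here are drastically simplified compared to what AWX actually sets up:

```python
import subprocess

cmd = [
    "bwrap",
    "--unshare-pid",        # run the command in a new pid namespace
    "--proc", "/proc",      # mount a fresh procfs matching that namespace
    "--ro-bind", "/", "/",  # illustrative; AWX binds paths far more selectively
    "ps", "ax",             # inside the sandbox, only namespace-local pids appear
]
subprocess.call(cmd)
```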
AlanCoding
fe69a23a4e fix bug scheduling WFJT without prompts 2018-02-07 14:34:25 -05:00
Jake McDermott
90f555d684 always return schema from get_default_schema 2018-02-07 13:42:01 -05:00
Matthew Jones
4002f2071d Adding instance group policy unit tests
also remove async call for applying topology change
2018-02-07 11:14:53 -05:00
Ryan Petrello
244dfa1c92 Merge pull request #1145 from ryanpetrello/swagger
fix a bad swagger-related import that breaks the build
2018-02-07 09:12:28 -05:00
Ryan Petrello
1adb4cefec fix a bad swagger-related import that breaks the build 2018-02-07 08:56:59 -05:00
Bill Nottingham
4abcbf949a Merge pull request #1142 from geerlingguy/fix-some-text
Fix grammar for tasks - replace 'state' with 'stage'.
2018-02-06 19:28:20 -05:00
Jeff Geerling
19f0b9ba92 Fix grammar for tasks - replace 'state' with 'stage'. 2018-02-06 16:57:59 -06:00
Ryan Petrello
b1c4c75360 Merge pull request #1141 from ryanpetrello/swagger
a bit of extra Swagger doc tinkering
2018-02-06 14:33:24 -05:00
Ryan Petrello
cc3659d375 fix a busted swagger import 2018-02-06 13:43:31 -05:00
Ryan Petrello
b1695fe107 add instructions for generating Swagger/OpenAPI docs 2018-02-06 13:37:33 -05:00
Jake McDermott
8cd0870253 Merge pull request #1135 from chrismeyersfsu/tests-recent_jobs_xss
xss test for per-host recent jobs popup
2018-02-06 11:51:05 -05:00
Ryan Petrello
84dc40d141 Merge pull request #1124 from ryanpetrello/swagger
add support for building swagger/OpenAPI JSON
2018-02-06 11:12:36 -05:00
Ryan Petrello
8b976031cb use VERSION_TARGET for Swagger doc generation 2018-02-06 10:48:51 -05:00
Chris Meyers
aaf87c0c04 xss test for per-host recent jobs popup 2018-02-06 10:37:00 -05:00
Ryan Petrello
7ff9f0b7d1 build example Swagger request and response bodies from our API tests 2018-02-06 10:36:25 -05:00
Ryan Petrello
527594285f more Swagger template markup 2018-02-06 10:12:58 -05:00
Ryan Petrello
07dfab648c add some tests to prove that OpenAPI JSON compilation works 2018-02-06 10:12:58 -05:00
Ryan Petrello
10974159b5 add support for marking Swagger paths deprecated 2018-02-06 10:12:58 -05:00
Ryan Petrello
ac7c5f8648 clean up API markdown docs 2018-02-06 10:12:57 -05:00
Ryan Petrello
57c22c20b2 add support for building swagger/OpenAPI JSON
to build, run `make swagger`; a file named `swagger.json` will be
written to the current working directory
2018-02-06 10:12:57 -05:00
Matthew Jones
c61efc0af8 Add information on enabled flag 2018-02-05 15:44:26 -05:00
Ryan Petrello
772fcc9149 Merge pull request #1097 from rbywater/feature/preferipv4
Add ability to select to prefer IPv4 addresses for ansible_ssh_host
2018-02-05 14:57:10 -05:00
Matthew Jones
8e94a9e599 Adding capacity docs
Updating capacity for callback jobs to include parent process impact
2018-02-05 09:49:01 -05:00
Shane McDonald
1e9b0c2786 Merge pull request #1130 from shanemcd/fix-etcd-template
Fix variable reference in k8s etcd template
2018-02-05 09:18:20 -05:00
Richard Bywater
5e5790e7d1 Use correct source_vars syntax 2018-02-05 12:45:52 +13:00
Richard Bywater
9f8b9b8d7f Fix unit test 2018-02-05 08:55:10 +13:00
Richard Bywater
6d69087db8 Add prefer_ipv4 to whitelist and add unit test for config value 2018-02-05 08:55:10 +13:00
Richard Bywater
a737663dde Add ability to select to prefer IPv4 addresses for ansible_ssh_host
Currently Cloudforms can return a mix of IPv4 and IPv6 addresses in the
ipaddresses field and this mix comes in a "random" order (that is the
first entry may be IPv4 sometimes but IPv6 other times). If you wish to
always use IPv4 for the ansible_ssh_host value then this is problematic.

This change adds a new prefer_ipv4 flag which will look for the first
IPv4 address in the ipaddresses list and use that instead of just the
first entry.
2018-02-05 08:55:10 +13:00
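
The selection logic amounts to something like this sketch (the function name is hypothetical; AWX wires the flag through the inventory source's `source_vars`):

```python
import socket

def pick_ssh_host(ipaddresses, prefer_ipv4=False):
    """Return the address to use for ansible_ssh_host."""
    if prefer_ipv4:
        for addr in ipaddresses:
            try:
                socket.inet_pton(socket.AF_INET, addr)
                return addr  # first IPv4 entry wins
            except (socket.error, OSError):
                continue  # IPv6 or unparsable; keep scanning
    return ipaddresses[0] if ipaddresses else None

# CloudForms returns IPv4/IPv6 in "random" order; the flag makes the
# choice deterministic.
print(pick_ssh_host(["fe80::1", "10.0.0.5"], prefer_ipv4=True))  # 10.0.0.5
```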
Shane McDonald
dce934577b Fix variable reference in k8s etcd template 2018-02-03 10:29:53 -05:00
Jake McDermott
3d421cc595 Merge pull request #1078 from jakemcdermott/saml-ldap-updates
update configuration views for multiple LDAP servers, SAML 2FA, and SAML attribute mapping
2018-02-02 12:15:44 -05:00
Ryan Petrello
93c8cc9f8e Merge pull request #696 from jladdjr/awx_349_custom_cred_write_multiple_files
Feature: Multi-file support for Credential Types
2018-02-02 11:39:11 -05:00
Chris Meyers
1808559586 Merge pull request #1102 from chrismeyersfsu/tests-job_schedules_xss
add xss test for jobs schedules
2018-02-02 11:29:42 -05:00
Jim Ladd
d558299b1f Add test for injecting multiple files 2018-02-02 11:07:13 -05:00
Bill Nottingham
ef5b040f70 Merge pull request #1121 from jeis2497052/devel
Propose small spelling changes
2018-02-02 10:55:23 -05:00
John Eismeier
026cbeb018 Propose small spelling changes 2018-02-02 10:49:55 -05:00
Matthew Jones
6163cc6b5c Merge pull request #1058 from ansible/scalable_clustering
Implement Container Cluster-based dynamic scaling
2018-02-02 09:22:06 -05:00
Ryan Petrello
ed1bacdc08 Merge pull request #1090 from ryanpetrello/awx-ansible-overview
add documentation for how awx uses/interacts with ansible
2018-02-02 08:44:11 -05:00
Jake McDermott
f39fa35d86 Merge pull request #1103 from mabashian/1091-status-icons
Tweaked smart status icon styling to prevent overlap with action buttons
2018-02-01 23:03:37 -05:00
Matthew Jones
9266444b19 Merge pull request #1117 from ryanpetrello/fatal-celery-reload
run the celery reload in a shell so the uwsgi hook isn't fatal on fail
2018-02-01 22:39:00 -05:00
Ryan Petrello
35230eded1 run the celery reload in a shell so the uwsgi hook isn't fatal on fail 2018-02-01 22:32:08 -05:00
Brian Duffy
68057560e5 [bugfix-pem-validation] added unit test to simulate catted data
Signed-off-by: Brian Duffy <bmduffy@gmail.com>
2018-02-02 01:20:31 +00:00
Brian Duffy
047ff7b55f [bugfix-pem-validation]
Signed-off-by: Brian Duffy <bmduffy@gmail.com>
2018-02-01 23:50:02 +00:00
Jake McDermott
ecacf64c28 Merge pull request #1105 from mabashian/1023-activity-stream-inv-link
Fixed inventory links in activity stream
2018-02-01 18:23:54 -05:00
Jake McDermott
d01e6ab8b6 Merge pull request #1106 from mabashian/1014-team-link-user-permissions
Fixed team links in users permissions tab
2018-02-01 18:23:00 -05:00
Jake McDermott
5653b47aa3 Merge pull request #1112 from mabashian/994-empty-list-text
Updated empty list text
2018-02-01 18:22:11 -05:00
Marliana Lara
d4a461e5b4 Replace Array.map() with Array.forEach() 2018-02-01 16:57:10 -05:00
Marliana Lara
f9265ee329 Create an InstancePolicyList directive to replace the pre-existing
modal implementation

* Remove Instance-List-Policy controller
* Replace let with const when values aren't being reassigned
* Update CapacityAdjuster directive to use replace:true
* Assign fewer values that are specific to the element
* Add more error handling
2018-02-01 16:57:10 -05:00
Marliana Lara
fa70d108d7 Apply UI feedback changes
* Remove input slider css mixin
* Remove unused dependencies
* Improve error handling by plugging in the ProcessErrors factory
2018-02-01 16:57:10 -05:00
Marliana Lara
e07f441e32 Add Instance enable/disable toggle to list 2018-02-01 16:57:10 -05:00
Marliana Lara
70786c53a7 Add capacity adjuster directive 2018-02-01 16:57:10 -05:00
Marliana Lara
342958ece3 Add stringToNumber directive 2018-02-01 16:57:09 -05:00
Marliana Lara
368101812c Add Instance and InstanceGroup models 2018-02-01 16:57:09 -05:00
Matthew Jones
70bf78e29f Apply capacity algorithm changes
* This also adds fields to the instance view for tracking cpu and
  memory usage as well as information on what the capacity ranges are
* Also adds a flag for enabling/disabling instances which removes them
  from all queues and has them stop processing new work
* The capacity is now based almost exclusively on some value relative
  to forks
* capacity_adjustment allows you to commit an instance to a certain
  amount of forks, cpu focused or memory focused
* Each job run adds a single fork overhead (that's the reasoning
  behind the +1)
2018-02-01 16:57:09 -05:00
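
Read as a formula, the bullets above suggest a fork-denominated capacity that `capacity_adjustment` slides between a CPU-derived and a memory-derived bound, plus one fork of overhead per job. This sketch uses made-up constants; AWX's tuned values differ:

```python
def instance_capacity(cpu_count, mem_bytes, capacity_adjustment):
    forks_per_cpu = 4           # assumed CPU-to-fork ratio
    mem_per_fork = 100 * 2**20  # assume ~100 MB of memory per fork
    cpu_capacity = cpu_count * forks_per_cpu
    mem_capacity = max(int(mem_bytes // mem_per_fork), 1)
    low, high = sorted((cpu_capacity, mem_capacity))
    # 0.0 commits the instance to the conservative bound, 1.0 to the
    # generous one; values in between interpolate linearly.
    return int(low + (high - low) * capacity_adjustment)

def job_impact(forks):
    return forks + 1  # each job run adds a single fork of overhead

print(instance_capacity(cpu_count=4, mem_bytes=8 * 2**30, capacity_adjustment=0.5))
```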
Matthew Jones
6a85fc38dd Add scalable cluster kubernetes support 2018-02-01 16:57:09 -05:00
Matthew Jones
6e9930a45f Use on_commit hook for triggering ig policy
* also Apply console handlers to loggers for dev environment
2018-02-01 16:56:43 -05:00
Matthew Jones
d9e774c4b6 Updates for automatic triggering of policies
* Switch policy router queue to not be "tower" so that we don't
  fall into a chicken/egg scenario
* Show fixed policy list in serializer so a user can determine if
  an instance is manually managed
* Change IG membership mixin to not directly handle applying topology
  changes. Instead it just makes sure the policy instance list is
  accurate
* Add create/delete hooks for instances and groups to trigger policy
  re-evaluation
* Update policy algorithm for fairer distribution
* Fix an issue where CELERY_ROUTES wasn't renamed after celery/django
  upgrade
* Update unit tests to be more explicit
* Update count calculations used by algorithm to only consider
  non-manual instances
* Adding unit tests and fixture
* Don't propagate logging messages from awx.main.tasks and
  awx.main.scheduler
* Use advisory lock to prevent policy eval conflicts
* Allow updating instance groups from view
2018-02-01 16:56:16 -05:00
Matthew Jones
56abfa732e Adding initial instance group policies
and policy evaluation planner
2018-02-01 16:56:15 -05:00
Matthew Jones
c819560d39 Add automatic deprovisioning support, only enabled for openshift
* Implement a config watcher for service restarts
* If the configmap bind point changes then restart all services
2018-02-01 16:51:40 -05:00
Chris Meyers
0e97dc4b84 Beat and celery clustering fixes
* use embedded beat rather than standalone
* dynamically set celeryd hostname at runtime
* add embedded beat flag to celery startup
* Embedded beat mode routes will piggyback off of celery worker setup
signal
2018-02-01 16:47:33 -05:00
Matthew Jones
624289bed7 Add support for directly managing instance groups
* Associating/Disassociating an instance with a group
* Triggering a topology rebuild on that change
* Force rabbitmq cleanup of offline nodes
* Automatically check for dependent service startup
* Fetch and set hostname for celery so it doesn't clobber other
  celeries
* Rely on celery init signals to dynamically set listen queues
* Removing old total_capacity instance manager property
2018-02-01 16:46:44 -05:00
Matthew Jones
6ede1dfbea Update openshift installer to support rabbitmq autoscale
* Switch rabbitmq container out for one that supports autoscale
* Add etcd pod to support autoscale negotiation
2018-02-01 16:38:10 -05:00
Chris Meyers
c9ff3e99b8 celeryd attach to queues dynamically
* Based on the tower topology (Instance and InstanceGroup
relationships), have celery dynamically listen to queues on boot
* Add celery task capable of "refreshing" what queues each celeryd
worker listens to. This will be used to support changes in the topology.
* Cleaned up some celery task definitions.
* Converged wrongly targeted job launch/finish messages to 'tower'
queue, rather than a 1-off queue.
* Dynamically route celery tasks destined for the local node
* separate beat process

add support for separate beat process
2018-02-01 16:37:33 -05:00
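
The boot-time queue attachment can be sketched with celery's documented `celeryd_after_setup` signal; the topology lookup below is a stand-in for AWX's Instance/InstanceGroup query:

```python
from celery import Celery
from celery.signals import celeryd_after_setup

app = Celery("awx_sketch", broker="amqp://localhost//")

def queues_for_this_node(hostname):
    # Hypothetical: AWX derives this from Instance/InstanceGroup rows.
    return ["tower", hostname]

@celeryd_after_setup.connect
def attach_queues(sender, instance, **kwargs):
    # `sender` is the worker nodename, `instance` is the Worker object.
    for name in queues_for_this_node(sender):
        instance.app.amqp.queues.select_add(name)
```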
Ryan Petrello
7bc3d85913 Merge pull request #1114 from ryanpetrello/fix-dateutil-bug
work around a bug in dateutil that incorrectly parses Z dates
2018-02-01 16:06:13 -05:00
Ryan Petrello
0a8df7fde2 work around a bug in dateutil that incorrectly parses Z dates
related: https://github.com/dateutil/dateutil/issues/349
2018-02-01 15:51:59 -05:00
Ryan Petrello
b39269c4c2 Merge pull request #1113 from ryanpetrello/fix-schedule-related
fix a bug which can break the schedules list endpoint
2018-02-01 14:50:37 -05:00
Chris Meyers
09981c0020 Merge pull request #1107 from ansible/docs-saml2
Extend saml docs to include new fields added
2018-02-01 14:33:51 -05:00
Ryan Petrello
81bdbef785 fix a bug which can break the schedules list endpoint
see: https://github.com/ansible/ansible-tower/issues/7881
related: https://github.com/ansible/awx/pull/1095
2018-02-01 14:30:56 -05:00
Chris Meyers
3c541a4695 Merge pull request #1111 from ansible/jakemcdermott-update-ldap-docs
Updates to ldap documentation
2018-02-01 14:30:23 -05:00
Jake McDermott
5a1ae9b816 Update ldap.md 2018-02-01 13:57:07 -05:00
mabashian
8c261892ee Updated empty list text 2018-02-01 13:54:33 -05:00
Matthew Jones
b89d4349c0 Merge pull request #1080 from Xiol/feat-projects-vol
Allow AWX projects directory to be a volume
2018-02-01 13:17:34 -05:00
Jake McDermott
3e98363811 Merge pull request #1104 from ansible/doc-formatting
Update saml.md
2018-02-01 11:55:46 -05:00
John Mitchell
7e400413db Merge pull request #625 from jlmitch5/fixXSS
fix xss vulnerabilities
2018-02-01 11:49:35 -05:00
Chris Meyers
f24289b2ba Extend saml docs to include new fields added 2018-02-01 11:27:01 -05:00
mabashian
9170c557a7 Fixed team links in users permissions tab 2018-02-01 11:23:51 -05:00
Chris Meyers
a47b403f8d Update saml.md 2018-02-01 11:05:08 -05:00
mabashian
83aa7bfac4 Fixed inventory links in activity stream 2018-02-01 11:04:16 -05:00
Chris Meyers
290a296f9f add xss test for jobs schedules
* Test for tooltip regression on job schedules list entries
2018-02-01 10:55:13 -05:00
mabashian
db0b2e6cb6 Tweaked smart status icon styling to prevent overlap with action buttons 2018-02-01 10:44:08 -05:00
Ryan Petrello
f391b7ace4 Merge pull request #1095 from ryanpetrello/schedule-related-proj-inv
add related links to the inventory and project for a schedule
2018-01-31 15:53:13 -05:00
Ryan Petrello
008c9e4320 Merge pull request #1094 from ryanpetrello/leaky-mock
remove some leaky mock.patch() that were causing sporadic test failures
2018-01-31 15:52:58 -05:00
mabashian
e57d200d6e Implemented generic prompt modal for launching and saving launch configurations. Added UI support for prompting on job template schedules. 2018-01-31 15:40:23 -05:00
Ryan Petrello
8ddc1c61ef add related links to the inventory and project for a schedule
see: https://github.com/ansible/awx/issues/276
2018-01-31 15:18:11 -05:00
Ryan Petrello
0aa6c7b83f remove some leaky mock.patch() that were causing sporadic test failures 2018-01-31 15:12:59 -05:00
Jake McDermott
e43879d44e Merge pull request #1092 from dovshap/patch-1
Update INSTALL.md
2018-01-31 14:11:11 -05:00
Ryan Petrello
2a6f6111dc add documentation for how awx uses/interacts with ansible 2018-01-31 14:02:18 -05:00
dovshap
6b0659d63a Update INSTALL.md
fix bad link in contents
2018-01-31 11:00:03 -08:00
Jim Ladd
4c1dddcaf9 Respond to PR feedback 2018-01-31 11:22:01 -05:00
Shane McDonald
426e901cdf Merge pull request #1089 from KAMiKAZOW/patch-1
Fix CentOS typo in CONTRIBUTING.md
2018-01-30 21:11:10 -05:00
KAMiKAZOW
ac55f93cfb CentOS typo in CONTRIBUTING.md 2018-01-31 03:07:52 +01:00
Ryan Petrello
c32c3db35e Merge pull request #1086 from ryanpetrello/fix-enabled-sso-auth
fix a bug which causes /api/v2/auth/ to list disabled auth backends
2018-01-30 16:35:26 -05:00
John Mitchell
28596b7d5e fix xss vulnerabilities
- on host recent jobs popover
- on schedule name tooltip
2018-01-30 16:30:00 -05:00
Ryan Petrello
20a999f846 Merge pull request #1085 from ryanpetrello/fix-7876
fix a bug in custom venv support that breaks legacy `POST /api/v1/jobs`
2018-01-30 16:21:10 -05:00
Ryan Petrello
81af34fce3 fix a bug which causes /api/v2/auth/ to list disabled auth backends
see: https://github.com/ansible/awx/issues/1073
2018-01-30 16:20:29 -05:00
Ryan Petrello
8fed469975 fix a bug in custom venv support that breaks legacy POST /api/v1/jobs
see: https://github.com/ansible/ansible-tower/issues/7876
2018-01-30 15:29:11 -05:00
Jake McDermott
a2e274d1f9 Merge pull request #623 from jakemcdermott/fix-ansible-tower-7871
bump templates form credential_types page limit
2018-01-30 14:48:36 -05:00
Ryan Petrello
d96cc51431 Merge pull request #624 from ryanpetrello/release_3.2.3
fix a bug when testing UDP-based logging configuration
2018-01-30 10:27:39 -05:00
Dane Elwell
c6d4a62263 Allow AWX projects directory to be a volume
Signed-off-by: Dane Elwell <dane.elwell@ukfast.co.uk>
2018-01-30 09:49:44 +00:00
Jake McDermott
4cd6a6e566 add fields for saml + 2fa 2018-01-30 00:28:13 -05:00
Jake McDermott
ed138fccf6 add forms + select for additional ldap servers 2018-01-30 00:28:02 -05:00
Jake McDermott
44d223b6c9 add fields for team and organization saml attribute mappings 2018-01-30 00:27:51 -05:00
Ryan Petrello
a9b77eb706 Merge pull request #1066 from ryanpetrello/fix-schedules-without-inventories
fix a few bugs for scheduled jobs that run without inventories
2018-01-29 16:08:17 -05:00
Ryan Petrello
e642af82cc fix a few bugs for scheduled jobs that run without inventories
see: https://github.com/ansible/ansible-tower/issues/7865
see: https://github.com/ansible/ansible-tower/issues/7866
2018-01-29 15:15:57 -05:00
Ryan Petrello
b0a755d7b5 Merge pull request #1076 from ryanpetrello/rrule-until-changes
adhere to RFC5545 regarding UNTIL timezones
2018-01-29 13:31:17 -05:00
Ryan Petrello
6753f1ca35 adhere to RFC5545 regarding UNTIL timezones
If the "DTSTART" property is specified as a date with UTC time or a date with
local time and time zone reference, then the UNTIL rule part MUST be specified
as a date with UTC time.
2018-01-29 12:42:31 -05:00
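
A string-level sketch of that constraint (not AWX's actual validator): if DTSTART carries a timezone, UNTIL must end in "Z".

```python
import re

def validate_until_is_utc(rrule):
    tz_dtstart = ("DTSTART;TZID=" in rrule
                  or re.search(r"DTSTART:\d{8}T\d{6}Z", rrule))
    until = re.search(r"UNTIL=(\d{8}T\d{6})(Z?)", rrule)
    if tz_dtstart and until and not until.group(2):
        raise ValueError("UNTIL must be specified as a date with UTC time")

validate_until_is_utc(
    "DTSTART;TZID=America/New_York:20180601T120000 "
    "RRULE:FREQ=DAILY;UNTIL=20180701T120000Z"
)  # passes; dropping the trailing Z would raise
```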
Ryan Petrello
982539f444 fix a bug when testing UDP-based logging configuration
see: https://github.com/ansible/ansible-tower/issues/7868
2018-01-29 12:05:51 -05:00
Ryan Petrello
f8d9d5f51a Merge pull request #1067 from ryanpetrello/fix-7869
don't allow distant DTSTART values for schedules; it's slow
2018-01-29 12:00:36 -05:00
Wayne Witzel III
bad8c65321 Merge pull request #1074 from wwitzel3/devel
Load Celery inspector manually when needed
2018-01-29 12:00:24 -05:00
Ryan Petrello
6f0c937236 don't allow distant DTSTART values for schedules; it's slow
see: https://github.com/ansible/ansible-tower/issues/7869
2018-01-29 10:16:03 -05:00
Wayne Witzel III
55a616cba6 Load Celery inspector manually when needed 2018-01-29 14:57:03 +00:00
Jake McDermott
4c79e6912e bump templates form credential_types page limit 2018-01-28 21:50:30 -05:00
Jim Ladd
4b13bcdce2 Update tests for custom credentials 2018-01-28 21:02:48 -05:00
Jim Ladd
18178c83b3 Validate single and multi-file injection 2018-01-28 21:02:47 -05:00
Jim Ladd
7aa1ae69b3 Add backwards compatibility for injecting single file 2018-01-28 20:50:44 -05:00
Jim Ladd
286a70f2ca Add support for multi-file injection in custom creds 2018-01-28 20:50:43 -05:00
Shane McDonald
87365e5969 Merge pull request #1071 from jakemcdermott/set-selenium-container-image-tags
use selenium hub / node container image version '3.8.1-erbium'
2018-01-28 16:37:03 -05:00
Jake McDermott
7e829e3a9d use selenium hub / node container image version '3.8.1-erbium' 2018-01-28 16:05:29 -05:00
Shane McDonald
b8cba916a5 Merge pull request #1069 from tdgroot/develop-dockerfile_rsync
Add rsync to Dockerfile
2018-01-27 10:14:36 -05:00
Timon de Groot
dc96a1730e Add rsync to Dockerfile 2018-01-27 11:54:51 +01:00
Matthew Jones
d4983ea10d Merge pull request #856 from ewjoachim/docker-compose-491
Fixes #491: Adding Docker Compose installer
2018-01-26 08:33:40 -05:00
Joachim Jablon
209bdd00a1 related #491 Backport #1007 2018-01-26 07:09:28 +01:00
Joachim Jablon
c4efbd62bc related #491 Docker Compose installer
Signed-off-by: Joachim Jablon <ewjoachim@gmail.com>
2018-01-26 07:09:28 +01:00
Joachim Jablon
287a3bc8d4 related #491 Documentation for Docker Compose
Signed-off-by: Joachim Jablon <ewjoachim@gmail.com>
2018-01-26 07:09:28 +01:00
Joachim Jablon
9fefc26528 related #491 Split local_docker docker into 2 task files
Signed-off-by: Joachim Jablon <ewjoachim@gmail.com>
2018-01-26 07:09:28 +01:00
Ryan Petrello
e2d4ef31fd Merge pull request #1061 from ryanpetrello/fix-1042
fix a unicode bug in the stdout endpoint when ?content_encoding=base64
2018-01-25 16:07:21 -05:00
Ryan Petrello
a15e257b9e fix a unicode bug in the stdout endpoint when ?content_encoding=base64
see: https://github.com/ansible/awx/issues/1042
2018-01-25 15:53:43 -05:00
Ryan Petrello
a56370fed5 Merge pull request #1059 from ryanpetrello/reload-celery
reload the entire celery worker pool when uwsgi reloads the Python app
2018-01-25 15:12:22 -05:00
Ryan Petrello
e7ed4811c1 reload the entire celery worker pool when uwsgi reloads the Python app
this is for the development environment only; when uwsgi notices a code
change, it automatically reloads the uwsgi workers; this patch includes
a hook that sends `SIGHUP` to the celery process, causing it to spawn
a new set of workers as well
2018-01-25 14:59:40 -05:00
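
The hook boils down to a signal send; this sketch assumes a celery pidfile at an illustrative path:

```python
import os
import signal

def reload_celery(pidfile="/var/run/celery.pid"):
    """Development-only: ask celery to respawn its worker pool."""
    with open(pidfile) as f:
        pid = int(f.read().strip())
    os.kill(pid, signal.SIGHUP)  # celery restarts its workers on SIGHUP
```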
Ryan Petrello
9860b38438 Merge pull request #1055 from ryanpetrello/lazybaron
only import the redbaron library on-demand
2018-01-25 13:09:58 -05:00
Ryan Petrello
ef80ecd3b6 only import the redbaron library on-demand
redbaron is a library we use to facilitate parsing local settings files;
at _import_ time it generates a parse tree and caches it to disk at
`/tmp`; this process is _really_ time consuming, and only necessary if
we're actually *using* the library

right now, we're importing this library and paying the penalty
_every_ time we load the awx application
2018-01-25 10:23:44 -05:00
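
The fix is the classic deferred-import pattern; a sketch (the helper name is hypothetical, the RedBaron calls are real):

```python
def find_setting_assignments(settings_source):
    # Imported on first use, not at application load, so the expensive
    # parse-tree generation and /tmp caching only happen when needed.
    from redbaron import RedBaron
    red = RedBaron(settings_source)
    return red.find_all("assignment")

print(find_setting_assignments("DEBUG = True\nLOG_LEVEL = 'info'\n"))
```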
Ryan Petrello
50290a9063 Merge pull request #1024 from ryanpetrello/fix-710-schedule-timezone
support TZID= in schedule rrules
2018-01-25 10:14:57 -05:00
Shane McDonald
fefa4a8bf4 Merge pull request #1049 from ansible/jakemcdermott-fix-1045
add minimum git version to install guide
2018-01-24 15:40:10 -05:00
Jake McDermott
546f88c74d add minimum git version 2018-01-24 15:12:58 -05:00
Matthew Jones
42098bfa6d Merge pull request #621 from ryanpetrello/set_stat_workflow_race_condition
don't process artifacts from custom `set_stat` calls asynchronously
2018-01-24 10:27:19 -05:00
Jake McDermott
afa1fb489c Merge pull request #1044 from jakemcdermott/fix-948
bump templates form credential_types page limit
2018-01-24 09:35:43 -05:00
Wayne Witzel III
b205630490 Merge pull request #622 from wwitzel3/release_3.2.3
Wait for Slack RTM API websocket connection to be established
2018-01-24 08:59:45 -05:00
Wayne Witzel III
aa469d730e Wait for Slack RTM API websocket connection to be established 2018-01-24 13:48:42 +00:00
Jake McDermott
3571abb42b bump templates form credential_types page limit 2018-01-23 18:28:59 -05:00
Ryan Petrello
d57470ce49 don't process artifacts from custom set_stat calls asynchronously
previously, we persisted custom artifacts to the database on
`Job.artifacts` via the callback receiver.  when the callback receiver
is backed up processing events, this can result in race conditions for
workflows where a playbook calls `set_stat()`, but the artifact data is
not persisted in the database before the next job in the workflow starts

see: https://github.com/ansible/ansible-tower/issues/7831
2018-01-23 17:09:23 -05:00
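
The shape of the fix, sketched with illustrative Django-style model calls (not AWX's actual event-handling code):

```python
def handle_playbook_stats(job, event_data):
    artifacts = event_data.get("artifact_data")
    if artifacts:
        # Synchronous write: the workflow scheduler cannot start the
        # next node before this returns, so that node always sees the
        # set_stat() artifacts.
        job.artifacts = artifacts
        job.save(update_fields=["artifacts"])
```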
Jake McDermott
21425db889 Merge pull request #1041 from jakemcdermott/fix-multicred-bugs
Fix multicred bugs
2018-01-23 13:34:35 -05:00
Jake McDermott
cc64657749 use correct handle for modal tag deselect 2018-01-23 10:32:13 -05:00
Jake McDermott
7300c2ccc1 fix unexpected deselect when selecting no-vault-id vault credentials 2018-01-23 10:19:19 -05:00
Jake McDermott
7c596039c5 fix modal exit button close 2018-01-23 09:39:59 -05:00
Ryan Petrello
9857c8272e add more tests for weird timezone/DST boundaries in schedules
see: https://github.com/ansible/awx/pull/1024
2018-01-22 14:57:57 -05:00
Shane McDonald
797169317c Merge pull request #1037 from shanemcd/devel
Use newer version of git in dev image
2018-01-22 14:43:09 -05:00
Shane McDonald
67c6591f6f Use newer version of git in dev image
More fallout from #982
2018-01-22 13:57:44 -05:00
Ryan Petrello
15906b7e3c support TZID= in schedule rrules
this commit allows schedule `rrule` strings to include local timezone
information via TZID=NNNNN; occurrences are _generated_ in the local
time specified by the user (or UTC, if e.g., DTSTART:YYYYMMDDTHHMMSSZ)
while Schedule.next_run, Schedule.dtstart, and Schedule.dtend will be
stored in the UTC equivalent (i.e., the scheduler will still do math on
"what to run next" based on UTC datetimes).

in addition to this change, there is now a new API endpoint,
`/api/v2/schedules/preview/`, which takes an rrule and shows the next
10 occurrences in local and UTC time.

see: https://github.com/ansible/ansible-tower/issues/823
related: https://github.com/dateutil/dateutil/issues/614
2018-01-22 11:50:00 -05:00
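
What the preview computes can be reproduced with dateutil directly; this sketch assumes a dateutil release whose `rrulestr` understands TZID (the related issue above is about exactly that):

```python
from dateutil.rrule import rrulestr
from dateutil.tz import tzutc

rule = rrulestr(
    "DTSTART;TZID=America/New_York:20180301T090000\n"
    "RRULE:FREQ=DAILY;COUNT=10"
)
for local_dt in rule:  # occurrences generated in the user's local time
    print(local_dt.isoformat(), "->", local_dt.astimezone(tzutc()).isoformat())
```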
Ryan Petrello
fdd2b84804 Merge pull request #1036 from ryanpetrello/fix-955
fix a bug that breaks workflows w/ a survey password + inventory sync
2018-01-22 10:55:51 -05:00
Ryan Petrello
ac3f7d0fac fix a bug that breaks workflows w/ a survey password + inventory sync
prior versions of awx did not raise an exception for this scenario
- they simply ignored kwargs that they couldn't accept.  this change is
a sort of middle ground - it ignores them, but gives a clue in the logs
as to why

see: https://github.com/ansible/awx/issues/955
related: https://github.com/ansible/awx/pull/803
2018-01-22 09:41:30 -05:00
Shane McDonald
09d63b4883 Merge pull request #1029 from jakemcdermott/fix-navbar
fix navbar / breadcrumb issue
2018-01-19 13:54:56 -05:00
Jake McDermott
b96e33ea50 fix navbar / breadcrumb issue 2018-01-19 13:52:11 -05:00
Matthew Jones
71d23e8c81 Merge pull request #1007 from wallnerryan/alternate-dns-servers
support dns servers: fixes https://github.com/ansible/awx/issues/1004
2018-01-19 08:58:29 -05:00
Ryan Petrello
073feb74cb Merge pull request #1015 from ryanpetrello/fix-980
fix another bug that breaks the JT callback process
2018-01-18 14:26:48 -05:00
Ryan Petrello
43f19cc94b fix another bug that breaks the JT callback process
see: https://github.com/ansible/awx/issues/980
related: 17cd0595d7
2018-01-18 13:17:06 -05:00
Ryan Petrello
ef312f0030 Merge pull request #1011 from ryanpetrello/fix-1010
don't require an IRC password in the notification UI
2018-01-18 12:55:57 -05:00
Christian Adams
d0fec0f19c Merge pull request #1013 from rooftopcellist/rdb_docs
added RDB info to docs
2018-01-18 12:33:00 -05:00
adamscmRH
1e14221625 added RDB info to docs 2018-01-18 11:48:26 -05:00
Matthew Jones
b6a901ac51 Merge pull request #1012 from wwitzel3/devel
first-parent requires git >= 1.8.4
2018-01-18 11:36:33 -05:00
Wayne Witzel III
1af0ee2f8c first-parent requires git >= 1.8.4 2018-01-18 16:12:23 +00:00
Ryan Petrello
b62ac6fbe4 Merge pull request #1001 from ryanpetrello/fix-7852
refactor credential injection for builtin types
2018-01-18 10:49:03 -05:00
Ryan Petrello
e5aaeedc43 don't require an IRC password in the notification UI
see: https://github.com/ansible/awx/issues/1010
2018-01-18 09:14:22 -05:00
Wayne Witzel III
fc5c5400cd Merge pull request #1003 from wwitzel3/devel
Fix notification_data attempting to access name property of an int
2018-01-18 08:55:50 -05:00
Wayne Witzel III
95bead2bb2 Extend notification_data test 2018-01-18 13:30:12 +00:00
Ryan Wallner
bcbda23aee support dns servers 2018-01-18 07:46:09 -05:00
Jake McDermott
5a21783013 Merge pull request #976 from jakemcdermott/multivault-templates-form
multivault select for templates form
2018-01-17 23:20:44 -05:00
Jake McDermott
e33604de71 show credential kind icon on credential tags 2018-01-17 23:07:34 -05:00
Jake McDermott
c50c63a9ff default to machine credential type 2018-01-17 23:07:23 -05:00
Jake McDermott
916d91cbc7 use updated credentials endpoint 2018-01-17 23:07:10 -05:00
Jake McDermott
79bd8b2c72 show vault id 2018-01-17 22:02:54 -05:00
Jake McDermott
5939116b0a update e2e and smoke tests for multivault select 2018-01-17 22:02:43 -05:00
Jake McDermott
6759e60428 add working multivault select for templates form 2018-01-17 22:02:30 -05:00
Jake McDermott
ef8af79700 load multiselect list when vault kind is selected 2018-01-17 22:02:16 -05:00
Ryan Petrello
dbb4d2b011 refactor credential injection for builtin types
this cleans up a _lot_ of code duplication that we have for builtin
credential types. it will allow customers to set up custom inventory
sources that utilize builtin credential types (e.g., a custom inventory
script that could use an AzureRM credential)

see: https://github.com/ansible/ansible-tower/issues/7852
2018-01-17 16:50:28 -05:00
Wayne Witzel III
4a28065dbb Fix notification_data attempting to access name property of an int 2018-01-17 21:46:49 +00:00
Ryan Petrello
5387846cbb Merge pull request #992 from ryanpetrello/optimize-output-event-filter
optimize OutputEventFilter for large stdout streams
2018-01-17 14:24:15 -05:00
Ryan Petrello
6b247f1f24 Merge pull request #1000 from ryanpetrello/fix-7853
fix a minor unicode handling bug in project names
2018-01-17 14:15:15 -05:00
Ryan Petrello
838b793704 fix a minor unicode handling bug in project names
see: https://github.com/ansible/ansible-tower/issues/7853
2018-01-17 13:37:06 -05:00
Ryan Petrello
3cb8c98a41 Merge pull request #998 from ryanpetrello/fix-980
fix a bug which broke the callback plugin launch process
2018-01-17 12:12:05 -05:00
Ryan Petrello
18f254fc28 Merge pull request #769 from rbywater/feature/cloudformssuffix
Add ability to append suffix to host names for Cloudforms Inventory
2018-01-17 11:43:10 -05:00
Michael Abashian
9c6c6ce816 Merge pull request #990 from mabashian/975-delete-template
Fixed delete on templates list
2018-01-17 11:40:42 -05:00
Chris Meyers
6699be95bf Merge pull request #995 from chrismeyersfsu/improvement-fact_cache_log_job
add job_id to fact cache log output
2018-01-17 11:30:57 -05:00
Ryan Petrello
17cd0595d7 fix a bug which broke the callback plugin launch process
see: https://github.com/ansible/awx/issues/980
2018-01-17 11:28:13 -05:00
Chris Meyers
0402064c0f expose ansible_facts_modified 2018-01-17 10:28:34 -05:00
Chris Meyers
e33265e12c add job_id to fact cache log output 2018-01-17 10:19:27 -05:00
Richard Bywater
b8c76301de Add validation to ensure leading fullstop for suffix 2018-01-17 13:20:59 +13:00
Ryan Petrello
51f7907a01 optimize OutputEventFilter for large stdout streams
update our event data search algorithm to be a bit lazier in event
discovery; this drastically improves processing speeds for stdout >5MB

see: https://github.com/ansible/awx/issues/417
2018-01-16 14:41:35 -05:00
jlmitch5
1a98cedc0f Merge pull request #993 from ansible/jlmitch5-patch-1
update css so that scroll bar doesn't take padding from main content area
2018-01-16 14:28:14 -05:00
jlmitch5
db974d4fd4 update css so that scroll bar doesn't take padding from main content area 2018-01-16 14:22:08 -05:00
mabashian
d6e663eff0 Fixed delete on templates list 2018-01-16 08:36:38 -05:00
Christian Adams
ccb40c8c68 Merge pull request #986 from rooftopcellist/xtra_vars
extends JT xtra var error msg
2018-01-16 00:58:59 -05:00
Ryan Petrello
6eb04de1a7 Merge pull request #978 from ryanpetrello/fix-7841
fix a minor bug in the JT launch related to support for zero credentials
2018-01-15 20:35:28 -05:00
Shane McDonald
cad5c5e79a Merge pull request #987 from shanemcd/devel
Fix sdist builder image
2018-01-15 20:25:16 -05:00
Shane McDonald
97472cb91b Fix sdist builder image
Fallout from https://github.com/ansible/awx/pull/982
2018-01-15 15:39:48 -05:00
adamscmRH
0c63ea0052 extends JT xtra var error msg 2018-01-15 15:05:03 -05:00
Ryan Petrello
fa9c6287f7 Merge pull request #620 from ryanpetrello/fix-815
don't overwrite env['ANSIBLE_LIBRARY'] when fact caching is enabled
2018-01-15 13:55:42 -05:00
Ryan Petrello
2955842c44 don't overwrite env['ANSIBLE_LIBRARY'] when fact caching is enabled
see: https://github.com/ansible/awx/issues/815
see: https://github.com/ansible/ansible-tower/issues/7830
2018-01-15 13:39:46 -05:00
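
The fix amounts to merging rather than assigning; a sketch with an illustrative module path (ANSIBLE_LIBRARY accepts a colon-separated list of directories):

```python
import os

def add_fact_cache_module_path(env, module_path="/awx/plugins/library"):
    prev = env.get("ANSIBLE_LIBRARY")
    # Append instead of overwrite, preserving any user-provided paths.
    env["ANSIBLE_LIBRARY"] = (
        os.pathsep.join([prev, module_path]) if prev else module_path
    )
    return env

print(add_fact_cache_module_path({"ANSIBLE_LIBRARY": "/custom/modules"}))
# -> {'ANSIBLE_LIBRARY': '/custom/modules:/awx/plugins/library'}
```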
Ryan Petrello
64028dba66 Merge pull request #619 from ryanpetrello/file_based_tower_fact_cache
replace our memcached-based fact cache implementation with local files
2018-01-15 11:57:18 -05:00
Chris Meyers
2b1d2b2976 Merge pull request #805 from chrismeyersfsu/feature-saml_import_attr
allow for saml attributes to define team and org
2018-01-15 11:57:05 -05:00
Ryan Petrello
e1d50a43fd only allow facts to cache in the proper file system location 2018-01-15 11:45:49 -05:00
Shane McDonald
7d51b1cb9d Merge pull request #982 from shanemcd/devel
Use first parent commit when determining version from tags
2018-01-15 11:04:23 -05:00
Shane McDonald
52e531625c Use first parent commit when determining version from tags
We were having issues where an older tag was being output by `git describe`.

From the man page:

Follow only the first parent commit upon seeing a merge commit. This is useful when you wish to not match tags on branches merged in the history of the target commit.
2018-01-15 11:01:47 -05:00
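
In code form this is a one-flag change; a sketch (the exact flags AWX's version script uses may differ, and per another commit in this range `--first-parent` requires git >= 1.8.4):

```python
import subprocess

version = subprocess.check_output(
    ["git", "describe", "--tags", "--first-parent"]
).strip().decode()
print(version)  # nearest tag reachable via first-parent history only
```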
Ryan Petrello
983b192a45 replace our memcached-based fact cache implementation with local files
see: https://github.com/ansible/ansible-tower/issues/7840
2018-01-15 09:16:44 -05:00
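
A file-backed cache of this kind is small enough to sketch whole; the on-disk layout here (one JSON file per host) is illustrative, not necessarily AWX's:

```python
import json
import os

class FileFactCache(object):
    def __init__(self, directory):
        self.directory = directory
        if not os.path.isdir(directory):
            os.makedirs(directory)

    def _path(self, hostname):
        return os.path.join(self.directory, hostname)

    def get(self, hostname):
        try:
            with open(self._path(hostname)) as f:
                return json.load(f)
        except (IOError, OSError):
            return None  # cache miss

    def set(self, hostname, facts):
        with open(self._path(hostname), "w") as f:
            json.dump(facts, f)

cache = FileFactCache("/tmp/awx_facts")
cache.set("web01.example.org", {"ansible_distribution": "CentOS"})
print(cache.get("web01.example.org"))
```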
Richard Bywater
b5db652050 Clarify that leading fullstop needed 2018-01-14 14:05:34 +13:00
Jake McDermott
e699402115 Merge pull request #979 from mabashian/template-list-actions
Added old schedule/copy logic to template list
2018-01-12 20:19:31 -05:00
mabashian
d012f5cd99 Added old schedule/copy logic to template list until it can be refactored 2018-01-12 12:23:29 -05:00
Ryan Petrello
e0c04df1ee Merge pull request #618 from ryanpetrello/become_who_you_were_meant_to_be
add support for new "BECOME" prompt in Ansible 2.5+
2018-01-12 11:45:08 -05:00
Ryan Petrello
4a2ca20b60 fix a minor bug in the JT launch related to support for zero credentials
see: https://github.com/ansible/ansible-tower/issues/7841
2018-01-12 11:37:33 -05:00
Ryan Petrello
563f730268 add support for new "BECOME" prompt in Ansible 2.5+
see: https://github.com/ansible/ansible-tower/issues/7850
2018-01-12 10:40:40 -05:00
Chris Meyers
e49dfd6ee2 only run saml pipeline if saml social auth
* Do not trigger saml social auth pipeline methods if the user logging
in was not created by the saml social auth backend.
2018-01-11 16:20:49 -05:00
Jake McDermott
fb414802fa Merge pull request #970 from ansible/smoketest-fixes-for-lists
update test selector for add button dropdown arrow
2018-01-11 16:13:51 -05:00
Michael Abashian
00f400e839 Merge pull request #971 from mabashian/892-delete-inv-src
Fixed a few straggling success/error promises and replaced with then/catch
2018-01-11 14:51:29 -05:00
Chris Meyers
234e33df0e Merge pull request #959 from chrismeyersfsu/feature-multiple_ldap_servers
implement multiple ldap servers
2018-01-11 14:45:38 -05:00
mabashian
f9b0a3121f Fixed a few straggling success/error promises and replaced with then/catch 2018-01-11 13:59:46 -05:00
Jake McDermott
0afdca3674 update test selector for add button dropdown arrow 2018-01-11 13:48:03 -05:00
Matthew Jones
03cef6fea3 Merge pull request #969 from matburt/default_x_forwarded_for
Add X-Forwarded-For as a default source of remote host headers
2018-01-11 12:01:27 -05:00
Matthew Jones
7dc0fce1aa Use x-forwarded-for by default in openshift and kubernetes 2018-01-11 12:00:01 -05:00
Matthew Jones
648d27f28d Merge pull request #909 from scottp-dpaw/add-openshift-hint
Add REMOTE_HOST_HEADERS override to OpenShift template
2018-01-11 11:56:05 -05:00
jlmitch5
5a5e5bc121 Merge pull request #898 from jlmitch5/newTemplateList
implementation for expanded template list
2018-01-11 11:23:56 -05:00
John Mitchell
aea37654e2 updated template list to using new components 2018-01-11 11:20:12 -05:00
Ryan Petrello
89b9d7ac8b Merge pull request #617 from ryanpetrello/release_3.2.3
fix a bug in inventory generation for isolated nodes
2018-01-11 11:04:09 -05:00
Ryan Petrello
b8758044e0 fix a bug in inventory generation for isolated nodes
see: https://github.com/ansible/ansible-tower/issues/7849
related: https://github.com/ansible/awx/pull/551
2018-01-11 10:41:58 -05:00
Chris Meyers
2ed97aeb0c implement multiple ldap servers 2018-01-11 09:03:14 -05:00
Ryan Petrello
9431b0b6ff Merge pull request #962 from ryanpetrello/fix-7843
fix a unicode handling bug
2018-01-10 18:17:58 -05:00
Ryan Petrello
a5007ccd41 fix a unicode handling bug
see: https://github.com/ansible/ansible-tower/issues/7843
related: https://github.com/ansible/awx/pull/807
2018-01-10 15:56:31 -05:00
Michael Abashian
81fc4219ae Merge pull request #957 from mabashian/860-facts
Fixed display of host facts
2018-01-10 15:02:23 -05:00
Ryan Petrello
c3c4d79890 Merge pull request #958 from ryanpetrello/multivenv
add an example for custom virtualenv setup in containers
2018-01-10 14:24:42 -05:00
Ryan Petrello
b01b229fea add an example for custom virtualenv setup in containers 2018-01-10 13:48:55 -05:00
mabashian
984b7e066d Fixed display of host facts 2018-01-10 13:18:38 -05:00
Matthew Jones
67d927121d Merge pull request #940 from ryanpetrello/multivenv
implement support for per-playbook/project/org virtualenvs
2018-01-10 12:15:38 -05:00
Ryan Petrello
4c40791d06 properly handle unicode for isolated job buffers
from: https://docs.python.org/2/library/stringio.html#module-cStringIO
"Unlike the StringIO module, this module is not able to accept Unicode
strings that cannot be encoded as plain ASCII strings."

see: https://github.com/ansible/ansible-tower/issues/7846
2018-01-10 10:56:59 -05:00
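
The practical consequence of the cStringIO limitation quoted above: job stdout needs a unicode-capable buffer. A tiny sketch using `io.StringIO` (the exact replacement AWX chose may differ):

```python
import io

buf = io.StringIO()  # accepts arbitrary unicode, unlike cStringIO
buf.write(u"\u30c7\u30d7\u30ed\u30a4 complete\n")  # non-ASCII stdout is fine
print(buf.getvalue())
```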
Matthew Jones
ae06cff991 Merge pull request #938 from ansible/kubernetes_install_support
Kubernetes install support
2018-01-10 09:57:33 -05:00
Matthew Jones
7ea6d7bf4d Clean up documentation for kubernetes installer 2018-01-10 09:39:07 -05:00
Matthew Jones
fad4a549d0 Remove oc command usage from docker registry k8s reference 2018-01-10 09:38:00 -05:00
Matthew Jones
9365e477c5 Merge pull request #951 from ansible/remove_nodeport
Remove nodeport customization
2018-01-10 09:32:36 -05:00
Matthew Jones
d0b3cac72a Remove nodeport definition 2018-01-10 09:29:12 -05:00
Chris Meyers
de02138dfd spelling is hard 2018-01-10 09:26:11 -05:00
Matthew Jones
44f0b003fc Kubernetes install documentation 2018-01-10 09:25:59 -05:00
Matthew Jones
56aed597b2 Add initial support for kubernetes to the installer 2018-01-10 09:25:59 -05:00
Matthew Jones
f33ee03b98 Remove nodeport customization
This isn't strictly necessary for the OpenShift routes and can
sometimes cause problems when the resource is already defined in OpenShift.
2018-01-10 09:23:46 -05:00
Ryan Petrello
69a3b0def6 Merge pull request #946 from ryanpetrello/fix-7846
fix a handful of issues for playbooks that contain unicode
2018-01-10 09:16:40 -05:00
Matthew Jones
6504972d82 Merge pull request #741 from rbywater/bugfix/cloudformsinventory
Fix CloudForms enabled & id variable names - relates to #705
2018-01-10 00:22:58 -05:00
Ryan Petrello
4bb2b5768e properly compose stdout downloads that contain unicode 2018-01-09 23:52:02 -05:00
Ryan Petrello
c0a641ed52 properly handle unicode for isolated job buffers
from: https://docs.python.org/2/library/stringio.html#module-cStringIO
"Unlike the StringIO module, this module is not able to accept Unicode
strings that cannot be encoded as plain ASCII strings."

see: https://github.com/ansible/ansible-tower/issues/7846
2018-01-09 23:46:17 -05:00
Ryan Petrello
1e8c89f536 implement support for per-playbook/project/org virtualenvs
see: https://github.com/ansible/awx/issues/34
2018-01-09 22:47:01 -05:00
Shane McDonald
54d3412820 Merge pull request #942 from wwitzel3/devel
Update asgi-amqp requirement
2018-01-09 19:17:45 -05:00
Wayne Witzel III
1690938dfb Update asgi-amqp requirement 2018-01-09 23:54:30 +00:00
Chris Meyers
0a9d3d47b9 more efficiently determine saml team mapping 2018-01-09 12:16:07 -05:00
Ryan Petrello
2952b0a0fe Merge pull request #807 from AlanCoding/inv_update_name
Make inventory update name combination of inventory and source
2018-01-08 10:43:44 -05:00
Ryan Petrello
1d3e8f8b87 Merge pull request #831 from AlanCoding/field_names
Use Options models to consolidate field_names list
2018-01-08 10:36:16 -05:00
Ryan Petrello
97c040aaa1 Merge pull request #839 from AlanCoding/cache_settings_dict
Cache the global settings list, cProfile speedup
2018-01-08 10:35:00 -05:00
Ryan Petrello
818c95501a Merge pull request #920 from ryanpetrello/fix-914
add vault_id to launch endpoints default vault credentials
2018-01-08 10:31:31 -05:00
Chris Meyers
664bdec57f add documentation 2018-01-05 14:43:33 -05:00
Michael Abashian
92068930a6 Merge pull request #919 from ansible/jakemcdermott-add-dialog-slider-imports
add dialog and slider vendor imports
2018-01-05 08:48:11 -05:00
Chris Meyers
d07a946183 Merge pull request #921 from chrismeyersfsu/fix-handle_work_error-689
Fix handle_work_error()
2018-01-05 07:54:03 -05:00
Chris Meyers
9d58b15135 allow for saml attributes to define team and org
related to https://github.com/ansible/awx/issues/217

* Adds a configure tower in tower setting for users to configure a saml
attribute that tower will use to put users into teams and orgs.
2018-01-04 15:35:11 -05:00
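A rough illustration of the setting shape this commit describes; the setting name, keys, and lookup below are assumptions for illustration, not the exact AWX configuration:
```python
# Hypothetical sketch: map a SAML assertion attribute onto teams/orgs.
SOCIAL_AUTH_SAML_TEAM_ATTR = {          # setting name assumed for illustration
    'saml_attr': 'member-of',           # which SAML attribute to read
    'team_org_map': [
        {'team': 'engineering', 'organization': 'Default'},
    ],
}

def teams_from_assertion(saml_attributes, setting=SOCIAL_AUTH_SAML_TEAM_ATTR):
    """Return the team names carried in a SAML assertion."""
    return saml_attributes.get(setting['saml_attr'], [])

assert teams_from_assertion({'member-of': ['engineering']}) == ['engineering']
```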
Chris Meyers
a0038276a4 do not use a custom task exception
* Celery + JSON pickling do not handle custom Exceptions (and may never
do so). Worth mentioning: if custom Exceptions were handled, the code would
be susceptible to the same arbitrary code execution that Python pickle is
vulnerable to.
* So don't use custom Exceptions.
2018-01-04 15:30:52 -05:00
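A short sketch of why JSON task serialization and custom exception classes don't mix; the class name here is hypothetical:
```python
import json

class TaskError(Exception):              # hypothetical custom task exception
    def __init__(self, task_id, rc):
        super(TaskError, self).__init__('task %s failed rc=%s' % (task_id, rc))
        self.task_id = task_id
        self.rc = rc

try:
    json.dumps(TaskError('37', 1))       # custom exceptions aren't JSON-serializable
except TypeError:
    print('cannot round-trip a custom exception through JSON')

# So raise a plain Exception carrying only serializable data instead:
err = Exception(json.dumps({'task': '37', 'rc': 1}))
```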
Chris Meyers
f0ff6ecb0a handle_work_error signature to work
* The Celery error callback signature isn't well defined, so our error
callback is written to handle just about any call signature and to depend
on only one attribute, `id`, existing.

See https://github.com/celery/celery/issues/3709
2018-01-04 15:23:13 -05:00
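A sketch of the defensive pattern, assuming nothing about the errback's call signature beyond something exposing an `id`:
```python
def handle_work_error(*args, **kwargs):
    """Tolerate any Celery errback signature; rely only on an `id`."""
    for candidate in list(args) + list(kwargs.values()):
        task_id = getattr(candidate, 'id', None)
        if task_id is None and isinstance(candidate, str):
            task_id = candidate          # some versions pass the uuid directly
        if task_id is not None:
            return task_id
    return None

class FakeTask(object):
    id = 'abc-123'

assert handle_work_error(FakeTask(), reason='error') == 'abc-123'
```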
Ryan Petrello
60743d6ba6 add the vault_id to the response payload on the JT launch endpoint
see: https://github.com/ansible/awx/issues/914
2018-01-04 15:13:48 -05:00
Ryan Petrello
4707b5e020 Merge pull request #917 from ryanpetrello/more-stdout-event-polish
more stdout event polish
2018-01-04 14:54:46 -05:00
Jake McDermott
ed7d7fcf00 add dialog and slider vendor imports 2018-01-04 14:46:42 -05:00
Aaron Tan
6c2a7f3782 Merge pull request #906 from jangsutsr/refactor_named_url_tests
Refactor named URL unit tests
2018-01-04 14:20:27 -05:00
Ryan Petrello
47875c5f9a Merge pull request #916 from ryanpetrello/memcache-fact-cache-size-warning
make the fact caching plugin fail more gracefully for large payloads
2018-01-04 13:06:29 -05:00
Ryan Petrello
f28f7c6184 refactor job event signal generation code 2018-01-04 12:50:12 -05:00
Ryan Petrello
1494c8395b update websockets docs to reflect new event groups 2018-01-04 11:40:15 -05:00
Ryan Petrello
2691e1d707 make the fact caching plugin fail more gracefully for large payloads
related: https://github.com/ansible/ansible/pull/34424
2018-01-04 11:33:47 -05:00
Ryan Petrello
6d413bd412 Merge pull request #833 from ryanpetrello/stdout-events
generalize stdout event processing to emit events for all job types
2018-01-04 11:28:52 -05:00
Aaron Tan
54bf7e13d8 Refactor named URL unit tests
The original test set no longer works after Django 1.11 due to stricter
rules against dynamic model definition. The refactored test set
targets each existing model that applies named URL rules, instead of
abstract general use cases, which significantly improves maintainability
and readability.

Signed-off-by: Aaron Tan <jangsutsr@gmail.com>
2018-01-03 14:00:30 -05:00
Ryan Petrello
c6b6a3ad89 send and subcribe to summary events for all new events 2018-01-03 09:09:45 -05:00
Ryan Petrello
2bd656e61d calculate stdout download length using the ORM, not raw SQL 2018-01-03 09:09:44 -05:00
Ryan Petrello
35b8e40d3c remove deprecation from the stdout endpoint; text downloads still use it 2018-01-03 09:09:44 -05:00
Ryan Petrello
c4d901bf2c add functional API tests for deprecated job event stdout composition
see: https://github.com/ansible/awx/issues/200
2018-01-03 09:09:44 -05:00
Ryan Petrello
1369f72885 add new API endpoints and websocket emit for new job event types
see: https://github.com/ansible/awx/issues/200
2018-01-03 09:09:44 -05:00
Ryan Petrello
0b30e7907b change stdout composition to generate from job events on the fly
this approach totally removes the process of reading and writing stdout
files on the local file system at settings.JOBOUTPUT_ROOT when jobs are
run; now stdout content is only written on-demand as it's fetched for
the deprecated `stdout` endpoint

see: https://github.com/ansible/awx/issues/200
2018-01-03 09:09:43 -05:00
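In spirit, the on-demand composition is just streaming event text in counter order instead of reading a file from JOBOUTPUT_ROOT; a sketch, assuming event rows with `counter` and `stdout` fields:
```python
def stream_job_stdout(event_rows):
    """Yield stdout chunks on demand instead of persisting a file.

    `event_rows` stands in for job event records already ordered
    by their `counter` column in SQL.
    """
    for event in event_rows:
        yield event.stdout

def compose_job_stdout(event_rows):
    """Materialize the full text only when the deprecated endpoint asks."""
    return ''.join(stream_job_stdout(event_rows))
```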
Ryan Petrello
fc94b3a943 generalize stdout event processing to emit events for *all* job types
* introduces three new models: `ProjectUpdateEvent`,
  `InventoryUpdateEvent`, and `SystemJobEvent`
* simplifies the stdout callback management in `tasks.py` - now _all_
  job run types capture and emit events to the callback receiver
* supports stdout reconstruction from events for stdout downloads for
  _all_ job types
* configures `ProjectUpdate` runs to configure the awx display callback
  (so we can capture real playbook events for `project_update.yml`)
* ProjectUpdate, InventoryUpdate, and SystemJob runs no longer write
  text blobs to the deprecated `main_unifiedjob.result_stdout_text` column

see: https://github.com/ansible/awx/issues/200
2018-01-03 09:09:42 -05:00
Scott Percival
fde9099198 Add REMOTE_HOST_HEADERS override to OpenShift template
Signed-off-by: Scott Percival <scott.percival@dbca.wa.gov.au>
2018-01-03 09:53:17 +08:00
Michael Abashian
815cd829e0 Merge pull request #872 from mabashian/865-jquery-ui-upgrade-bug
Fixed spinners after jquery-ui upgrade
2018-01-02 13:18:51 -05:00
Aaron Tan
a2fd78add4 Implement item copy feature
See acceptance doc for implement details.

Signed-off-by: Aaron Tan <jangsutsr@gmail.com>
2018-01-02 10:20:44 -05:00
Ryan Petrello
28c612ae9c Merge pull request #871 from AlanCoding/dirty_extra_data
Fix bug when creating system job schedule
2018-01-02 09:22:07 -05:00
Bill Nottingham
d6ed6a856d Merge pull request #845 from wenottingham/he-sees-you-when-you're-sleeping
Tweak collected information.
2017-12-21 13:52:11 -05:00
mabashian
706b370f7e Fixed spinners after jquery-ui upgrade 2017-12-20 16:02:53 -05:00
AlanCoding
80a2d10742 fix bug when creating system job schedule 2017-12-20 15:30:57 -05:00
Jake McDermott
f7259a1e78 Merge pull request #844 from jakemcdermott/fix-package-json-backmerge-issue
package.json updates / restore ui watch functionality
2017-12-18 17:03:03 -05:00
AlanCoding
08570fe785 make inventory update name combination of inventory and source 2017-12-18 16:21:39 -05:00
Jake McDermott
987cdc6802 Bump versions of angular-codemirror, jquery-ui, and moment 2017-12-18 16:21:29 -05:00
Alan Rominger
6e27294e2b Merge pull request #846 from AlanCoding/encrypt_on_save
Encrypt password answers on config save
2017-12-18 16:20:08 -05:00
AlanCoding
3439ba5f3b allow WFJT nodes without required variables 2017-12-18 12:03:40 -05:00
AlanCoding
c8e10adc96 fix bug saving extra_data and follow prompts rules
display_extra_vars was not taking a copy of the data before
acting on it - this causes a bug where the activity stream will
modify the existing object on the model. That leads to new data
not being accepted.

Also moved the processing of extra_data to before the accept-or-ignore
kwargs logic so that we pass the right (post-encryption) form of the
variables.
2017-12-18 10:50:22 -05:00
Christian Adams
7e261b5246 Merge pull request #847 from rooftopcellist/contributing_typos
Fixed Typos
2017-12-15 11:09:43 -05:00
AlanCoding
1e1839915d validate against unencrypted values at spawn point 2017-12-15 10:47:23 -05:00
AlanCoding
74bf058d62 encrypt password answers on config save 2017-12-15 07:48:55 -05:00
Matthew Jones
5ec537bad2 Merge pull request #843 from ansible/remove_old_tests
Removing old unused tests
2017-12-14 23:55:12 -05:00
Bill Nottingham
568901af74 Tweak collected information. 2017-12-14 19:22:18 -05:00
adamscmRH
c2e9926330 Fixed Typos 2017-12-14 16:13:04 -05:00
Jake McDermott
c4ccfa1b27 restoring ui watch functionality 2017-12-14 14:23:46 -05:00
Matthew Jones
478bcc0b07 Removing old unused tests 2017-12-14 11:34:43 -05:00
AlanCoding
0bb9c58e25 cache the global settings list, cProfile speedup 2017-12-14 11:29:30 -05:00
Alan Rominger
9c783aa0ce Merge pull request #804 from AlanCoding/active_count
simplify query for active_count
2017-12-14 10:47:12 -05:00
Alan Rominger
526391a072 Merge pull request #838 from AlanCoding/no_unicode_loop_2
Avoid slowdown generating smart_filter (alternative 2)
2017-12-14 10:23:31 -05:00
AlanCoding
98f8faa349 simplify query for active_count 2017-12-14 09:53:26 -05:00
AlanCoding
8a2a5b0fb1 avoid slowdown generating smart_filter 2017-12-14 09:39:39 -05:00
Jake McDermott
07cfa6cba5 Merge pull request #834 from AlanCoding/jump-the-start-line 2017-12-13 22:43:54 -05:00
AlanCoding
e188692acf use Options models to consolidate field_names list 2017-12-13 22:39:38 -05:00
Jake McDermott
ad70754b6a Merge pull request #832 from mabashian/linting-error-cleanup
Fixed linting/leftover merge errors
2017-12-13 19:46:02 -05:00
AlanCoding
9fb24f1a4c add hack to TimingMiddleware for Shippable tests 2017-12-13 18:49:26 -05:00
mabashian
aefa30e1e9 Fixed linting/leftover merge errors 2017-12-13 18:28:52 -05:00
Alan Rominger
7eb2d86890 Merge pull request #749 from AlanCoding/detail_opt
Apply list view optimizations to detail view
2017-12-13 18:19:00 -05:00
Matthew Jones
2fb0144914 Add libcurl-devel to official image build 2017-12-13 16:14:55 -05:00
AlanCoding
e3a731bb9e apply listview optimizations to detail view 2017-12-13 16:09:37 -05:00
Ryan Petrello
451e9a7504 Merge pull request #826 from AlanCoding/322flake
flake8 fixes from removal of re-encrypt test
2017-12-13 15:23:41 -05:00
Ryan Petrello
8311acfba2 Merge pull request #825 from AlanCoding/towervars
Add back in support of towervars lost in merge
2017-12-13 15:19:47 -05:00
AlanCoding
77a1c405a6 flake8 fixes from removal of reencrypt test 2017-12-13 14:32:34 -05:00
AlanCoding
1b0bca8229 add back in support of towervars lost in merge 2017-12-13 14:30:11 -05:00
Ryan Petrello
bd91e8eb54 Merge pull request #824 from ryanpetrello/devel
fix a few tests caused by fallout between 3.2.2 bugs and 3.3 multicred
2017-12-13 14:10:39 -05:00
Ryan Petrello
ea4cd99003 fix a few tests caused by fallout between 3.2.2 bugs and 3.3 multicred 2017-12-13 14:02:25 -05:00
Shane McDonald
00ce244716 Merge pull request #822 from jakemcdermott/fix-merge-issues
fix merge issue with inventory source service
2017-12-13 13:32:13 -05:00
Jake McDermott
3b791609cd fix merge issue with inventory source service 2017-12-13 13:23:15 -05:00
Matthew Jones
a8d4eb7c1d Merge pull request #821 from ryanpetrello/devel
more test cleanup from 3.2.2 merge
2017-12-13 13:15:59 -05:00
Ryan Petrello
d35bfafcf5 more test cleanup from 3.2.2 merge 2017-12-13 13:14:22 -05:00
Ryan Petrello
9f8ef4d1e5 Merge pull request #820 from ryanpetrello/devel
fix a number of failing unit tests related to the 3.2.2 merge
2017-12-13 13:04:55 -05:00
Ryan Petrello
a978d094b4 fix a number of failing unit tests related to the 3.2.2 merge 2017-12-13 13:03:17 -05:00
Shane McDonald
47e422ba7a Merge pull request #819 from ansible/jakemcdermott-patch-1
fix arg name clash in hosts list controller
2017-12-13 13:02:47 -05:00
Jake McDermott
4b86815275 fix arg name clash in hosts list controller 2017-12-13 13:01:11 -05:00
Alan Rominger
6c1c850c5f Merge pull request #816 from AlanCoding/ints
use credential property that returns integers
2017-12-13 12:56:07 -05:00
AlanCoding
f4f1e0fd3c use credential property that returns integers 2017-12-13 12:54:32 -05:00
Matthew Jones
ca84e1c654 Merge pull request #817 from ansible/jakemcdermott-patch-1
fix missing comma in package.json
2017-12-13 12:48:16 -05:00
Jake McDermott
6b6e898882 fix missing comma in package.json 2017-12-13 12:45:26 -05:00
Matthew Jones
9dbcc5934e Merge remote-tracking branch 'tower/release_3.2.2' into devel 2017-12-13 12:25:47 -05:00
Greg Considine
fac7fd45f8 Merge pull request #614 from gconsidine/ui/fix/toggle-button-container
Adjust style of toggle button to accommodate text that exceeds 42px
2017-12-11 16:09:44 -05:00
Matthew Jones
34c206fab0 Bump psql-container pg version to 9.6 2017-12-11 12:01:28 -05:00
gconsidine
a2f64f1053 Adjust style of toggle button to accommodate text that exceeds 42px 2017-12-11 11:47:21 -05:00
Shane McDonald
334d47f3ab Pull updated translations 2017-12-11 09:42:06 -05:00
Ryan Petrello
4724b6a3d6 Merge pull request #613 from ryanpetrello/release_3.2.2
change how we detect the current user at LDAP login to avoid a nuanced recursion error
2017-12-08 15:46:32 -05:00
Ryan Petrello
ce94ba4c83 change how we detect the current user to avoid a nuanced recursion error
see: https://github.com/ansible/ansible-tower/issues/7802
2017-12-08 15:35:47 -05:00
Ryan Petrello
0dc4fa975b Merge pull request #612 from ryanpetrello/release_3.2.2
fix a race condition in "host.last_job" when jobs are deleted
2017-12-06 13:43:45 -05:00
Ryan Petrello
1fb890f4eb fix a race condition in "host.last_job" when jobs are deleted
see: https://github.com/ansible/ansible-tower/issues/7815
2017-12-06 11:30:19 -05:00
Shane McDonald
15e8fd5eca Pull updated translations 2017-12-05 14:17:18 -05:00
Ryan Petrello
06e751fea1 Merge pull request #611 from ryanpetrello/release_3.2.2
more unit tests for survey default handling
2017-12-05 09:18:48 -05:00
Ryan Petrello
fe93ef5488 more unit tests for survey default handling 2017-12-05 09:04:57 -05:00
Richard Bywater
9b05a41eec Add ability to append suffix to host names for Cloudforms Inventory
Allows a suffix to be appended to host names returned
from the Cloudforms API when that suffix is not already present.

For example with a suffix of 'example.org', the following results would
be shown for a particular Cloudforms host name:
someexample -> someexample.example.org
someexample.example.org -> someexample.example.org

The main use case is to ensure that the hosts in an inventory aren't
effectively duplicated when one inventory source returns FQDNs while
others return short names.
2017-12-05 14:47:33 +13:00
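A minimal sketch of the append-if-missing rule, reproducing the example from the commit message:
```python
def apply_suffix(hostname, suffix):
    """Append `suffix` to a host name unless it is already present."""
    if suffix and not hostname.endswith(suffix):
        return '%s.%s' % (hostname, suffix)
    return hostname

assert apply_suffix('someexample', 'example.org') == 'someexample.example.org'
assert apply_suffix('someexample.example.org', 'example.org') == 'someexample.example.org'
```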
Ryan Petrello
2c12f1b66e Merge pull request #610 from ryanpetrello/faster-result-stdout-cleanup
don't fetch stdout when purging jobs - it's slow and causes OOMs
2017-12-04 15:41:16 -05:00
Ryan Petrello
33dedc88c8 don't fetch stdout when purging jobs - it's slow and causes OOMs
see: https://github.com/ansible/ansible-tower/issues/7751
2017-12-04 15:14:55 -05:00
Alan Rominger
759867c863 Merge pull request #609 from AlanCoding/more_encryption_tests
encryption tests around the contract with survey functionality
2017-12-04 14:20:30 -05:00
Alan Rominger
d4613d448c Merge pull request #608 from AlanCoding/empty_string_defaults
allow password default reuse with empty default
2017-12-04 14:19:32 -05:00
AlanCoding
dbd68c5747 encryption tests around the contract with survey functionality 2017-12-04 11:45:07 -05:00
AlanCoding
d23d7c422d allow password default reuse with empty default 2017-12-04 10:49:36 -05:00
Ryan Petrello
4b793dc58a Merge pull request #606 from ryanpetrello/test-7806
improve validation for empty default passwords
2017-12-04 10:19:06 -05:00
Ryan Petrello
112757e202 properly handle JT launch POST for required survey fields w/ no default
see: ansible/ansible-tower#7805
2017-12-04 09:45:21 -05:00
Ryan Petrello
12380fe1b1 add more tests for survey default encryption
see: https://github.com/ansible/ansible-tower/issues/7805
see: https://github.com/ansible/ansible-tower/issues/7806
see: https://github.com/ansible/ansible-tower/issues/7807
2017-12-04 09:45:14 -05:00
Ryan Petrello
b987b7daa0 Merge pull request #605 from ryanpetrello/release_3.2.2
fix another survey encryption-related bug
2017-12-01 17:30:43 -05:00
Ryan Petrello
6c7851b51f fix another survey encryption-related bug 2017-12-01 17:11:00 -05:00
Aaron Tan
1ff0591553 Merge pull request #603 from jangsutsr/fix-7737-1
Follow up fix #542
2017-12-01 16:07:14 -05:00
Aaron Tan
58ad214dcf Follow up fix #542
Relates
https://github.com/ansible/ansible-tower/issues/7737#issuecomment-348566452

Signed-off-by: Aaron Tan <jangsutsr@gmail.com>
2017-12-01 15:58:33 -05:00
Ryan Petrello
a71cee9300 Merge pull request #604 from ryanpetrello/survey_spec_validation_refactor
refactor survey spec validation into a separate testable function
2017-12-01 15:44:07 -05:00
Ryan Petrello
1057b93570 refactor survey spec validation into a separate testable function 2017-12-01 15:34:07 -05:00
Alan Rominger
e0edfeac7c Merge pull request #602 from AlanCoding/clean_defaults
block user from entering encrypted as bare default
2017-12-01 14:54:42 -05:00
AlanCoding
47f45bf9b3 block user from entering encrypted as bare default 2017-12-01 14:44:06 -05:00
Ryan Petrello
8d162f9044 Merge pull request #601 from ryanpetrello/flake8-fixes
backport a few fixes from awx to address busted ci
2017-12-01 12:48:06 -05:00
AlanCoding
6269b43456 update tests to new Ansible core code 2017-12-01 12:31:56 -05:00
AlanCoding
67867cf0c8 flake8: comply with new E722 rule 2017-12-01 12:16:44 -05:00
Ryan Petrello
7538b4ce15 Merge pull request #600 from ryanpetrello/fix-7800-migration
upgrade survey encryption migration to work around an old survey bug
2017-12-01 12:13:18 -05:00
Ryan Petrello
8c6a1e348d upgrade survey encryption migration to work around an old survey bug
see: https://github.com/ansible/ansible-tower/issues/7800
2017-12-01 11:34:47 -05:00
Shane McDonald
3cd80ef67a Update pot files 2017-11-30 15:29:29 -05:00
Wayne Witzel III
f3310236e4 Merge pull request #599 from wwitzel3/release_3.2.2
Fix git project sync bug.
2017-11-30 11:22:29 -05:00
Wayne Witzel III
ed28faa3db Use TMP instead of TMPDIR and only set it in RunProjectUpdate 2017-11-30 16:10:12 +00:00
Ryan Petrello
fc4b02b79f Merge pull request #597 from ryanpetrello/jenkins-no-like-unicode
removing some cruft we thought would help us catch bugs (it didn't)
2017-11-29 16:39:20 -05:00
Ryan Petrello
a3dd9eb4b7 removing some cruft we thought would help us catch bugs (it didn't) 2017-11-29 16:23:07 -05:00
Richard Bywater
079abc162f Fix CloudForms enabled & id variable names
On Cloudforms (version 2.0 at least), the dictionary passed to
inventory_import has a top-level 'cloudforms' dictionary element
that contains the 'id' and 'power_state', rather than those elements
being at the top level of the dictionary.

This change adds 'cloudforms' to the expected variable names.
2017-11-30 09:30:23 +13:00
Ryan Petrello
d773d163f7 Merge pull request #595 from ryanpetrello/fix-workflow-survey-encrypt
more survey password encryption bug squashing
2017-11-29 15:09:40 -05:00
Ryan Petrello
68ada92f3b more survey password encryption bug squashing
the nature of this latest bug is that the WorkflowJob has a *different*
implementation of _accept_or_ignore_job_kwargs, and it wasn't performing
encryption for extra vars provided at launch time; this change places the
encryption mechanism in UJT.create_unified_job so that it works the same
for _all_ UJTs

see: https://github.com/ansible/ansible-tower/issues/7798
see: https://github.com/ansible/ansible-tower/issues/7046
2017-11-29 14:40:41 -05:00
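The shared mechanism amounts to masking survey password answers in one place at job creation, regardless of which UJT subclass is launching; a sketch, with `encrypt_value` standing in for AWX's real field-encryption helper:
```python
def encrypt_survey_passwords(extra_vars, password_keys, encrypt_value):
    """Encrypt survey password answers once, for every UJT subclass.

    `password_keys` would come from the template's survey spec;
    `encrypt_value` is a stand-in for the real encryption helper.
    """
    result = dict(extra_vars)
    for key in password_keys:
        if key in result and result[key] != '$encrypted$':
            result[key] = encrypt_value(result[key])
    return result

masked = encrypt_survey_passwords(
    {'secret_token': 'hunter2', 'region': 'us-east'},
    password_keys=['secret_token'],
    encrypt_value=lambda v: '$encrypted$...',   # placeholder for the real cipher
)
assert masked['region'] == 'us-east'
```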
Aaron Tan
4c43afda19 Merge pull request #586 from jangsutsr/fix-7768
Suppress exception with concurrent deletion
2017-11-29 13:40:45 -05:00
Ryan Petrello
91cc4689c9 Merge pull request #594 from ryanpetrello/fix-sosreport-venv
fix incorrect virtualenv path for sosreport plugin
2017-11-29 10:07:12 -05:00
Hideki Saito
febfcf709d fix incorrect virtualenv path for sosreport plugin 2017-11-29 09:57:41 -05:00
Ryan Petrello
cf1d5a29f6 Merge pull request #593 from ryanpetrello/fix-7796
fix another encrypted survey password bug
2017-11-28 17:08:35 -05:00
Ryan Petrello
1425021106 fix another encrypted survey password bug
properly encrypt extra_vars that overlap with survey passwords when
`ask_variables_on_launch=true`

see: https://github.com/ansible/ansible-tower/issues/7796
2017-11-28 16:52:47 -05:00
Ryan Petrello
7b42316366 Merge pull request #592 from ryanpetrello/fix-7793
fix a bug which caused v1 cred backwards-compat to apply to v2 requests
2017-11-28 14:49:54 -05:00
Ryan Petrello
ce9d75c2e4 Merge pull request #591 from ryanpetrello/rename-ovirt
rename oVirt4 to Red Hat Virtualization
2017-11-28 13:10:58 -05:00
Ryan Petrello
26845642f0 fix a bug which caused v1 cred backwards-compat to apply to v2 requests
see: https://github.com/ansible/ansible-tower/issues/7793
2017-11-28 13:05:13 -05:00
Ryan Petrello
6fa0d9d4ed rename oVirt4 to Red Hat Virtualization
see: https://github.com/ansible/ansible-tower/issues/7790
2017-11-28 11:02:42 -05:00
Ryan Petrello
7accac2f63 Merge pull request #590 from ryanpetrello/fix-7784
fix a bug in survey password default validation within workflows
2017-11-27 18:04:30 -05:00
Ryan Petrello
044c047ac6 fix a bug in survey password default validation
see: https://github.com/ansible/ansible-tower/issues/7046
see: https://github.com/ansible/ansible-tower/issues/7764
see: https://github.com/ansible/ansible-tower/issues/7784
2017-11-27 17:25:45 -05:00
Chris Meyers
5a2ecd25e7 Merge pull request #589 from ansible/fix-project_update_cascade_fast
correctly cascade job cancel
2017-11-27 13:21:25 -05:00
Chris Meyers
6c89935521 correctly cascade job cancel
* Check the reason for a dependent project update failure. If it's
because of a cancel, let the normal cancel mechanisms update the
job's status and explanation; do not, in the run code, update the
dependent job's status for a project update that was canceled.
2017-11-27 12:34:55 -05:00
Aaron Tan
0641c6b0a6 Suppress exception with concurrent deletion
Relates https://github.com/ansible/ansible-tower/issues/7768

This issue, as well as
https://github.com/ansible/ansible-tower/issues/7622, is rooted in a
concurrency issue of the Django ORM:
https://code.djangoproject.com/ticket/28806

The fix here deals specifically with the related issue, but is
not a general solution. A general workaround can be found in
https://github.com/ansible/tower/pull/500.

Signed-off-by: Aaron Tan <jangsutsr@gmail.com>
2017-11-17 16:29:08 -05:00
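The narrow workaround boils down to tolerating a row that a concurrent request already deleted; a sketch, assuming Django is on the path:
```python
from django.core.exceptions import ObjectDoesNotExist

def delete_tolerantly(queryset, pk):
    """Suppress the race where another request deletes the row first."""
    try:
        queryset.get(pk=pk).delete()
    except ObjectDoesNotExist:
        pass  # already gone; treat concurrent deletion as success
```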
Alan Rominger
4ea27e0d1b Merge pull request #587 from AlanCoding/computed_self
update original when updating computed fields
2017-11-17 12:23:43 -05:00
Wayne Witzel III
79c196fc08 Merge pull request #588 from wwitzel3/release_3.2.2
Include all previously run operations to satisfy Django migration planner
2017-11-17 12:12:21 -05:00
Wayne Witzel III
249a5e5e4d Include all previously run operations to satisfy Django migration planner. 2017-11-17 12:02:07 -05:00
AlanCoding
51c73cb357 update original when updating computed fields 2017-11-17 10:16:49 -05:00
Ryan Petrello
8d35b71321 Merge pull request #585 from ryanpetrello/fix-wfjt-survey-encryption
re-encrypt WFJT.survey_spec and WorkflowJob.extra_vars too
2017-11-17 08:52:54 -05:00
Ryan Petrello
a80d5b1b39 reencrypt WFJT.survey_spec too
https://github.com/ansible/ansible-tower/issues/7046
2017-11-16 23:00:22 -05:00
Wayne Witzel III
e5d86419c8 Merge pull request #582 from AlanCoding/smart_computed2
update smart inventory computed fields
2017-11-16 19:36:08 -05:00
Greg Considine
54a98ff612 Merge pull request #581 from gconsidine/ui/fix/disabled-button-opacity
Update disabled button color to match style guide
2017-11-16 16:40:06 -05:00
Ryan Petrello
e7077185bf Merge pull request #584 from ryanpetrello/release_3.2.2
make settings.AWX_ISOLATED_KEY_GENERATION readonly
2017-11-16 13:42:00 -05:00
Ryan Petrello
4187d02b8a make settings.AWX_ISOLATED_KEY_GENERATION readonly
see: https://github.com/ansible/ansible-tower/issues/7380
2017-11-16 13:35:37 -05:00
Matthew Jones
457359322f Merge pull request #583 from ansible/nicer_error_tower_inventory
Present the tower inventory sync failure in a better way
2017-11-16 13:16:22 -05:00
Matthew Jones
8a65c6e1c8 Present the tower inventory sync failure in a better way
This allows it to be handled better by ansible 2.4+
2017-11-16 12:59:12 -05:00
AlanCoding
fb29f68efc update smart inventory computed fields 2017-11-16 11:57:30 -05:00
gconsidine
1fcddba558 Update disabled button color to match style guide 2017-11-16 11:15:23 -05:00
Chris Meyers
e20599d7bb Merge pull request #580 from chrismeyersfsu/fix-project_update_cascade_tower
cascade cancel proj update when job canceled
2017-11-15 11:19:35 -05:00
Chris Meyers
9288b53015 cascade cancel proj update when job canceled
* Implicit project updates (launch_type='sync') get "associated" with a
job via project_update. When a job is canceled, this implicit
project update should be canceled as well. This change enforces that logic.
2017-11-15 11:17:52 -05:00
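A sketch of the enforced logic; the relation and attribute names here are assumptions for illustration:
```python
def cancel_job_and_dependents(job):
    """Cancel a job and any implicit project-update sync tied to it."""
    job.cancel_flag = True
    for update in job.dependent_project_updates:   # assumed relation name
        if update.launch_type == 'sync' and update.status == 'running':
            update.cancel_flag = True              # cascade the cancel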
Ryan Petrello
82be0a8af2 Merge pull request #579 from ryanpetrello/fix-survey-encryption-migration-failure
fix a bug in the survey reencryption migration
2017-11-15 10:38:35 -05:00
Ryan Petrello
35c374fc79 fix a bug in the survey reencryption migration
see: https://github.com/ansible/ansible-tower/issues/7046
2017-11-15 10:26:46 -05:00
Alan Rominger
dbe135991b Merge pull request #575 from AlanCoding/single_cancel
do not propagate cancel of inventory sync back up to project
2017-11-15 09:46:41 -05:00
Bill Nottingham
64f89b3fce Merge pull request #578 from wenottingham/protect-our-environment
Remove some environment variables the callback plugin doesn't actually use
2017-11-14 16:10:53 -05:00
Bill Nottingham
aaaae87aa7 Remove some environment variables the callback plugin doesn't actually use. 2017-11-14 15:57:49 -05:00
Ryan Petrello
44a2d7a346 Merge pull request #577 from ryanpetrello/release_3.2.2
render survey_spec for display purposes in a safe manner
2017-11-13 15:18:09 -05:00
Ryan Petrello
be00b1ca96 render survey_spec for display purposes in a safe manner
survey_spec is a nested dict, so if we don't `deepcopy()` it, updates
to the individual fields could corrupt the original data structure;
this was causing a bug whereby activity stream updates converted
encrypted survey password defaults -> `$encrypted$`, but inadvertently
modified the originating model due to shared references

see: https://github.com/ansible/ansible-tower/issues/7769
2017-11-13 13:01:56 -05:00
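The fix pattern is worth spelling out: a nested dict shared between the activity stream and the model is exactly where a shallow copy fails. A sketch, with the spec shape assumed:
```python
import copy

def survey_spec_for_display(spec):
    """Mask password defaults without corrupting the originating model."""
    spec = copy.deepcopy(spec)      # a shallow copy would share the inner dicts
    for field in spec.get('spec', []):
        if field.get('type') == 'password' and field.get('default'):
            field['default'] = '$encrypted$'
    return spec

original = {'spec': [{'type': 'password', 'default': 'hunter2'}]}
shown = survey_spec_for_display(original)
assert original['spec'][0]['default'] == 'hunter2'   # model untouched
assert shown['spec'][0]['default'] == '$encrypted$'
```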
AlanCoding
33574d70c8 do not propagate cancel of inventory sync back up to project 2017-11-13 08:44:00 -05:00
Ryan Petrello
bc705ad8ce Merge pull request #574 from ryanpetrello/fix-7764
properly perform validation on encrypted survey defaults
2017-11-10 12:07:54 -05:00
Ryan Petrello
78961c8037 properly perform validation on encrypted survey defaults
see: https://github.com/ansible/ansible-tower/issues/7764
2017-11-10 10:52:09 -05:00
Alan Rominger
e22486ada8 Merge pull request #573 from AlanCoding/7765
[3.2.2] fix bug of system auditor 404 viewing job
2017-11-08 11:16:28 -05:00
AlanCoding
0051da95c9 fix bug of system auditor 404 viewing job 2017-11-08 10:44:41 -05:00
Ryan Petrello
122142c040 Merge pull request #572 from ryanpetrello/isolated-debug-toolkit
add some useful tools for isolated connectivity debugging
2017-11-08 10:08:05 -05:00
Ryan Petrello
91ad0a9f89 add a useful tool for isolated connectivity debugging 2017-11-08 09:27:33 -05:00
Ryan Fitzpatrick
6ea3ecbb26 Merge pull request #567 from rmfitzpatrick/tower_inv_source_filter_wording
Provide more specificity to Tower inventory filter help text
2017-11-07 10:55:30 -05:00
Jared Tabor
e87dce023b Merge pull request #568 from jaredevantabor/source-deletion
When deleting a source, delete the source's groups too
2017-11-06 11:27:46 -08:00
Matthew Jones
89a05e9bbc Handle json decoder errors from tower inventory source 2017-11-06 14:15:53 -05:00
Jared Tabor
96fbc9ea27 Merge pull request #571 from jaredevantabor/6551
Band aid for rapidly deleting groups
2017-11-03 16:28:22 -07:00
Jared Tabor
e70d377a53 feedback from PR: launch both DELETE calls at the same time
for both groups and hosts.
2017-11-03 16:26:00 -07:00
Jared Tabor
f65ef9f75c delay removing the delete modal until after the list refreshes, post-delete 2017-11-03 15:25:39 -07:00
Wayne Witzel III
7149c41804 Merge pull request #570 from wwitzel3/release_3.2.2
Handle ProgrammingError in squashed helpers
2017-11-03 10:45:09 -04:00
Wayne Witzel III
1a5b5c32b8 Handle ProgrammingError in squashed helpers 2017-11-03 10:34:25 -04:00
Wayne Witzel III
1b44ca8ef4 Merge pull request #569 from wwitzel3/release_3.2.2
Handle programming error when evaluating the replaces list for 320
2017-11-03 09:02:58 -04:00
Wayne Witzel III
d7f4707044 Handle programming error when evaluating the replaces list for 320 2017-11-03 08:53:53 -04:00
Jared Tabor
9d39ac83f9 When deleting a source, delete the source's groups too 2017-11-02 15:59:09 -07:00
Wayne Witzel III
ce393da6fd Merge pull request #564 from wwitzel3/squashbillies
Squashbillies - Fixing direct upgrades from 3.0/3.1 bug fix revisions.
2017-11-02 16:19:26 -04:00
Wayne Witzel III
2f86774006 3.2.0 should not have any replacements defined unless 0005a or 0005b migrations have been run previously 2017-11-02 16:11:45 -04:00
Ryan Fitzpatrick
e2c63c41e7 Provide more specificity to Tower inventory filter help text 2017-11-02 15:01:03 -04:00
Wayne Witzel III
f9685717b8 Move post 3.0 migrations to pre 3.1 position in migration files 2017-11-02 14:06:36 -04:00
Wayne Witzel III
47a3ba9bd5 Rename squash 300 to 30 2017-11-02 14:06:36 -04:00
Wayne Witzel III
af3e6f792c Rename squash 310 to 31 2017-11-02 14:06:35 -04:00
Wayne Witzel III
fc56a1c170 Fix 3.0 to 3.2 migration paths 2017-11-02 14:06:35 -04:00
Wayne Witzel III
84fb908261 Fix 3.1 to 3.2 migration paths 2017-11-02 14:06:33 -04:00
jlmitch5
cb4a38d7a7 Merge pull request #566 from ansible/click-to-close
Add close logic to clicking outside of the bounds of a lookup modal
2017-11-02 13:33:03 -04:00
John Mitchell
9518c38bb8 add close logic to clicking outside of the bounds of a lookup modal 2017-11-02 12:20:53 -04:00
Matthew Jones
5e37d6ea7e Remove unused TOWER_HOST and AWX_HOST env vars
These were conflicting with the new Tower credential
2017-11-02 10:44:06 -04:00
jlmitch5
54e76b2534 Merge pull request #565 from ansible/no_placeholder_select2_typeahead
munge placeholder from select2 typeahead search bar
2017-11-01 12:45:29 -04:00
jlmitch5
b8ed41fa82 munge placeholder from select2 typeahead search bar 2017-10-31 14:39:25 -04:00
Michael Abashian
fbd03287ea Merge pull request #549 from mabashian/7697-smart-inv-pagination
Only pass host filter param to smart inv shortcut form
2017-10-31 13:29:52 -04:00
Michael Abashian
7919433288 Merge pull request #548 from mabashian/7752-host-list
Fixed related host list linking
2017-10-31 13:29:35 -04:00
Ryan Petrello
3568be84c8 Merge pull request #561 from ryanpetrello/idle-hands
improve the callback worker's ability to deal with idle/disconnected DB
2017-10-31 10:05:29 -04:00
Ryan Petrello
8d2ab3de42 improve the callback worker's ability to deal with idle/disconnected DB
if database connectivity is lost, callback workers currently raise an
uncaught exception and hang; this can cause the entire process to stop
handling callback events

see: https://github.com/ansible/ansible-tower/issues/7660
2017-10-31 09:51:13 -04:00
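A sketch of the recover-instead-of-hang pattern, using Django's standard connection handling:
```python
from django.db import DatabaseError, connection

def save_event_resiliently(event_data, save_event):
    """Survive an idle/dropped DB connection instead of hanging the worker."""
    try:
        save_event(event_data)
    except DatabaseError:
        connection.close()      # force Django to reconnect on next use
        save_event(event_data)  # retry once on a fresh connection
```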
Jared Tabor
4c4cbaef9f Merge pull request #562 from jaredevantabor/timezone
Changing angular-tz-extensions branch
2017-10-30 14:21:19 -07:00
Jared Tabor
aef224732c changing angular-tz to point to a branch with a patch for UTC timezones,
patching angular-scheduler and angular-codemirror to point to angular 1.4.14,
adding fsevents:"*" to the package.json, and regenerating
npm-shrinkwrap.json for the new dependencies and their branches.
2017-10-30 14:20:24 -07:00
Jake McDermott
b0c1be7338 Merge pull request #563 from jakemcdermott/bug-7718
append credential types documentation link to popovers
2017-10-30 16:24:19 -04:00
Jake McDermott
14a3a6073e append credential types documentation link to help popovers 2017-10-30 15:23:27 -04:00
Jake McDermott
fc7c2117e9 Merge pull request #553 from jakemcdermott/bug-5449
use abbreviated month name for dashboard chart
2017-10-27 17:01:47 -04:00
Aaron Tan
962de13965 Merge pull request #503 from jangsutsr/fix-7712
[3.2.2]Special handle host related_search_fields
2017-10-27 11:16:09 -04:00
Aaron Tan
7211ff22df Special handle host related_search_fields
Relates #7712 of ansible-tower.

The UI uses the `related_search_fields` list to populate help text for resource
search. `ansible_facts` is searchable via the UI, but the general pickup
logic would ignore it, so make it a corner case.

Signed-off-by: Aaron Tan <jangsutsr@gmail.com>
2017-10-27 10:55:43 -04:00
Alan Rominger
003d7f0915 Merge pull request #543 from AlanCoding/urlencode_host_filter
[3.2.2] urlencode unquote host_filter on save
2017-10-27 08:50:39 -04:00
Matthew Jones
f019452207 Merge pull request #551 from ansible/tower_inventory_source
Tower inventory source
2017-10-27 08:41:24 -04:00
Alan Rominger
c323a2393a Merge pull request #552 from AlanCoding/retry_cleanup
[3.2.2] retry cleanup of build artifacts for bwrap race condition
2017-10-27 08:23:28 -04:00
Matthew Jones
85be3c7692 Align inventory variables with Ansible modules 2017-10-27 08:12:14 -04:00
Matthew Jones
5f3ebc26e0 Adding license checks for Tower inventory source
* For Tower the license must match between the source and destination
* For AWX the check is disabled
* Hosts imported from another Tower don't count against your license
  in the local Tower
* Fix up some issues with enablement
* Prevent slashes from being used in the instance filter
* Add &all=1 filter to make sure we pick up all hosts
2017-10-27 08:12:14 -04:00
Matthew Jones
d282966aa1 Use towervars to enable turning on remote tracking vars on Tower src
* This allows the local Tower to track enabled state and unique
  instance id for each host imported from the remote Tower
2017-10-27 08:12:14 -04:00
Matthew Jones
71e132ce0f Show instance filter ui element with tower inventory source 2017-10-27 08:12:14 -04:00
Jake McDermott
d6d84e8f5e use abbreviated month name for dashboard chart 2017-10-26 21:44:01 -04:00
Matthew Jones
fdc7f58bb4 Support passing instance filters to tower inventory src
* Switch ignore ssl errors to default on
* Application inventory source defaults for Tower src
2017-10-26 13:51:05 -04:00
Matthew Jones
6c597ad165 Adding initial credential and invsrc for Tower
* New credential type for Tower
* Inventory source definitions and migrations for Tower
* Initial Tower inventory source script
2017-10-26 13:51:05 -04:00
AlanCoding
48ec69c4f5 retry cleanup of build artifacts for bwrap race condition 2017-10-26 13:33:21 -04:00
Aaron Tan
1ea3d55167 Merge pull request #550 from jangsutsr/fix-7737-1
Follow up fix #7737
2017-10-26 11:29:05 -04:00
Aaron Tan
7181bd1c9b Follow up fix #7737
The original fix introduced a migration failure; this PR fixes that.

Signed-off-by: Aaron Tan <jangsutsr@gmail.com>
2017-10-26 11:15:07 -04:00
Ryan Petrello
9e8ac3b09b Merge pull request #547 from ryanpetrello/fix-cleanup-memory-usage
[3.2.2] Backport (from awx) various memory optimizations for job cleanup
2017-10-26 10:39:58 -04:00
Aaron Tan
e24e1fc1f0 Merge pull request #542 from jangsutsr/fix-7737
[3.2.2]support `AZURE_CLOUD_ENVIRONMENT`
2017-10-26 10:20:14 -04:00
Aaron Tan
f28b48a473 support AZURE_CLOUD_ENVIRONMENT
Relates #7737 of ansible-tower.

Signed-off-by: Aaron Tan <jangsutsr@gmail.com>
2017-10-26 10:11:21 -04:00
Michael Abashian
4f58537949 Merge pull request #546 from mabashian/6209-license-readonly-auth-forms
Make codemirror read-only when auth form field is disabled
2017-10-26 09:58:24 -04:00
mabashian
0512f65c8f Only pass host filter param to smart inv shortcut form 2017-10-26 09:55:36 -04:00
mabashian
947bdeed3e Fixed related host list linking 2017-10-25 16:59:44 -04:00
Mike McMahon
d3a7bec674 Backport (from awx) various memory optimizations for job cleanup
see: https://github.com/ansible/ansible-tower/issues/7751

0388568 Reduces the job to only looking at objects older than the cutoff date
0234311 missing colon and missing variable usage
399e0e5 switching to iterator and adding the missed Job cleanup
0cd34c1 jobs take count of gte cutoff, process only lt cutoff
2017-10-25 12:17:46 -04:00
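The key optimizations read roughly like this sketch: stream rows with `.iterator()` and never pull stdout blobs into memory (the deferred column name follows `main_unifiedjob.result_stdout_text`, mentioned elsewhere in this log):
```python
def purge_old_jobs(job_queryset, cutoff):
    """Delete jobs older than `cutoff` without ballooning memory."""
    candidates = (
        job_queryset
        .filter(created__lt=cutoff)          # only look at objects past the cutoff
        .defer('result_stdout_text')         # don't fetch huge stdout blobs
    )
    for job in candidates.iterator():        # stream rows, no queryset cache
        job.delete()
```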
Ryan Petrello
652facba9f Merge pull request #545 from ryanpetrello/fix-7746
work around an ansible bug that can cause project syncs to fail
2017-10-25 11:37:15 -04:00
mabashian
b1ef7506ea Make codemirror read-only when auth form field is disabled 2017-10-25 11:30:26 -04:00
Ryan Petrello
c95d7d465a work around an ansible bug that can cause project syncs to fail
https://github.com/ansible/ansible-tower/issues/7746
https://github.com/ansible/ansible/issues/30064
2017-10-25 11:09:39 -04:00
Michael Abashian
70919638ba Merge pull request #541 from mabashian/7608-adhoc-launch-modal
Made adhoc launch modal height dynamic
2017-10-24 14:07:32 -04:00
Michael Abashian
6ea48cd73e Merge pull request #540 from mabashian/6370-delete-inv-src
Delete inv source hosts before inv source
2017-10-24 14:07:08 -04:00
Michael Abashian
63ca8e4134 Merge pull request #538 from mabashian/7707-host-filter-remove-tags-v2
Fixed removing host filter search term with encoded character
2017-10-24 14:06:37 -04:00
Aaron Tan
725cc469cf Merge pull request #544 from jangsutsr/fix-7747
[3.2.2]Include vault credential check in job relaunch
2017-10-24 12:14:01 -04:00
Aaron Tan
665a4d83e3 Include vault credential check in job relaunch
Relates #7747 of ansible-tower.

Signed-off-by: Aaron Tan <jangsutsr@gmail.com>
2017-10-24 11:00:13 -04:00
AlanCoding
018514d657 urlencode unquote host_filter on save 2017-10-24 08:05:08 -04:00
mabashian
71d428433f Made adhoc launch modal height dynamic 2017-10-23 12:09:25 -04:00
Michael Abashian
2f689fffbe Merge pull request #531 from mabashian/4796-workflow-resize-v2
Zoom workflow graph to fit screen on initial load
2017-10-23 10:47:14 -04:00
mabashian
3119d5ed22 Delete inv source hosts before inv source 2017-10-20 20:10:36 -04:00
Ryan Petrello
aab27e9b93 Merge pull request #539 from ryanpetrello/fix-7740
fix a unicode handling bug in inventory source name migration
2017-10-20 13:35:20 -04:00
Ryan Petrello
b60a30cbd4 fix a unicode handling bug in inventory source name migration
see: https://github.com/ansible/ansible-tower/issues/7740
2017-10-20 12:13:44 -04:00
Ryan Petrello
88acd95a72 Merge pull request #534 from ryanpetrello/release_3.2.2
store cloudforms inventory cache files in the proper location on disk
2017-10-20 09:41:05 -04:00
mabashian
c3fbb07535 Fixed removing host filter search term with encoded character 2017-10-19 19:31:41 -04:00
Michael Abashian
8d043e6f85 Merge pull request #532 from mabashian/7681-disassociate-help-popover
Disassociate host/group popover text
2017-10-19 11:12:28 -04:00
Michael Abashian
31602c4b28 Merge pull request #533 from mabashian/7720-adhoc-launch-error
Fixed error message when launching adhoc command
2017-10-19 11:11:44 -04:00
Ryan Petrello
57cd8adc2d Merge pull request #537 from ryanpetrello/ovirt4-auth-module
properly support authentication for ovirt4 ansible modules
2017-10-19 09:57:16 -04:00
Ryan Petrello
c1e20fe7a0 properly support authentication for ovirt4 ansible modules
see: https://github.com/ansible/ansible-tower/issues/6522
see: https://github.com/ansible/ansible-tower/issues/6522#issuecomment-337909863
2017-10-19 09:47:25 -04:00
Ryan Petrello
b1f5529aa4 Merge pull request #536 from ryanpetrello/fix-7741
properly follow symlinks for bwrap'd working directories
2017-10-19 08:58:16 -04:00
Alan Rominger
350699eda8 Merge pull request #504 from AlanCoding/fk_error_msg
[3.2.2] tweak of error message for ForeignKey filters
2017-10-18 19:06:12 -04:00
Ryan Petrello
10a7544d68 properly follow symlinks for bwrap'd working directories
see: https://github.com/ansible/ansible-tower/issues/7741
2017-10-18 17:03:10 -04:00
Jared Tabor
d3eea5e694 generalizing class which is ignored when trying to drag the host-event-modal
it was only applied to .CodeMirror, which is only used by the JSON tab
2017-10-18 10:55:53 -07:00
Marliana Lara
8fd9fea113 Merge pull request #530 from marshmalien/fix/7702-job-stdout-wordwrap
Fix job standard out word-wrap
2017-10-18 13:51:36 -04:00
Wayne Witzel III
470a4b7746 Merge pull request #535 from wwitzel3/release_3.2.2
use getattr in social auth django strategy
2017-10-18 11:02:11 -04:00
Wayne Witzel III
38c2ea7025 use getattr in social auth django strategy 2017-10-18 10:20:44 -04:00
Ryan Petrello
5895654538 store cloudforms inventory cache files in the proper location on disk
with process isolation enabled (which is the awx default), cloudforms
caches inventory script results on disk; awx should direct cloudforms to
store these cache files in a location that's exposed to the isolated
environment

see: ansible/ansible#31760
2017-10-17 17:07:21 -04:00
mabashian
b402d9ba6d Fixed error message when launching adhoc command 2017-10-17 14:42:24 -04:00
mabashian
5db478a4a0 Zoom workflow graph to fit screen on initial load 2017-10-17 12:59:44 -04:00
mabashian
059347eec3 Made disassociate host/group titles more descriptive 2017-10-17 10:23:17 -04:00
mabashian
e8dbfa42cf Fixed disassociate host from group help text 2017-10-17 10:10:34 -04:00
Michael Abashian
3d12e040ed Merge pull request #528 from mabashian/5129-jt-spinner-v2
Moved wait stop calls on jt form so that they fire right before reloading state
2017-10-16 16:20:02 -04:00
Michael Abashian
fceca3bcae Merge pull request #527 from mabashian/7697-smart-inventory-shortcut
Fixed smart inv button bug navigating to page 2 of hosts.
2017-10-16 16:19:40 -04:00
Marliana Lara
fcd03fb1c2 Fix job standard out error message word-wrap 2017-10-13 14:59:57 -04:00
mabashian
2cab6982c1 Moved wait stop calls on jt form so that they fire right before reloading state 2017-10-12 17:01:32 -04:00
mabashian
3ede367df4 Fixed smart inv button bug navigating to page 2 of hosts. Added tooltip when button is disabled. 2017-10-12 16:56:21 -04:00
Alan Rominger
f6bf0ad21f Merge pull request #521 from AlanCoding/update_isolated
[3.2.2] update isolated container requirements
2017-10-12 14:24:38 -04:00
Greg Considine
817b397d20 Merge pull request #524 from gconsidine/ui/fix/remove-unsupported-query-tokens
Remove unsupported tokens from search generated queries
2017-10-11 17:54:26 -04:00
jlmitch5
b61fdaf721 Merge pull request #526 from jlmitch5/hideWorkflowAndSurveyButtons
hide workflow and survey buttons from non-detail tabs
2017-10-11 11:35:35 -04:00
John Mitchell
1603106cb4 include workflow editor when showing buttons 2017-10-11 10:58:38 -04:00
Aaron Tan
1454000b91 Merge pull request #522 from jangsutsr/ldap_docs
Add LDAP deploy instructions
2017-10-11 10:07:29 -04:00
Alan Rominger
b2e63d5e47 Merge pull request #510 from AlanCoding/event_accounting
add logger statement for number of events
2017-10-10 22:12:11 -04:00
Alan Rominger
e7ede6af4a Merge pull request #525 from AlanCoding/update_isolated_version
[3.2.2] update fallback isolated version to 3.2.2
2017-10-10 22:06:09 -04:00
Alan Rominger
5503d4efb4 Merge pull request #523 from AlanCoding/fix_isolated_capacity
[3.2.2] fix equation for isolated instance capacity
2017-10-10 22:04:03 -04:00
John Mitchell
54640dbca0 hide workflow and survey buttons from non-detail tabs
since the two are basically sub-states of the edit form (detail tab), they should only show up when that tab is selected
2017-10-10 17:08:04 -04:00
AlanCoding
eab82f3efa updated fallback isolated version to 3.2.2 2017-10-10 15:45:20 -04:00
gconsidine
9e3d90896b Remove unsupported tokens from search generated queries 2017-10-10 15:22:17 -04:00
AlanCoding
e66a1002ee fix equation for isolated instance capacity 2017-10-10 14:58:09 -04:00
Aaron Tan
82160e2072 Add LDAP deploy instructions
Signed-off-by: Aaron Tan <jangsutsr@gmail.com>
2017-10-10 14:54:53 -04:00
AlanCoding
e814f28039 add logger statement for number of events 2017-10-10 14:48:00 -04:00
AlanCoding
03e58523b2 tweak of error message for ForeignKey filters 2017-10-10 14:47:37 -04:00
AlanCoding
341ef411a4 update isolated container requirements 2017-10-10 14:46:41 -04:00
Aaron Tan
8d19555cf1 Merge pull request #519 from jangsutsr/fix-7726
Disable inventory var overwrite in inv import
2017-10-10 14:38:50 -04:00
Greg Considine
d23fd0515d Merge pull request #518 from gconsidine/ui/fix/credential-kind-list-display
Use credential_type to fetch associated types in list view
2017-10-10 14:38:50 -04:00
Aaron Tan
b9483c28b0 Disable inventory var overwrite in inv import
Relates #7726 of ansible-tower.

Signed-off-by: Aaron Tan <jangsutsr@gmail.com>
2017-10-10 14:38:50 -04:00
Alan Rominger
6f9fc0c3f8 Merge pull request #514 from AlanCoding/no_ordereddit
[3.2.2] prevent OrderedDict syntax in error message
2017-10-10 14:38:49 -04:00
gconsidine
766a088749 Use credential_type to fetch associated types in list view 2017-10-10 14:38:49 -04:00
Chris Church
2b539cab85 Merge pull request #511 from cchurch/ldap-filter-dash-support
[3.2.2] Support dash in LDAP attribute names in filters.
2017-10-10 14:38:49 -04:00
AlanCoding
2fb67a3648 prevent OrderedDict syntax in error message 2017-10-10 14:38:49 -04:00
Greg Considine
64c5e3994e Merge pull request #513 from gconsidine/ui/fix/lookup-component-empty-input
Set lookup value changed from something to nothing to be null
2017-10-10 14:38:49 -04:00
Alan Rominger
7b792926eb Merge pull request #509 from AlanCoding/max_ui_events
[3.2.2] add CTiT setting for max UI job events
2017-10-10 14:38:49 -04:00
Chris Church
c067788428 Support dash in LDAP attribute names in filters. 2017-10-10 14:38:49 -04:00
gconsidine
b7071a48c2 Set lookup value changed from something to nothing to be null 2017-10-10 14:38:49 -04:00
AlanCoding
dee4b72303 add CTiT setting for max UI job events 2017-10-10 14:38:49 -04:00
Alan Rominger
5994a77b84 Merge pull request #508 from AlanCoding/password_handholding
[3.2.2] reword error message about encrypted user input
2017-10-10 14:38:48 -04:00
Marliana Lara
f93506fe2c Merge pull request #491 from marshmalien/fix/7661-host-config-key-border
[3.2.2] Fix missing right border of lookup buttons
2017-10-10 14:38:48 -04:00
Alan Rominger
7c86e38b81 Merge pull request #506 from AlanCoding/active_job_period
[3.2.2] add period to active job conflict error
2017-10-10 14:38:48 -04:00
AlanCoding
1c374fba7d reword error message about encrypted user input 2017-10-10 14:38:48 -04:00
Marliana Lara
2cc9e2ca0b Fix hidden right border of form input lookup buttons 2017-10-10 14:38:48 -04:00
Alan Rominger
335dfd564a Merge pull request #505 from AlanCoding/v1_jt_vc_ct_sf
[3.2.2] Exclude credential type content from v1
2017-10-10 14:38:48 -04:00
AlanCoding
5380d57ce8 add period to active job conflict error
Rename StateConflict to ActiveJobConflict and use a shared
message inside that exception class.
2017-10-10 14:38:48 -04:00
AlanCoding
a01f80db5b Exclude credential type content from v1
credential_type_id was showing up in vault_credential
summary_fields in API v1
2017-10-10 14:38:48 -04:00
Aaron Tan
d7eba47adb Merge pull request #456 from jangsutsr/fix-7656
[3.2.2]Remove search term separators
2017-10-10 14:38:47 -04:00
Alan Rominger
5fffdec69d Merge pull request #490 from AlanCoding/many_deleter_320
[3.2.2] Delete all hosts or groups from inventory source
2017-10-10 14:38:47 -04:00
Aaron Tan
358ef76529 Remove search term separators
Relates #7656 in ansible-tower.

We have been using comma `,` and space ` ` to separate search terms in
query string `<field_name>__search=<search terms>`, however in general
we can always use `&` to achieve separation like
`<field_name>__search=<search term 1>&<field_name>__search=<search term
2>&...`. Using specific delimiters makes it impossible for search terms
to contain those delimiters, so they are better off being removed.

Signed-off-by: Aaron Tan <jangsutsr@gmail.com>
2017-10-10 14:38:47 -04:00
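The removal works because repeated query parameters already provide separation; a rough sketch of ORing repeated `<field>__search` terms together, with `icontains` as a stand-in for the real search lookup:
```python
import operator
from functools import reduce

from django.db.models import Q

def apply_search(queryset, field, terms):
    """Combine ?<field>__search=a&<field>__search=b terms with OR."""
    if not terms:
        return queryset
    filters = [Q(**{'%s__icontains' % field: term}) for term in terms]
    return queryset.filter(reduce(operator.or_, filters))
```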
Aaron Tan
bb628c52ad Merge pull request #487 from jangsutsr/fix-7586
[3.2.2]Fix SAML auth behind load balancer issue.
2017-10-10 14:38:47 -04:00
AlanCoding
d2e0b26287 allow deleting hosts and groups from inv src sublists 2017-10-10 14:38:47 -04:00
Ryan Petrello
f2d46baf09 Merge pull request #496 from ryanpetrello/fix-6683
fix a bug when Tower is integrated with ipsilon SAML server
2017-10-10 14:38:47 -04:00
Ryan Petrello
c6fdadd7f2 Merge pull request #497 from ryanpetrello/fix-7259
properly sanitize encrypted default passwords in JT.survey_spec
2017-10-10 14:38:47 -04:00
Aaron Tan
cc8b115c6a Fix SAML auth behind load balancer issue.
Relates to #7586 of ansible-tower as a follow-up of fix #420 of tower.

The original fix works for Django 1.9 and above; this PR
expands the solution to Django 1.8 and below.

Signed-off-by: Aaron Tan <jangsutsr@gmail.com>
2017-10-10 14:38:47 -04:00
Ryan Petrello
82d05e0a10 properly sanitize encrypted default passwords in JT.survey_spec
see: https://github.com/ansible/ansible-tower/issues/7259
2017-10-10 14:38:47 -04:00
Ryan Petrello
9978b3f9ad Merge pull request #489 from ryanpetrello/release_3.2.2
fix busted 3.2.2 activity stream migration
2017-10-10 14:38:46 -04:00
Alan Rominger
4f4af058b3 Merge pull request #480 from AlanCoding/committed_cap
[3.2.2] add IG committed capacity to serializer
2017-10-10 14:38:46 -04:00
Ryan Petrello
b372cebf8d fix a bug when Tower is integrated with ipsilon SAML server
https://github.com/ansible/ansible-tower/issues/6683
2017-10-10 14:38:46 -04:00
Alan Rominger
3df8e2beb1 Merge pull request #494 from AlanCoding/wfjt_perm_fix2
[3.2.2] fix bug checking WFJT node for prompted resources
2017-10-10 14:38:46 -04:00
AlanCoding
c45fbcf2ee add IG committed capacity to serializer 2017-10-10 14:38:46 -04:00
Ryan Petrello
5efa50788f Merge pull request #481 from ryanpetrello/fix-7046
[3.2.2] encrypt job survey data
2017-10-10 14:38:46 -04:00
AlanCoding
3abbe87e10 fix bug checking WFJT node for prompted resources 2017-10-10 14:38:46 -04:00
Ryan Petrello
f26bdb3e96 migrate existing survey passwords to be encrypted
see: https://github.com/ansible/ansible-tower/issues/7046
2017-10-10 14:38:46 -04:00
Ryan Petrello
4be4e3db7f encrypt job survey data
see: https://github.com/ansible/ansible-tower/issues/7046
2017-10-10 14:38:46 -04:00
Alan Rominger
4ea92f0dcb Merge pull request #306 from AlanCoding/new_perf_logging
[3.2.2] new method of performance logging
2017-10-10 14:38:45 -04:00
Ryan Petrello
a0cfbb93e9 fix busted 3.2.2 activity stream migration
see: ansible/ansible-tower#7704
2017-10-10 14:38:45 -04:00
Aaron Tan
08a784d50c Merge pull request #474 from jangsutsr/fix-7386
Include Tower configurations into activity stream
2017-10-10 14:38:45 -04:00
AlanCoding
9ee18d02c8 new method of performance logging 2017-10-10 14:38:45 -04:00
Ryan Petrello
4fd190e4c8 Merge pull request #468 from ryanpetrello/smarter-credtype-migrations
[3.2.2] add new credential types in a more stable way in migrations
2017-10-10 14:38:45 -04:00
Aaron Tan
a11e33458f Include Tower configurations into activity stream
Relates #7386 of ansible-tower.

Due to the uniqueness of the Tower configuration datastore model, it is not
fully compatible with the activity stream workflow. This PR introduces a
setting field for the activity stream model, along with other changes, to make
Tower configuration a special case for activity streams.

Signed-off-by: Aaron Tan <jangsutsr@gmail.com>
2017-10-10 14:38:45 -04:00
Aaron Tan
84fdfbb898 Merge pull request #469 from jangsutsr/fix-7684
[3.2.2] Prevent slugify username from social sso backends
2017-10-10 14:38:45 -04:00
Ryan Petrello
f4a252a331 add new credential types in a more stable way in migrations
instead of writing individual migrations for new built-in credential
types, this change makes the "setup_tower_managed_defaults" function
idempotent so that it only adds the credential types you're missing
2017-10-10 14:38:45 -04:00
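Idempotence here is essentially `get_or_create` over the built-in types; a sketch of the shape such a data migration could take (the type list and app label are illustrative assumptions):
```python
BUILTIN_CREDENTIAL_TYPES = [
    {'kind': 'cloud', 'name': 'Red Hat Virtualization'},   # illustrative entries
    {'kind': 'cloud', 'name': 'Amazon Web Services'},
]

def setup_tower_managed_defaults(apps, schema_editor):
    """Only add the built-in credential types that are missing."""
    CredentialType = apps.get_model('main', 'CredentialType')
    for entry in BUILTIN_CREDENTIAL_TYPES:
        CredentialType.objects.get_or_create(
            kind=entry['kind'], name=entry['name'], defaults=entry)
```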
Ryan Petrello
d4fe60756b Merge pull request #466 from ryanpetrello/ovirt4-inv-source
don't install pycurl from pypi; use a system package instead
2017-10-10 14:38:45 -04:00
Aaron Tan
f4ab979b59 Prevent slugify username from social sso backends
Relates #7684 of ansible-tower.

Slugifying usernames in python-social-auth disallows
any non-alphanumeric characters, which is overkill
for awx/tower, so it is disabled here.

Signed-off-by: Aaron Tan <jangsutsr@gmail.com>
2017-10-10 14:38:45 -04:00
Ryan Petrello
3d3d79b6b3 Merge pull request #464 from ryanpetrello/ovirt4-inv-source
add ovirt sdk dependency for ovirt4 support
2017-10-10 14:38:44 -04:00
Ryan Petrello
e06d4d7734 don't install pycurl from pypi; use a system package instead
the ovirt4 sdk relies on pycurl, which is complicated to install w/ pip;
rely on pycurl to be provided by a system package instead
2017-10-10 14:38:44 -04:00
Ryan Petrello
ab18a4a440 Merge pull request #454 from ryanpetrello/ovirt4-inv-source
support ovirt4 as a built-in inventory source
2017-10-10 14:38:44 -04:00
Ryan Petrello
7438062b97 add ovirt sdk dependency for ovirt4 support 2017-10-10 14:38:44 -04:00
Ryan Petrello
4510cd11db Merge pull request #452 from ryanpetrello/fix-7609
disable GCE inventory caching w/ a .ini file
2017-10-10 14:38:44 -04:00
Ryan Petrello
74f2509482 support ovirt4 as a built-in inventory source
see: https://github.com/ansible/ansible-tower/issues/6522
2017-10-10 14:38:44 -04:00
Ryan Petrello
f84e42ed15 Merge pull request #451 from ryanpetrello/fix-7609
disable GCE inventory source cache
2017-10-10 14:38:44 -04:00
Ryan Petrello
94b4dabee2 disable GCE inventory caching w/ a .ini file
see: https://github.com/ansible/ansible-tower/issues/7609
see: https://github.com/ansible/tower/pull/451#pullrequestreview-64454393
2017-10-10 14:38:44 -04:00
Ryan Petrello
94d44e8791 disable GCE inventory source cache
by default, the GCE inventory script caches results on disk for
5 minutes; disable this behavior

see: https://github.com/ansible/ansible-tower/issues/7609
2017-10-10 14:38:44 -04:00
Ryan Petrello
d24166bd68 Merge pull request #442 from ryanpetrello/fix-7554
properly encode LDAP DN values on validation
2017-10-10 14:38:43 -04:00
Ryan Petrello
62f82e7a7e Merge pull request #441 from ryanpetrello/fix-7607
allow the credential type to be changed for unused credentials
2017-10-10 14:38:43 -04:00
Ryan Petrello
7a21a45781 properly encode LDAP DN values on validation
see: https://github.com/ansible/ansible-tower/issues/7554
2017-10-10 14:38:43 -04:00
Ryan Petrello
91ec0a4482 Merge pull request #430 from ryanpetrello/fix-7620
don't show polymorphic_ctype in unique validation error messaging
2017-10-10 14:38:43 -04:00
Ryan Petrello
c8f4320b58 allow the credential type to be changed for unused credentials
see: https://github.com/ansible/ansible-tower/issues/7607
2017-10-10 14:38:43 -04:00
Ryan Petrello
71a725c5f8 Merge pull request #432 from ryanpetrello/fix-7513
add awx meta variables to adhoc command extra_vars
2017-10-10 14:38:43 -04:00
Ryan Petrello
96572fe3d4 don't show polymorphic_ctype in unique validation error messaging
see: https://github.com/ansible/ansible-tower/issues/7620
2017-10-10 14:38:43 -04:00
Ryan Petrello
554a9586c6 add awx meta variables to adhoc command extra_vars
see: https://github.com/ansible/ansible-tower/issues/7513
2017-10-10 14:38:37 -04:00
Ryan Petrello
f41c8cf4f2 Merge pull request #426 from ryanpetrello/fix-7655
don't append to the activity stream on LDAP group disassociate
2017-10-10 14:38:18 -04:00
Ryan Petrello
f2f42c2c8a don't append to the activity stream on LDAP group disassociate
for organizations w/ a large number of ldap orgs/teams, this results in
a _huge_ number of extraneous activity stream entries

see: https://github.com/ansible/ansible-tower/issues/7655
2017-10-10 14:38:18 -04:00
574 changed files with 27918 additions and 25937 deletions

View File

@@ -1,31 +0,0 @@
sudo: false
language: python
python:
- '2.7'
env:
- TOXENV=api-lint
- TOXENV=api
- TOXENV=ui-lint
- TOXENV=ui
install:
- pip install tox
script:
- tox
# after_success:
# - TOXENV=coveralls tox
addons:
apt:
packages:
- swig
- libxmlsec1-dev
- postgresql-9.5
- libssl-dev
cache:
pip: true
directories:
- node_modules
- .tox
services:
- mongodb
# Enable when we stop using sqlite for API tests
# - postgresql

View File

@@ -24,6 +24,7 @@ Have questions about this document or anything not covered here? Come chat with
* [Start a shell](#start-the-shell)
* [Create a superuser](#create-a-superuser)
* [Load the data](#load-the-data)
* [Building API Documentation](#building-api-documentation)
* [Accessing the AWX web interface](#accessing-the-awx-web-interface)
* [Purging containers and images](#purging-containers-and-images)
* [What should I work on?](#what-should-i-work-on)
@@ -57,7 +58,7 @@ For Linux platforms, refer to the following from Docker:
> https://docs.docker.com/engine/installation/linux/docker-ce/fedora/
**Centos**
**CentOS**
> https://docs.docker.com/engine/installation/linux/docker-ce/centos/
@@ -217,7 +218,7 @@ If you want to start and use the development environment, you'll first need to b
(container)# /bootstrap_development.sh
```
The above will do all the setup tasks, including running database migrations, so it amy take a couple minutes.
The above will do all the setup tasks, including running database migrations, so it may take a couple minutes.
Now you can start each service individually, or start all services in a pre-configured tmux session like so:
@@ -261,6 +262,20 @@ You can optionally load some demo data. This will create a demo project, invento
> This information will persist in the database running in the `tools_postgres_1` container, until the container is removed. You may periodically need to recreate
this container, and thus the database, if the database schema changes in an upstream commit.
##### Building API Documentation
AWX includes support for building [Swagger/OpenAPI
documentation](https://swagger.io). To build the documentation locally, run:
```bash
(container)/awx_devel$ make swagger
```
This will write a file named `swagger.json` that contains the API specification
in OpenAPI format. A variety of online tools are available for translating
this data into more consumable formats (such as HTML). http://editor.swagger.io
is an example of one such service.
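For a quick local sanity check of the generated file, one option (assuming the `jq` utility is installed; this is illustrative, not part of the AWX tooling) is:
```bash
# Count the endpoints and show the API version recorded in the spec.
jq '.paths | keys | length' swagger.json
jq '.info.version' swagger.json
```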
### Accessing the AWX web interface
You can now log into the AWX web interface at [https://localhost:8043](https://localhost:8043), and access the API directly at [https://localhost:8043/api/](https://localhost:8043/api/).
@@ -281,7 +296,7 @@ For feature work, take a look at the current [Enhancements](https://github.com/a
If it has someone assigned to it then that person is the person responsible for working the enhancement. If you feel like you could contribute then reach out to that person.
Fixing bugs, adding translations, and updating the documentation are always appreciated, so reviewing the backlog of issues is always a good place to start.
Fixing bugs, adding translations, and updating the documentation are always appreciated, so reviewing the backlog of issues is always a good place to start. For extra information on debugging tools, see [Debugging](https://github.com/ansible/awx/blob/devel/docs/debugging.md).
**NOTE**
@@ -293,7 +308,7 @@ Fixing bugs, adding translations, and updating the documentation are always appr
## Submitting Pull Requests
Fixes and Features for AWX will go through the Github pull request process. Submit your pull request (PR) agains the `devel` branch.
Fixes and Features for AWX will go through the Github pull request process. Submit your pull request (PR) against the `devel` branch.
Here are a few things you can do to help the visibility of your change, and increase the likelihood that it will be accepted:
@@ -312,7 +327,7 @@ It's generally a good idea to discuss features with us first by engaging us in t
We like to keep our commit history clean, and will require resubmission of pull requests that contain merge commits. Use `git pull --rebase`, rather than
`git pull`, and `git rebase`, rather than `git merge`.
Sometimes it might take us a while to fully review your PR. We try to keep the `devel` branch in good working order, and so we review requests carefuly. Please be patient.
Sometimes it might take us a while to fully review your PR. We try to keep the `devel` branch in good working order, and so we review requests carefully. Please be patient.
All submitted PRs will have the linter and unit tests run against them, and the status reported in the PR.

View File

@@ -13,24 +13,30 @@ This document provides a guide for installing AWX.
- [Choose a deployment platform](#choose-a-deployment-platform)
- [Official vs Building Images](#official-vs-building-images)
- [OpenShift](#openshift)
- [Prerequisites](#prerequisites)
- [Prerequisites](#prerequisites-1)
- [Deploying to Minishift](#deploying-to-minishift)
- [Pre-build steps](#pre-build-steps)
- [PostgreSQL](#postgresql)
- [Start the build](#start-the-build)
- [Post build](#post-build)
- [Accessing AWX](#accessing-awx)
- [Docker](#docker)
- [Kubernetes](#kubernetes)
- [Prerequisites](#prerequisites-2)
- [Pre-build steps](#pre-build-steps-1)
- [Start the build](#start-the-build-1)
- [Accessing AWX](#accessing-awx-1)
- [SSL Termination](#ssl-termination)
- [Docker or Docker Compose](#docker-or-docker-compose)
- [Prerequisites](#prerequisites-3)
- [Pre-build steps](#pre-build-steps-2)
- [Deploying to a remote host](#deploying-to-a-remote-host)
- [Inventory variables](#inventory-variables)
- [Docker registry](#docker-registry)
- [PostgreSQL](#postgresql-1)
- [Proxy settings](#proxy-settings)
- [Start the build](#start-the-build-1)
- [Start the build](#start-the-build-2)
- [Post build](#post-build-1)
- [Accessing AWX](#accessing-awx-1)
- [Accessing AWX](#accessing-awx-2)
## Getting started
@@ -54,7 +60,7 @@ Before you can run a deployment, you'll need the following installed in your loc
- [Docker](https://docs.docker.com/engine/installation/)
- [docker-py](https://github.com/docker/docker-py) Python module
- [GNU Make](https://www.gnu.org/software/make/)
- [Git](https://git-scm.com/)
- [Git](https://git-scm.com/), version 1.8.4+
### System Requirements
@@ -63,7 +69,7 @@ The system that runs the AWX service will need to satisfy the following requirem
- At least 4GB of memory
- At least 2 cpu cores
- At least 20GB of space
- Running Docker or Openshift
- Running Docker, OpenShift, or Kubernetes
### AWX Tunables
@@ -71,11 +77,14 @@ The system that runs the AWX service will need to satisfy the following requirem
### Choose a deployment platform
We currently support running AWX as a containerized application using Docker images deployed to either an OpenShift cluster, or a standalone Docker daemon. The remainder of this document will walk you through the process of building the images, and deploying them to either platform.
We currently support running AWX as a containerized application using Docker images deployed to an OpenShift cluster, a Kubernetes cluster, docker-compose, or a standalone Docker daemon. The remainder of this document will walk you through the process of building the images and deploying them to the platform of your choice.
The [installer](./installer) directory contains an [inventory](./installer/inventory) file, and a playbook, [install.yml](./installer/install.yml). You'll begin by setting variables in the inventory file according to the platform you wish to use, and then you'll start the image build and deployment process by running the playbook.
In the sections below, you'll find deployment details and instructions for each platform. To deploy to Docker, view the [Docker section](#docker), and for OpenShift, view the [OpenShift section](#openshift).
In the sections below, you'll find deployment details and instructions for each platform:
- [OpenShift](#openshift)
- [Kubernetes](#kubernetes)
- [Docker or Docker Compose](#docker-or-docker-compose)
### Official vs Building Images
@@ -133,10 +142,6 @@ Before starting the build process, review the [inventory](./installer/inventory)
> Name of the OpenShift project that will be created, and used as the namespace for the AWX app. Defaults to *awx*.
*awx_node_port*
> The web server port running inside the AWX pod. Defaults to *30083*.
*openshift_user*
> Username of the OpenShift user that will create the project, and deploy the application. Defaults to *developer*.
@@ -144,7 +149,7 @@ Before starting the build process, review the [inventory](./installer/inventory)
*docker_registry*
> IP address and port, or URL, for accessing a registry that the OpenShift cluster can access. Defaults to *172.30.1.1:5000*, the internal registry delivered with Minishift. This is not needed if you are using official hosted images.
*docker_registry_repository*
> Namespace to use when pushing and pulling images to and from the registry. Generally this will match the project name. It defaults to *awx*. This is not needed if you are using official hosted images.
@@ -271,16 +276,88 @@ The above example is taken from a Minishift instance. From a web browser, use `h
Once you access the AWX server, you will be prompted with a login dialog. The default administrator username is `admin`, and the password is `password`.
## Docker
## Kubernetes
### Prerequisites
You will need the following installed on the host where AWX will be deployed:
A Kubernetes deployment will require you to have access to a Kubernetes cluster as well as the following tools:
- [Docker](https://docs.docker.com/engine/installation/)
- [docker-py](https://github.com/docker/docker-py) Python module
- [kubectl](https://kubernetes.io/docs/tasks/tools/install-kubectl/)
- [helm](https://docs.helm.sh/using_helm/#quickstart-guide)
Note: After installing Docker, the Docker service must be started.
The installation program will reference `kubectl` directly. `helm` is only necessary if you are letting the installer configure PostgreSQL for you.
### Pre-build steps
Before starting the build process, review the [inventory](./installer/inventory) file, and uncomment and provide values for the following variables found in the `[all:vars]` section (see the example excerpt after the list). Make sure the OpenShift and standalone Docker sections are commented out:
*kubernetes_context*
> Prior to running the installer, make sure you've configured the context for the cluster you'll be installing to. This is how the installer knows which cluster to connect to and what authentication to use.
*awx_kubernetes_namespace*
> Name of the Kubernetes namespace where the AWX resources will be installed. This will be created if it doesn't exist.
*docker_registry_*
> These settings should be used if building your own base images. You'll need access to an external registry, and you are responsible for making sure your Kubernetes cluster can talk to it and use it. If these are undefined and the `dockerhub_` configuration settings are uncommented, the images will be pulled from Docker Hub instead.
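For illustration, a hypothetical `[all:vars]` excerpt for a Kubernetes install (the values are placeholders, not defaults):
```bash
kubernetes_context=my-cluster-context
awx_kubernetes_namespace=awx
```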
### Start the build
After making changes to the `inventory` file, use `ansible-playbook` to begin the install:
```bash
$ ansible-playbook -i inventory install.yml
```
### Post build
After the playbook run completes, check the status of the deployment by running `kubectl get pods --namespace awx` (replace awx with the namespace you used):
```bash
# View the running pods; it may take a few minutes for everything to reach the Running state
$ kubectl get pods --namespace awx
NAME READY STATUS RESTARTS AGE
awx-2558692395-2r8ss 4/4 Running 0 29s
awx-postgresql-355348841-kltkn 1/1 Running 0 1m
```
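To block until the rollout finishes, something like the following may help (a sketch; it assumes the Deployment created by the installer is named `awx`):
```bash
# Waits until all replicas of the awx deployment report ready.
kubectl rollout status deployment/awx --namespace awx
```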
### Accessing AWX
The AWX web interface is running in the AWX pod behind the `awx-web-svc` service:
```bash
# View available services
$ kubectl get svc --namespace awx
NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE
awx-postgresql ClusterIP 10.7.250.208 <none> 5432/TCP 2m
awx-web-svc NodePort 10.7.241.35 <none> 80:30177/TCP 1m
```
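Because `awx-web-svc` is a `NodePort` service, the web UI is also reachable directly on any cluster node at the allocated port (30177 in the example above); `<node-ip>` below is a placeholder for one of your node addresses:
```bash
# Quick reachability check against the NodePort (placeholder address).
curl -I http://<node-ip>:30177/
```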
The deployment process also creates an `Ingress` named `awx-web-svc`. Some Kubernetes cloud providers will automatically handle routing configuration when an Ingress is created; others may require you to configure it more explicitly. You can see what Kubernetes knows about the Ingress with:
```bash
kubectl get ing --namespace awx
NAME HOSTS ADDRESS PORTS AGE
awx-web-svc * 35.227.x.y 80 3m
```
If your provider is able to allocate an IP address from the Ingress controller, you can navigate to that address and access the AWX interface. For some providers it can take a few minutes for the address to be allocated and become accessible; for others, you may need to intervene manually.
### SSL Termination
Unlike OpenShift's `Route`, the Kubernetes `Ingress` doesn't yet handle SSL termination, so the default configuration will only expose AWX over HTTP on port 80. You are responsible for configuring SSL support until support is added (either to Kubernetes or to AWX itself).
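One possible starting point, offered here only as a sketch (it assumes your certificate and key already exist and that your Ingress controller honors the standard Kubernetes TLS configuration, which the installer does not set up for you), is to load the certificate as a TLS secret and then reference it from the Ingress yourself:
```bash
# Store an existing certificate/key pair as a TLS secret (paths are placeholders).
kubectl create secret tls awx-web-tls \
    --cert=/path/to/tls.crt --key=/path/to/tls.key --namespace awx
```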
## Docker or Docker Compose
### Prerequisites
- [Docker](https://docs.docker.com/engine/installation/) on the host where AWX will be deployed. After installing Docker, the Docker service must be started (depending on your OS, you may have to add the local user that uses Docker to the ``docker`` group; refer to the documentation for details)
- [docker-py](https://github.com/docker/docker-py) Python module.
If you're installing using Docker Compose, you'll need [Docker Compose](https://docs.docker.com/compose/install/).
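On a typical Linux host, the prerequisites might be satisfied along these lines (a sketch; package names and whether you need the `usermod` step vary by distribution):
```bash
# Let the current user talk to the Docker daemon (log out and back in afterwards).
sudo usermod -aG docker $USER
# Install the Python Docker bindings used by the installer.
pip install docker-py
# Only needed for the Docker Compose install path.
pip install docker-compose
```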
### Pre-build steps
@@ -323,6 +400,13 @@ Before starting the build process, review the [inventory](./installer/inventory)
> Provide a port number that can be mapped from the Docker daemon host to the web server running inside the AWX container. Defaults to *80*.
*use_docker_compose*
> Switch to ``true`` to use Docker Compose instead of the standalone Docker install.
*docker_compose_dir*
> When using Docker Compose, the `docker-compose.yml` file will be created there (default `/var/lib/awx`); see the excerpt below.
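A hypothetical `[all:vars]` excerpt enabling the Compose path (the directory value matches the documented default; `use_docker_compose` must be switched on explicitly):
```bash
use_docker_compose=true
docker_compose_dir=/var/lib/awx
```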
#### Docker registry
@@ -404,6 +488,8 @@ e240ed8209cd awx_task:1.0.0.8 "/tini -- /bin/sh ..." 2 minutes ago
97e196120ab3 postgres:9.6 "docker-entrypoint..." 2 minutes ago Up 2 minutes 5432/tcp postgres
```
If you're deploying using Docker Compose, container names will be prefixed by the name of the folder where the `docker-compose.yml` file is created (by default, `awx`).
Immediately after the containers start, the *awx_task* container will perform required setup tasks, including database migrations. These tasks need to complete before the web interface can be accessed. To monitor the progress, you can follow the container's STDOUT by running the following:
```bash
@@ -466,3 +552,14 @@ Added instance awx to tower
The AWX web server is accessible on the deployment host, using the *host_port* value set in the *inventory* file. The default URL is [http://localhost](http://localhost).
You will be prompted with a login dialog. The default administrator username is `admin`, and the password is `password`.
### Maintenance using docker-compose
After the installation, maintenance operations with docker-compose can be done by using the `docker-compose.yml` file created at the location pointed to by `docker_compose_dir`.
Among the possible operations, you may:
- Stop AWX: `docker-compose stop`
- Upgrade AWX: `docker-compose pull && docker-compose up --force-recreate`
See the [docker-compose documentation](https://docs.docker.com/compose/) for details.
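Other routine operations follow the same pattern; for example (service names depend on the generated `docker-compose.yml`, so treat this as a sketch):
```bash
cd /var/lib/awx
# Tail the logs of all services.
docker-compose logs -f
# Restart the containers in place.
docker-compose restart
```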

View File

@@ -12,10 +12,10 @@ MANAGEMENT_COMMAND ?= awx-manage
IMAGE_REPOSITORY_AUTH ?=
IMAGE_REPOSITORY_BASE ?= https://gcr.io
VERSION=$(shell git describe --long)
VERSION3=$(shell git describe --long | sed 's/\-g.*//')
VERSION3DOT=$(shell git describe --long | sed 's/\-g.*//' | sed 's/\-/\./')
RELEASE_VERSION=$(shell git describe --long | sed 's@\([0-9.]\{1,\}\).*@\1@')
VERSION=$(shell git describe --long --first-parent)
VERSION3=$(shell git describe --long --first-parent | sed 's/\-g.*//')
VERSION3DOT=$(shell git describe --long --first-parent | sed 's/\-g.*//' | sed 's/\-/\./')
RELEASE_VERSION=$(shell git describe --long --first-parent | sed 's@\([0-9.]\{1,\}\).*@\1@')
# NOTE: This defaults the container image version to the branch that's active
COMPOSE_TAG ?= $(GIT_BRANCH)
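For context, `--first-parent` makes `git describe` follow only the first parent of merge commits, so the version is derived from the release branch's own history rather than a merged topic branch. Running the pipeline by hand shows what each variable captures (the tag and hash below are hypothetical):
```bash
$ git describe --long --first-parent                                     # VERSION
1.0.3-42-g8505783
$ git describe --long --first-parent | sed 's/\-g.*//'                   # VERSION3
1.0.3-42
$ git describe --long --first-parent | sed 's/\-g.*//' | sed 's/\-/\./'  # VERSION3DOT
1.0.3.42
$ git describe --long --first-parent | sed 's@\([0-9.]\{1,\}\).*@\1@'    # RELEASE_VERSION
1.0.3
```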
@@ -23,7 +23,7 @@ COMPOSE_HOST ?= $(shell hostname)
VENV_BASE ?= /venv
SCL_PREFIX ?=
CELERY_SCHEDULE_FILE ?= /celerybeat-schedule
CELERY_SCHEDULE_FILE ?= /var/lib/awx/beat.db
DEV_DOCKER_TAG_BASE ?= gcr.io/ansible-tower-engineering
# Python packages to install only from source (not from binary wheels)
@@ -216,13 +216,11 @@ init:
. $(VENV_BASE)/awx/bin/activate; \
fi; \
$(MANAGEMENT_COMMAND) provision_instance --hostname=$(COMPOSE_HOST); \
$(MANAGEMENT_COMMAND) register_queue --queuename=tower --hostnames=$(COMPOSE_HOST);\
$(MANAGEMENT_COMMAND) register_queue --queuename=tower --instance_percent=100;\
if [ "$(AWX_GROUP_QUEUES)" == "tower,thepentagon" ]; then \
$(MANAGEMENT_COMMAND) provision_instance --hostname=isolated; \
$(MANAGEMENT_COMMAND) register_queue --queuename='thepentagon' --hostnames=isolated --controller=tower; \
$(MANAGEMENT_COMMAND) generate_isolated_key | ssh -o "StrictHostKeyChecking no" root@isolated 'cat > /root/.ssh/authorized_keys'; \
elif [ "$(AWX_GROUP_QUEUES)" != "tower" ]; then \
$(MANAGEMENT_COMMAND) register_queue --queuename=$(firstword $(subst $(comma), ,$(AWX_GROUP_QUEUES))) --hostnames=$(COMPOSE_HOST); \
fi;
# Refresh development environment after pulling new code.
@@ -299,7 +297,7 @@ uwsgi: collectstatic
@if [ "$(VENV_BASE)" ]; then \
. $(VENV_BASE)/awx/bin/activate; \
fi; \
uwsgi -b 32768 --socket 127.0.0.1:8050 --module=awx.wsgi:application --home=/venv/awx --chdir=/awx_devel/ --vacuum --processes=5 --harakiri=120 --master --no-orphans --py-autoreload 1 --max-requests=1000 --stats /tmp/stats.socket --master-fifo=/awxfifo --lazy-apps --logformat "%(addr) %(method) %(uri) - %(proto) %(status)"
uwsgi -b 32768 --socket 127.0.0.1:8050 --module=awx.wsgi:application --home=/venv/awx --chdir=/awx_devel/ --vacuum --processes=5 --harakiri=120 --master --no-orphans --py-autoreload 1 --max-requests=1000 --stats /tmp/stats.socket --master-fifo=/awxfifo --lazy-apps --logformat "%(addr) %(method) %(uri) - %(proto) %(status)" --hook-accepting1-once="exec:/bin/sh -c '[ -f /tmp/celery_pid ] && kill -1 `cat /tmp/celery_pid` || true'"
daphne:
@if [ "$(VENV_BASE)" ]; then \
@@ -322,10 +320,11 @@ runserver:
# Run to start the background celery worker for development.
celeryd:
rm -f /tmp/celery_pid
@if [ "$(VENV_BASE)" ]; then \
. $(VENV_BASE)/awx/bin/activate; \
fi; \
celery worker -A awx -l DEBUG -B -Ofair --autoscale=100,4 --schedule=$(CELERY_SCHEDULE_FILE) -Q tower_scheduler,tower_broadcast_all,$(COMPOSE_HOST),$(AWX_GROUP_QUEUES) -n celery@$(COMPOSE_HOST)
celery worker -A awx -l DEBUG -B -Ofair --autoscale=100,4 --schedule=$(CELERY_SCHEDULE_FILE) -Q tower_broadcast_all -n celery@$(COMPOSE_HOST) --pidfile /tmp/celery_pid
# Run to start the zeromq callback receiver
receiver:
@@ -364,6 +363,12 @@ pyflakes: reports
pylint: reports
@(set -o pipefail && $@ | tee reports/$@.report)
swagger: reports
@if [ "$(VENV_BASE)" ]; then \
. $(VENV_BASE)/awx/bin/activate; \
fi; \
(set -o pipefail && py.test awx/conf/tests/functional awx/main/tests/functional/api awx/main/tests/docs --release=$(VERSION_TARGET) | tee reports/$@.report)
check: flake8 pep8 # pyflakes pylint
TEST_DIRS ?= awx/main/tests/unit awx/main/tests/functional awx/conf/tests awx/sso/tests
@@ -607,7 +612,7 @@ clean-elk:
docker rm tools_kibana_1
psql-container:
docker run -it --net tools_default --rm postgres:9.4.1 sh -c 'exec psql -h "postgres" -p "5432" -U postgres'
docker run -it --net tools_default --rm postgres:9.6 sh -c 'exec psql -h "postgres" -p "5432" -U postgres'
VERSION:
@echo $(VERSION_TARGET) > $@

View File

@@ -166,7 +166,13 @@ class FieldLookupBackend(BaseFilterBackend):
elif isinstance(field, models.BooleanField):
return to_python_boolean(value)
elif isinstance(field, (ForeignObjectRel, ManyToManyField, GenericForeignKey, ForeignKey)):
return self.to_python_related(value)
try:
return self.to_python_related(value)
except ValueError:
raise ParseError(_('Invalid {field_name} id: {field_id}').format(
field_name=getattr(field, 'name', 'related field'),
field_id=value)
)
else:
return field.to_python(value)
@@ -243,11 +249,10 @@ class FieldLookupBackend(BaseFilterBackend):
# Search across related objects.
if key.endswith('__search'):
for value in values:
for search_term in force_text(value).replace(',', ' ').split():
search_value, new_keys = self.value_to_python(queryset.model, key, search_term)
assert isinstance(new_keys, list)
for new_key in new_keys:
search_filters.append((new_key, search_value))
search_value, new_keys = self.value_to_python(queryset.model, key, force_text(value))
assert isinstance(new_keys, list)
for new_key in new_keys:
search_filters.append((new_key, search_value))
continue
# Custom chain__ and or__ filters, mutually exclusive (both can
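The net effect of the `__search` hunk above: the value is no longer split on spaces and commas into independent terms. An illustrative request (URL and credentials are placeholders):
```bash
# Before this change the filter matched hosts containing "web" OR "db";
# afterwards it searches for the single literal term "web,db".
curl -k -u admin:password 'https://localhost:8043/api/v2/hosts/?name__search=web,db'
```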

View File

@@ -5,6 +5,7 @@
import inspect
import logging
import time
import six
# Django
from django.conf import settings
@@ -21,31 +22,38 @@ from django.utils.translation import ugettext_lazy as _
# Django REST Framework
from rest_framework.authentication import get_authorization_header
from rest_framework.exceptions import PermissionDenied
from rest_framework.exceptions import PermissionDenied, AuthenticationFailed
from rest_framework import generics
from rest_framework.response import Response
from rest_framework import status
from rest_framework import views
from rest_framework.permissions import AllowAny
# cryptography
from cryptography.fernet import InvalidToken
# AWX
from awx.api.filters import FieldLookupBackend
from awx.main.models import * # noqa
from awx.main.access import access_registry
from awx.main.utils import * # noqa
from awx.main.utils.db import get_all_field_names
from awx.api.serializers import ResourceAccessListElementSerializer
from awx.api.serializers import ResourceAccessListElementSerializer, CopySerializer
from awx.api.versioning import URLPathVersioning, get_request_version
from awx.api.metadata import SublistAttachDetatchMetadata
from awx.api.metadata import SublistAttachDetatchMetadata, Metadata
__all__ = ['APIView', 'GenericAPIView', 'ListAPIView', 'SimpleListAPIView',
'ListCreateAPIView', 'SubListAPIView', 'SubListCreateAPIView',
'SubListDestroyAPIView',
'SubListCreateAttachDetachAPIView', 'RetrieveAPIView',
'RetrieveUpdateAPIView', 'RetrieveDestroyAPIView',
'RetrieveUpdateDestroyAPIView', 'DestroyAPIView',
'RetrieveUpdateDestroyAPIView',
'SubDetailAPIView',
'ResourceAccessList',
'ParentMixin',
'DeleteLastUnattachLabelMixin',
'SubListAttachDetachAPIView',]
'SubListAttachDetachAPIView',
'CopyAPIView']
logger = logging.getLogger('awx.api.generics')
analytics_logger = logging.getLogger('awx.analytics.performance')
@@ -89,8 +97,17 @@ def get_view_description(cls, request, html=False):
return mark_safe(desc)
def get_default_schema():
if settings.SETTINGS_MODULE == 'awx.settings.development':
from awx.api.swagger import AutoSchema
return AutoSchema()
else:
return views.APIView.schema
class APIView(views.APIView):
schema = get_default_schema()
versioning_class = URLPathVersioning
def initialize_request(self, request, *args, **kwargs):
@@ -115,6 +132,10 @@ class APIView(views.APIView):
drf_request = super(APIView, self).initialize_request(request, *args, **kwargs)
request.drf_request = drf_request
try:
request.drf_request_user = getattr(drf_request, 'user', False)
except AuthenticationFailed:
request.drf_request_user = None
return drf_request
def finalize_response(self, request, response, *args, **kwargs):
@@ -140,7 +161,6 @@ class APIView(views.APIView):
response['X-API-Query-Count'] = len(q_times)
response['X-API-Query-Time'] = '%0.3fs' % sum(q_times)
analytics_logger.info("api response", extra=dict(python_objects=dict(request=request, response=response)))
return response
def get_authenticate_header(self, request):
@@ -171,27 +191,14 @@ class APIView(views.APIView):
and in the browsable API.
"""
func = self.settings.VIEW_DESCRIPTION_FUNCTION
return func(self.__class__, self._request, html)
return func(self.__class__, getattr(self, '_request', None), html)
def get_description_context(self):
return {
'view': self,
'docstring': type(self).__doc__ or '',
'new_in_13': getattr(self, 'new_in_13', False),
'new_in_14': getattr(self, 'new_in_14', False),
'new_in_145': getattr(self, 'new_in_145', False),
'new_in_148': getattr(self, 'new_in_148', False),
'new_in_200': getattr(self, 'new_in_200', False),
'new_in_210': getattr(self, 'new_in_210', False),
'new_in_220': getattr(self, 'new_in_220', False),
'new_in_230': getattr(self, 'new_in_230', False),
'new_in_240': getattr(self, 'new_in_240', False),
'new_in_300': getattr(self, 'new_in_300', False),
'new_in_310': getattr(self, 'new_in_310', False),
'new_in_320': getattr(self, 'new_in_320', False),
'new_in_330': getattr(self, 'new_in_330', False),
'new_in_api_v2': getattr(self, 'new_in_api_v2', False),
'deprecated': getattr(self, 'deprecated', False),
'swagger_method': getattr(self.request, 'swagger_method', None),
}
def get_description(self, request, html=False):
@@ -209,7 +216,7 @@ class APIView(views.APIView):
context['deprecated'] = True
description = render_to_string(template_list, context)
if context.get('deprecated'):
if context.get('deprecated') and context.get('swagger_method') is None:
# render deprecation messages at the very top
description = '\n'.join([render_to_string('api/_deprecated.md', context), description])
return description
@@ -269,12 +276,17 @@ class GenericAPIView(generics.GenericAPIView, APIView):
return serializer
def get_queryset(self):
#if hasattr(self.request.user, 'get_queryset'):
# return self.request.user.get_queryset(self.model)
if self.queryset is not None:
return self.queryset._clone()
elif self.model is not None:
return self.model._default_manager.all()
qs = self.model._default_manager
if self.model in access_registry:
access_class = access_registry[self.model]
if access_class.select_related:
qs = qs.select_related(*access_class.select_related)
if access_class.prefetch_related:
qs = qs.prefetch_related(*access_class.prefetch_related)
return qs
else:
return super(GenericAPIView, self).get_queryset()
@@ -442,6 +454,41 @@ class SubListAPIView(ParentMixin, ListAPIView):
return qs & sublist_qs
class DestroyAPIView(generics.DestroyAPIView):
def has_delete_permission(self, obj):
return self.request.user.can_access(self.model, 'delete', obj)
def perform_destroy(self, instance, check_permission=True):
if check_permission and not self.has_delete_permission(instance):
raise PermissionDenied()
super(DestroyAPIView, self).perform_destroy(instance)
class SubListDestroyAPIView(DestroyAPIView, SubListAPIView):
"""
Concrete view for deleting everything related by `relationship`.
"""
check_sub_obj_permission = True
def destroy(self, request, *args, **kwargs):
instance_list = self.get_queryset()
if (not self.check_sub_obj_permission and
not request.user.can_access(self.parent_model, 'delete', self.get_parent_object())):
raise PermissionDenied()
self.perform_list_destroy(instance_list)
return Response(status=status.HTTP_204_NO_CONTENT)
def perform_list_destroy(self, instance_list):
if self.check_sub_obj_permission:
# Check permissions for all before deleting, avoiding half-deleted lists
for instance in instance_list:
if not self.has_delete_permission(instance):
raise PermissionDenied()
for instance in instance_list:
self.perform_destroy(instance, check_permission=False)
class SubListCreateAPIView(SubListAPIView, ListCreateAPIView):
# Base class for a sublist view that allows for creating subobjects
# associated with the parent object.
@@ -680,22 +727,11 @@ class RetrieveUpdateAPIView(RetrieveAPIView, generics.RetrieveUpdateAPIView):
pass
class RetrieveDestroyAPIView(RetrieveAPIView, generics.RetrieveDestroyAPIView):
def destroy(self, request, *args, **kwargs):
# somewhat lame that delete has to call it's own permissions check
obj = self.get_object()
if not request.user.can_access(self.model, 'delete', obj):
raise PermissionDenied()
obj.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
class RetrieveUpdateDestroyAPIView(RetrieveUpdateAPIView, RetrieveDestroyAPIView):
class RetrieveDestroyAPIView(RetrieveAPIView, DestroyAPIView):
pass
class DestroyAPIView(GenericAPIView, generics.DestroyAPIView):
class RetrieveUpdateDestroyAPIView(RetrieveUpdateAPIView, DestroyAPIView):
pass
@@ -713,3 +749,152 @@ class ResourceAccessList(ParentMixin, ListAPIView):
for r in roles:
ancestors.update(set(r.ancestors.all()))
return User.objects.filter(roles__in=list(ancestors)).distinct()
def trigger_delayed_deep_copy(*args, **kwargs):
from awx.main.tasks import deep_copy_model_obj
connection.on_commit(lambda: deep_copy_model_obj.delay(*args, **kwargs))
class CopyAPIView(GenericAPIView):
serializer_class = CopySerializer
permission_classes = (AllowAny,)
copy_return_serializer_class = None
new_in_330 = True
new_in_api_v2 = True
def _get_copy_return_serializer(self, *args, **kwargs):
if not self.copy_return_serializer_class:
return self.get_serializer(*args, **kwargs)
serializer_class_store = self.serializer_class
self.serializer_class = self.copy_return_serializer_class
ret = self.get_serializer(*args, **kwargs)
self.serializer_class = serializer_class_store
return ret
@staticmethod
def _decrypt_model_field_if_needed(obj, field_name, field_val):
if field_name in getattr(type(obj), 'REENCRYPTION_BLACKLIST_AT_COPY', []):
return field_val
if isinstance(field_val, dict):
for sub_field in field_val:
if isinstance(sub_field, six.string_types) \
and isinstance(field_val[sub_field], six.string_types):
try:
field_val[sub_field] = decrypt_field(obj, field_name, sub_field)
except InvalidToken:
# Catching the corner case with v1 credential fields
field_val[sub_field] = decrypt_field(obj, sub_field)
elif isinstance(field_val, six.string_types):
field_val = decrypt_field(obj, field_name)
return field_val
def _build_create_dict(self, obj):
ret = {}
if self.copy_return_serializer_class:
all_fields = Metadata().get_serializer_info(
self._get_copy_return_serializer(), method='POST'
)
for field_name, field_info in all_fields.items():
if not hasattr(obj, field_name) or field_info.get('read_only', True):
continue
ret[field_name] = CopyAPIView._decrypt_model_field_if_needed(
obj, field_name, getattr(obj, field_name)
)
return ret
@staticmethod
def copy_model_obj(old_parent, new_parent, model, obj, creater, copy_name='', create_kwargs=None):
fields_to_preserve = set(getattr(model, 'FIELDS_TO_PRESERVE_AT_COPY', []))
fields_to_discard = set(getattr(model, 'FIELDS_TO_DISCARD_AT_COPY', []))
m2m_to_preserve = {}
o2m_to_preserve = {}
create_kwargs = create_kwargs or {}
for field_name in fields_to_discard:
create_kwargs.pop(field_name, None)
for field in model._meta.get_fields():
try:
field_val = getattr(obj, field.name)
except AttributeError:
continue
# Adjust copy blacklist fields here.
if field.name in fields_to_discard or field.name in [
'id', 'pk', 'polymorphic_ctype', 'unifiedjobtemplate_ptr', 'created_by', 'modified_by'
] or field.name.endswith('_role'):
create_kwargs.pop(field.name, None)
continue
if field.one_to_many:
if field.name in fields_to_preserve:
o2m_to_preserve[field.name] = field_val
elif field.many_to_many:
if field.name in fields_to_preserve and not old_parent:
m2m_to_preserve[field.name] = field_val
elif field.many_to_one and not field_val:
create_kwargs.pop(field.name, None)
elif field.many_to_one and field_val == old_parent:
create_kwargs[field.name] = new_parent
elif field.name == 'name' and not old_parent:
create_kwargs[field.name] = copy_name or field_val + ' copy'
elif field.name in fields_to_preserve:
create_kwargs[field.name] = CopyAPIView._decrypt_model_field_if_needed(
obj, field.name, field_val
)
new_obj = model.objects.create(**create_kwargs)
# Need to save separately because django-crum's get_current_user would
# not work properly in a non-request-response-cycle context.
new_obj.created_by = creater
new_obj.save()
for m2m in m2m_to_preserve:
for related_obj in m2m_to_preserve[m2m].all():
getattr(new_obj, m2m).add(related_obj)
if not old_parent:
sub_objects = []
for o2m in o2m_to_preserve:
for sub_obj in o2m_to_preserve[o2m].all():
sub_model = type(sub_obj)
sub_objects.append((sub_model.__module__, sub_model.__name__, sub_obj.pk))
return new_obj, sub_objects
ret = {obj: new_obj}
for o2m in o2m_to_preserve:
for sub_obj in o2m_to_preserve[o2m].all():
ret.update(CopyAPIView.copy_model_obj(obj, new_obj, type(sub_obj), sub_obj, creater))
return ret
def get(self, request, *args, **kwargs):
obj = self.get_object()
create_kwargs = self._build_create_dict(obj)
for key in create_kwargs:
create_kwargs[key] = getattr(create_kwargs[key], 'pk', None) or create_kwargs[key]
return Response({'can_copy': request.user.can_access(self.model, 'add', create_kwargs)})
def post(self, request, *args, **kwargs):
obj = self.get_object()
create_kwargs = self._build_create_dict(obj)
create_kwargs_check = {}
for key in create_kwargs:
create_kwargs_check[key] = getattr(create_kwargs[key], 'pk', None) or create_kwargs[key]
if not request.user.can_access(self.model, 'add', create_kwargs_check):
raise PermissionDenied()
serializer = self.get_serializer(data=request.data)
if not serializer.is_valid():
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
new_obj, sub_objs = CopyAPIView.copy_model_obj(
None, None, self.model, obj, request.user, create_kwargs=create_kwargs,
copy_name=serializer.validated_data.get('name', '')
)
if hasattr(new_obj, 'admin_role') and request.user not in new_obj.admin_role:
new_obj.admin_role.members.add(request.user)
if sub_objs:
permission_check_func = None
if hasattr(type(self), 'deep_copy_permission_check_func'):
permission_check_func = (
type(self).__module__, type(self).__name__, 'deep_copy_permission_check_func'
)
trigger_delayed_deep_copy(
self.model.__module__, self.model.__name__,
obj.pk, new_obj.pk, request.user.pk, sub_objs,
permission_check_func=permission_check_func
)
serializer = self._get_copy_return_serializer(new_obj)
return Response(serializer.data, status=status.HTTP_201_CREATED)
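Together with the `copy` links the serializers below add, this view backs the new `/copy/` endpoints. A hypothetical session against a job template (host, id, and credentials are illustrative):
```bash
# GET asks whether the requesting user may copy the object.
curl -k -u admin:password https://localhost:8043/api/v2/job_templates/5/copy/
# => {"can_copy": true}

# POST creates the copy; the new name must differ from the original's.
curl -k -u admin:password -X POST -H 'Content-Type: application/json' \
     -d '{"name": "Demo Job Template copy"}' \
     https://localhost:8043/api/v2/job_templates/5/copy/
```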

View File

@@ -190,23 +190,6 @@ class Metadata(metadata.SimpleMetadata):
finally:
delattr(view, '_request')
# Add version number in which view was added to Tower.
added_in_version = '1.2'
for version in ('3.2.0', '3.1.0', '3.0.0', '2.4.0', '2.3.0', '2.2.0',
'2.1.0', '2.0.0', '1.4.8', '1.4.5', '1.4', '1.3'):
if getattr(view, 'new_in_%s' % version.replace('.', ''), False):
added_in_version = version
break
metadata['added_in_version'] = added_in_version
# Add API version number in which view was added to Tower.
added_in_api_version = 'v1'
for version in ('v2',):
if getattr(view, 'new_in_api_%s' % version, False):
added_in_api_version = version
break
metadata['added_in_api_version'] = added_in_api_version
# Add type(s) handled by this view/serializer.
if hasattr(view, 'get_serializer'):
serializer = view.get_serializer()

View File

@@ -33,7 +33,7 @@ class OrderedDictLoader(yaml.SafeLoader):
key = self.construct_object(key_node, deep=deep)
try:
hash(key)
except TypeError, exc:
except TypeError as exc:
raise yaml.constructor.ConstructorError(
"while constructing a mapping", node.start_mark,
"found unacceptable key (%s)" % exc, key_node.start_mark

View File

@@ -5,6 +5,8 @@
from rest_framework import renderers
from rest_framework.request import override_method
import six
class BrowsableAPIRenderer(renderers.BrowsableAPIRenderer):
'''
@@ -69,8 +71,8 @@ class PlainTextRenderer(renderers.BaseRenderer):
format = 'txt'
def render(self, data, media_type=None, renderer_context=None):
if not isinstance(data, basestring):
data = unicode(data)
if not isinstance(data, six.string_types):
data = six.text_type(data)
return data.encode(self.charset)

View File

@@ -9,7 +9,6 @@ import re
import six
import urllib
from collections import OrderedDict
from dateutil import rrule
# Django
from django.conf import settings
@@ -44,7 +43,7 @@ from awx.main.fields import ImplicitRoleField
from awx.main.utils import (
get_type_for_model, get_model_for_type, timestamp_apiformat,
camelcase_to_underscore, getattrd, parse_yaml_or_json,
has_model_field_prefetched, extract_ansible_vars)
has_model_field_prefetched, extract_ansible_vars, encrypt_dict)
from awx.main.utils.filters import SmartFilter
from awx.main.redact import REPLACE_STR
@@ -131,6 +130,22 @@ def reverse_gfk(content_object, request):
}
class CopySerializer(serializers.Serializer):
name = serializers.CharField()
def validate(self, attrs):
name = attrs.get('name')
view = self.context.get('view', None)
obj = view.get_object()
if name == obj.name:
raise serializers.ValidationError(_(
'The original object is already named {}; a copy of'
' it cannot have the same name.'.format(name)
))
return attrs
class BaseSerializerMetaclass(serializers.SerializerMetaclass):
'''
Custom metaclass to enable attribute inheritance from Meta objects on
@@ -345,7 +360,9 @@ class BaseSerializer(serializers.ModelSerializer):
continue
summary_fields[fk] = OrderedDict()
for field in related_fields:
if field == 'credential_type_id' and fk == 'credential' and self.version < 2: # TODO: remove version check in 3.3
if (
self.version < 2 and field == 'credential_type_id' and
fk in ['credential', 'vault_credential']): # TODO: remove version check in 3.3
continue
fval = getattr(fkval, field, None)
@@ -612,14 +629,12 @@ class UnifiedJobTemplateSerializer(BaseSerializer):
class UnifiedJobSerializer(BaseSerializer):
show_capabilities = ['start', 'delete']
result_stdout = serializers.SerializerMethodField()
class Meta:
model = UnifiedJob
fields = ('*', 'unified_job_template', 'launch_type', 'status',
'failed', 'started', 'finished', 'elapsed', 'job_args',
'job_cwd', 'job_env', 'job_explanation', 'result_stdout',
'execution_node', 'result_traceback')
'job_cwd', 'job_env', 'job_explanation', 'execution_node',
'result_traceback')
extra_kwargs = {
'unified_job_template': {
'source': 'unified_job_template_id',
@@ -700,25 +715,17 @@ class UnifiedJobSerializer(BaseSerializer):
return ret
def get_result_stdout(self, obj):
obj_size = obj.result_stdout_size
if obj_size > settings.STDOUT_MAX_BYTES_DISPLAY:
return _("Standard Output too large to display (%(text_size)d bytes), "
"only download supported for sizes over %(supported_size)d bytes") % {
'text_size': obj_size, 'supported_size': settings.STDOUT_MAX_BYTES_DISPLAY}
return obj.result_stdout
class UnifiedJobListSerializer(UnifiedJobSerializer):
class Meta:
fields = ('*', '-job_args', '-job_cwd', '-job_env', '-result_traceback', '-result_stdout')
fields = ('*', '-job_args', '-job_cwd', '-job_env', '-result_traceback')
def get_field_names(self, declared_fields, info):
field_names = super(UnifiedJobListSerializer, self).get_field_names(declared_fields, info)
# Meta multiple inheritance and -field_name options don't seem to be
# taking effect above, so remove the undesired fields here.
return tuple(x for x in field_names if x not in ('job_args', 'job_cwd', 'job_env', 'result_traceback', 'result_stdout'))
return tuple(x for x in field_names if x not in ('job_args', 'job_cwd', 'job_env', 'result_traceback'))
def get_types(self):
if type(self) is UnifiedJobListSerializer:
@@ -758,14 +765,6 @@ class UnifiedJobStdoutSerializer(UnifiedJobSerializer):
class Meta:
fields = ('result_stdout',)
def get_result_stdout(self, obj):
obj_size = obj.result_stdout_size
if obj_size > settings.STDOUT_MAX_BYTES_DISPLAY:
return _("Standard Output too large to display (%(text_size)d bytes), "
"only download supported for sizes over %(supported_size)d bytes") % {
'text_size': obj_size, 'supported_size': settings.STDOUT_MAX_BYTES_DISPLAY}
return obj.result_stdout
def get_types(self):
if type(self) is UnifiedJobStdoutSerializer:
return ['project_update', 'inventory_update', 'job', 'ad_hoc_command', 'system_job']
@@ -912,7 +911,7 @@ class OrganizationSerializer(BaseSerializer):
class Meta:
model = Organization
fields = ('*',)
fields = ('*', 'custom_virtualenv',)
def get_related(self, obj):
res = super(OrganizationSerializer, self).get_related(obj)
@@ -1000,7 +999,7 @@ class ProjectSerializer(UnifiedJobTemplateSerializer, ProjectOptionsSerializer):
class Meta:
model = Project
fields = ('*', 'organization', 'scm_delete_on_next_update', 'scm_update_on_launch',
'scm_update_cache_timeout', 'scm_revision',) + \
'scm_update_cache_timeout', 'scm_revision', 'custom_virtualenv',) + \
('last_update_failed', 'last_updated') # Backwards compatibility
read_only_fields = ('scm_delete_on_next_update',)
@@ -1020,6 +1019,7 @@ class ProjectSerializer(UnifiedJobTemplateSerializer, ProjectOptionsSerializer):
notification_templates_error = self.reverse('api:project_notification_templates_error_list', kwargs={'pk': obj.pk}),
access_list = self.reverse('api:project_access_list', kwargs={'pk': obj.pk}),
object_roles = self.reverse('api:project_object_roles_list', kwargs={'pk': obj.pk}),
copy = self.reverse('api:project_copy', kwargs={'pk': obj.pk}),
))
if obj.organization:
res['organization'] = self.reverse('api:organization_detail',
@@ -1111,11 +1111,17 @@ class ProjectUpdateSerializer(UnifiedJobSerializer, ProjectOptionsSerializer):
def get_related(self, obj):
res = super(ProjectUpdateSerializer, self).get_related(obj)
try:
res.update(dict(
project = self.reverse('api:project_detail', kwargs={'pk': obj.project.pk}),
))
except ObjectDoesNotExist:
pass
res.update(dict(
project = self.reverse('api:project_detail', kwargs={'pk': obj.project.pk}),
cancel = self.reverse('api:project_update_cancel', kwargs={'pk': obj.pk}),
scm_inventory_updates = self.reverse('api:project_update_scm_inventory_updates', kwargs={'pk': obj.pk}),
notifications = self.reverse('api:project_update_notifications_list', kwargs={'pk': obj.pk}),
events = self.reverse('api:project_update_events_list', kwargs={'pk': obj.pk}),
))
return res
@@ -1167,6 +1173,7 @@ class InventorySerializer(BaseSerializerWithVariables):
access_list = self.reverse('api:inventory_access_list', kwargs={'pk': obj.pk}),
object_roles = self.reverse('api:inventory_object_roles_list', kwargs={'pk': obj.pk}),
instance_groups = self.reverse('api:inventory_instance_groups_list', kwargs={'pk': obj.pk}),
copy = self.reverse('api:inventory_copy', kwargs={'pk': obj.pk}),
))
if obj.insights_credential:
res['insights_credential'] = self.reverse('api:credential_detail', kwargs={'pk': obj.insights_credential.pk})
@@ -1184,7 +1191,7 @@ class InventorySerializer(BaseSerializerWithVariables):
if host_filter:
try:
SmartFilter().query_from_string(host_filter)
except RuntimeError, e:
except RuntimeError as e:
raise models.base.ValidationError(e)
return host_filter
@@ -1234,8 +1241,9 @@ class HostSerializer(BaseSerializerWithVariables):
model = Host
fields = ('*', 'inventory', 'enabled', 'instance_id', 'variables',
'has_active_failures', 'has_inventory_sources', 'last_job',
'last_job_host_summary', 'insights_system_id')
read_only_fields = ('last_job', 'last_job_host_summary', 'insights_system_id',)
'last_job_host_summary', 'insights_system_id', 'ansible_facts_modified',)
read_only_fields = ('last_job', 'last_job_host_summary', 'insights_system_id',
'ansible_facts_modified',)
def build_relational_field(self, field_name, relation_info):
field_class, field_kwargs = super(HostSerializer, self).build_relational_field(field_name, relation_info)
@@ -1523,6 +1531,7 @@ class CustomInventoryScriptSerializer(BaseSerializer):
res = super(CustomInventoryScriptSerializer, self).get_related(obj)
res.update(dict(
object_roles = self.reverse('api:inventory_script_object_roles_list', kwargs={'pk': obj.pk}),
copy = self.reverse('api:inventory_script_copy', kwargs={'pk': obj.pk}),
))
if obj.organization:
@@ -1726,10 +1735,18 @@ class InventoryUpdateSerializer(UnifiedJobSerializer, InventorySourceOptionsSeri
def get_related(self, obj):
res = super(InventoryUpdateSerializer, self).get_related(obj)
try:
res.update(dict(
inventory_source = self.reverse(
'api:inventory_source_detail', kwargs={'pk': obj.inventory_source.pk}
),
))
except ObjectDoesNotExist:
pass
res.update(dict(
inventory_source = self.reverse('api:inventory_source_detail', kwargs={'pk': obj.inventory_source.pk}),
cancel = self.reverse('api:inventory_update_cancel', kwargs={'pk': obj.pk}),
notifications = self.reverse('api:inventory_update_notifications_list', kwargs={'pk': obj.pk}),
events = self.reverse('api:inventory_update_events_list', kwargs={'pk': obj.pk}),
))
if obj.source_project_update_id:
res['source_project_update'] = self.reverse('api:project_update_detail',
@@ -2072,6 +2089,7 @@ class CredentialSerializer(BaseSerializer):
object_roles = self.reverse('api:credential_object_roles_list', kwargs={'pk': obj.pk}),
owner_users = self.reverse('api:credential_owner_users_list', kwargs={'pk': obj.pk}),
owner_teams = self.reverse('api:credential_owner_teams_list', kwargs={'pk': obj.pk}),
copy = self.reverse('api:credential_copy', kwargs={'pk': obj.pk}),
))
# TODO: remove when API v1 is removed
@@ -2125,7 +2143,7 @@ class CredentialSerializer(BaseSerializer):
def to_internal_value(self, data):
# TODO: remove when API v1 is removed
if 'credential_type' not in data:
if 'credential_type' not in data and self.version == 1:
# If `credential_type` is not provided, assume the payload is a
# v1 credential payload that specifies a `kind` and a flat list
# of field values
@@ -2162,10 +2180,23 @@ class CredentialSerializer(BaseSerializer):
def validate_credential_type(self, credential_type):
if self.instance and credential_type.pk != self.instance.credential_type.pk:
raise ValidationError(
_('You cannot change the credential type of the credential, as it may break the functionality'
' of the resources using it.'),
)
for rel in (
'ad_hoc_commands',
'insights_inventories',
'inventorysources',
'inventoryupdates',
'unifiedjobs',
'unifiedjobtemplates',
'projects',
'projectupdates',
'workflowjobnodes'
):
if getattr(self.instance, rel).count() > 0:
raise ValidationError(
_('You cannot change the credential type of the credential, as it may break the functionality'
' of the resources using it.'),
)
return credential_type
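The hunk above relaxes the old unconditional rejection: the credential type may now change as long as the credential is unused. An illustrative request (id and payload are placeholders):
```bash
# Returns HTTP 400 if the credential is attached to any job template, project,
# inventory source, etc.; it succeeds only for an unused credential.
curl -k -u admin:password -X PATCH -H 'Content-Type: application/json' \
     -d '{"credential_type": 2}' \
     https://localhost:8043/api/v2/credentials/42/
```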
@@ -2346,14 +2377,30 @@ class JobOptionsSerializer(LabelsListMixin, BaseSerializer):
def get_related(self, obj):
res = super(JobOptionsSerializer, self).get_related(obj)
res['labels'] = self.reverse('api:job_template_label_list', kwargs={'pk': obj.pk})
if obj.inventory:
res['inventory'] = self.reverse('api:inventory_detail', kwargs={'pk': obj.inventory.pk})
if obj.project:
res['project'] = self.reverse('api:project_detail', kwargs={'pk': obj.project.pk})
if obj.credential:
res['credential'] = self.reverse('api:credential_detail', kwargs={'pk': obj.credential})
if obj.vault_credential:
res['vault_credential'] = self.reverse('api:credential_detail', kwargs={'pk': obj.vault_credential})
try:
if obj.inventory:
res['inventory'] = self.reverse('api:inventory_detail', kwargs={'pk': obj.inventory.pk})
except ObjectDoesNotExist:
setattr(obj, 'inventory', None)
try:
if obj.project:
res['project'] = self.reverse('api:project_detail', kwargs={'pk': obj.project.pk})
except ObjectDoesNotExist:
setattr(obj, 'project', None)
try:
if obj.credential:
res['credential'] = self.reverse(
'api:credential_detail', kwargs={'pk': obj.credential}
)
except ObjectDoesNotExist:
setattr(obj, 'credential', None)
try:
if obj.vault_credential:
res['vault_credential'] = self.reverse(
'api:credential_detail', kwargs={'pk': obj.vault_credential}
)
except ObjectDoesNotExist:
setattr(obj, 'vault_credential', None)
if self.version > 1:
if isinstance(obj, UnifiedJobTemplate):
res['extra_credentials'] = self.reverse(
@@ -2504,7 +2551,7 @@ class JobTemplateSerializer(JobTemplateMixin, UnifiedJobTemplateSerializer, JobO
fields = ('*', 'host_config_key', 'ask_diff_mode_on_launch', 'ask_variables_on_launch', 'ask_limit_on_launch', 'ask_tags_on_launch',
'ask_skip_tags_on_launch', 'ask_job_type_on_launch', 'ask_verbosity_on_launch', 'ask_inventory_on_launch',
'ask_credential_on_launch', 'survey_enabled', 'become_enabled', 'diff_mode',
'allow_simultaneous')
'allow_simultaneous', 'custom_virtualenv')
def get_related(self, obj):
res = super(JobTemplateSerializer, self).get_related(obj)
@@ -2521,6 +2568,7 @@ class JobTemplateSerializer(JobTemplateMixin, UnifiedJobTemplateSerializer, JobO
labels = self.reverse('api:job_template_label_list', kwargs={'pk': obj.pk}),
object_roles = self.reverse('api:job_template_object_roles_list', kwargs={'pk': obj.pk}),
instance_groups = self.reverse('api:job_template_instance_groups_list', kwargs={'pk': obj.pk}),
copy = self.reverse('api:job_template_copy', kwargs={'pk': obj.pk}),
))
if obj.host_config_key:
res['callback'] = self.reverse('api:job_template_callback', kwargs={'pk': obj.pk})
@@ -2608,15 +2656,23 @@ class JobSerializer(UnifiedJobSerializer, JobOptionsSerializer):
notifications = self.reverse('api:job_notifications_list', kwargs={'pk': obj.pk}),
labels = self.reverse('api:job_label_list', kwargs={'pk': obj.pk}),
))
if obj.job_template:
res['job_template'] = self.reverse('api:job_template_detail',
kwargs={'pk': obj.job_template.pk})
try:
if obj.job_template:
res['job_template'] = self.reverse('api:job_template_detail',
kwargs={'pk': obj.job_template.pk})
except ObjectDoesNotExist:
setattr(obj, 'job_template', None)
if (obj.can_start or True) and self.version == 1: # TODO: remove in 3.3
res['start'] = self.reverse('api:job_start', kwargs={'pk': obj.pk})
if obj.can_cancel or True:
res['cancel'] = self.reverse('api:job_cancel', kwargs={'pk': obj.pk})
if obj.project_update:
res['project_update'] = self.reverse('api:project_update_detail', kwargs={'pk': obj.project_update.pk})
try:
if obj.project_update:
res['project_update'] = self.reverse(
'api:project_update_detail', kwargs={'pk': obj.project_update.pk}
)
except ObjectDoesNotExist:
pass
res['create_schedule'] = self.reverse('api:job_create_schedule', kwargs={'pk': obj.pk})
res['relaunch'] = self.reverse('api:job_relaunch', kwargs={'pk': obj.pk})
return res
@@ -2756,8 +2812,6 @@ class JobRelaunchSerializer(BaseSerializer):
def validate(self, attrs):
obj = self.context.get('obj')
if not obj.credential:
raise serializers.ValidationError(dict(credential=[_("Credential not found or deleted.")]))
if obj.project is None:
raise serializers.ValidationError(dict(errors=[_("Job Template Project is missing or undefined.")]))
if obj.inventory is None or obj.inventory.pending_deletion:
@@ -2914,9 +2968,11 @@ class SystemJobTemplateSerializer(UnifiedJobTemplateSerializer):
class SystemJobSerializer(UnifiedJobSerializer):
result_stdout = serializers.SerializerMethodField()
class Meta:
model = SystemJob
fields = ('*', 'system_job_template', 'job_type', 'extra_vars')
fields = ('*', 'system_job_template', 'job_type', 'extra_vars', 'result_stdout')
def get_related(self, obj):
res = super(SystemJobSerializer, self).get_related(obj)
@@ -2926,8 +2982,19 @@ class SystemJobSerializer(UnifiedJobSerializer):
res['notifications'] = self.reverse('api:system_job_notifications_list', kwargs={'pk': obj.pk})
if obj.can_cancel or True:
res['cancel'] = self.reverse('api:system_job_cancel', kwargs={'pk': obj.pk})
res['events'] = self.reverse('api:system_job_events_list', kwargs={'pk': obj.pk})
return res
def get_result_stdout(self, obj):
try:
return obj.result_stdout
except StdoutMaxBytesExceeded as e:
return _(
"Standard Output too large to display ({text_size} bytes), "
"only download supported for sizes over {supported_size} bytes").format(
text_size=e.total, supported_size=e.supported
)
class SystemJobCancelSerializer(SystemJobSerializer):
@@ -3065,15 +3132,60 @@ class LaunchConfigurationBaseSerializer(BaseSerializer):
ret['extra_data'] = obj.display_extra_data()
return ret
def get_summary_fields(self, obj):
summary_fields = super(LaunchConfigurationBaseSerializer, self).get_summary_fields(obj)
# Credential would be an empty dictionary in this case
summary_fields.pop('credential', None)
return summary_fields
def validate(self, attrs):
attrs = super(LaunchConfigurationBaseSerializer, self).validate(attrs)
# Build unsaved version of this config, use it to detect prompts errors
ujt = None
if 'unified_job_template' in attrs:
ujt = attrs['unified_job_template']
elif self.instance:
ujt = self.instance.unified_job_template
# Replace $encrypted$ submissions with db value if exists
# build additional field survey_passwords to track redacted variables
if 'extra_data' in attrs:
extra_data = parse_yaml_or_json(attrs.get('extra_data', {}))
if hasattr(ujt, 'survey_password_variables'):
# Prepare additional field survey_passwords for save
password_dict = {}
for key in ujt.survey_password_variables():
if key in extra_data:
password_dict[key] = REPLACE_STR
if not self.instance or password_dict != self.instance.survey_passwords:
attrs['survey_passwords'] = password_dict.copy()
# Force dict type (cannot preserve YAML formatting if passwords are involved)
if not isinstance(attrs['extra_data'], dict):
attrs['extra_data'] = parse_yaml_or_json(attrs['extra_data'])
# Encrypt the extra_data for save, only current password vars in JT survey
encrypt_dict(attrs['extra_data'], password_dict.keys())
# For any raw $encrypted$ string, either
# - replace with existing DB value
# - raise a validation error
# - remove key from extra_data if survey default is present
if self.instance:
db_extra_data = parse_yaml_or_json(self.instance.extra_data)
else:
db_extra_data = {}
for key in password_dict.keys():
if attrs['extra_data'].get(key, None) == REPLACE_STR:
if key not in db_extra_data:
element = ujt.pivot_spec(ujt.survey_spec)[key]
if 'default' in element and element['default']:
attrs['survey_passwords'].pop(key, None)
attrs['extra_data'].pop(key, None)
else:
raise serializers.ValidationError(
{"extra_data": _('Provided variable {} has no database value to replace with.').format(key)})
else:
attrs['extra_data'][key] = db_extra_data[key]
# Build unsaved version of this config, use it to detect prompts errors
mock_obj = self._build_mock_obj(attrs)
accepted, rejected, errors = ujt._accept_or_ignore_job_kwargs(
_exclude_errors=self.exclude_errors, **mock_obj.prompts_dict())
@@ -3085,19 +3197,9 @@ class LaunchConfigurationBaseSerializer(BaseSerializer):
raise serializers.ValidationError(errors)
# Model `.save` needs the container dict, not the pseudo fields
attrs['char_prompts'] = mock_obj.char_prompts
if mock_obj.char_prompts:
attrs['char_prompts'] = mock_obj.char_prompts
# Insert survey_passwords to track redacted variables
# TODO: perform encryption on save
if 'extra_data' in attrs:
extra_data = parse_yaml_or_json(attrs.get('extra_data', {}))
if hasattr(ujt, 'survey_password_variables'):
password_dict = {}
for key in ujt.survey_password_variables():
if key in extra_data:
password_dict[key] = REPLACE_STR
if not self.instance or password_dict != self.instance.survey_passwords:
attrs['survey_passwords'] = password_dict
return attrs
@@ -3108,7 +3210,7 @@ class WorkflowJobTemplateNodeSerializer(LaunchConfigurationBaseSerializer):
success_nodes = serializers.PrimaryKeyRelatedField(many=True, read_only=True)
failure_nodes = serializers.PrimaryKeyRelatedField(many=True, read_only=True)
always_nodes = serializers.PrimaryKeyRelatedField(many=True, read_only=True)
exclude_errors = ('required') # required variables may be provided by WFJT or on launch
exclude_errors = ('required',) # required variables may be provided by WFJT or on launch
class Meta:
model = WorkflowJobTemplateNode
@@ -3162,6 +3264,9 @@ class WorkflowJobTemplateNodeSerializer(LaunchConfigurationBaseSerializer):
cred = deprecated_fields['credential']
attrs['credential'] = cred
if cred is not None:
if not ujt_obj.ask_credential_on_launch:
raise serializers.ValidationError({"credential": _(
"Related template is not configured to accept credentials on launch.")})
cred = Credential.objects.get(pk=cred)
view = self.context.get('view', None)
if (not view) or (not view.request) or (view.request.user not in cred.use_role):
@@ -3360,6 +3465,41 @@ class JobEventWebSocketSerializer(JobEventSerializer):
return 'job_events'
class ProjectUpdateEventSerializer(JobEventSerializer):
class Meta:
model = ProjectUpdateEvent
fields = ('*', '-name', '-description', '-job', '-job_id',
'-parent_uuid', '-parent', '-host', 'project_update')
def get_related(self, obj):
res = super(JobEventSerializer, self).get_related(obj)
res['project_update'] = self.reverse(
'api:project_update_detail', kwargs={'pk': obj.project_update_id}
)
return res
class ProjectUpdateEventWebSocketSerializer(ProjectUpdateEventSerializer):
created = serializers.SerializerMethodField()
modified = serializers.SerializerMethodField()
event_name = serializers.CharField(source='event')
group_name = serializers.SerializerMethodField()
class Meta:
model = ProjectUpdateEvent
fields = ('*', 'event_name', 'group_name',)
def get_created(self, obj):
return obj.created.isoformat()
def get_modified(self, obj):
return obj.modified.isoformat()
def get_group_name(self, obj):
return 'project_update_events'
class AdHocCommandEventSerializer(BaseSerializer):
event_display = serializers.CharField(source='get_event_display', read_only=True)
@@ -3419,6 +3559,76 @@ class AdHocCommandEventWebSocketSerializer(AdHocCommandEventSerializer):
return 'ad_hoc_command_events'
class InventoryUpdateEventSerializer(AdHocCommandEventSerializer):
class Meta:
model = InventoryUpdateEvent
fields = ('*', '-name', '-description', '-ad_hoc_command', '-host',
'-host_name', 'inventory_update')
def get_related(self, obj):
res = super(AdHocCommandEventSerializer, self).get_related(obj)
res['inventory_update'] = self.reverse(
'api:inventory_update_detail', kwargs={'pk': obj.inventory_update_id}
)
return res
class InventoryUpdateEventWebSocketSerializer(InventoryUpdateEventSerializer):
created = serializers.SerializerMethodField()
modified = serializers.SerializerMethodField()
event_name = serializers.CharField(source='event')
group_name = serializers.SerializerMethodField()
class Meta:
model = InventoryUpdateEvent
fields = ('*', 'event_name', 'group_name',)
def get_created(self, obj):
return obj.created.isoformat()
def get_modified(self, obj):
return obj.modified.isoformat()
def get_group_name(self, obj):
return 'inventory_update_events'
class SystemJobEventSerializer(AdHocCommandEventSerializer):
class Meta:
model = SystemJobEvent
fields = ('*', '-name', '-description', '-ad_hoc_command', '-host',
'-host_name', 'system_job')
def get_related(self, obj):
res = super(AdHocCommandEventSerializer, self).get_related(obj)
res['system_job'] = self.reverse(
'api:system_job_detail', kwargs={'pk': obj.system_job_id}
)
return res
class SystemJobEventWebSocketSerializer(SystemJobEventSerializer):
created = serializers.SerializerMethodField()
modified = serializers.SerializerMethodField()
event_name = serializers.CharField(source='event')
group_name = serializers.SerializerMethodField()
class Meta:
model = SystemJobEvent
fields = ('*', 'event_name', 'group_name',)
def get_created(self, obj):
return obj.created.isoformat()
def get_modified(self, obj):
return obj.modified.isoformat()
def get_group_name(self, obj):
return 'system_job_events'
class JobLaunchSerializer(BaseSerializer):
# Representational fields
@@ -3483,15 +3693,16 @@ class JobLaunchSerializer(BaseSerializer):
id=getattrd(obj, '%s.pk' % field_name, None))
elif field_name == 'credentials':
if self.version > 1:
defaults_dict[field_name] = [
dict(
for cred in obj.credentials.all():
cred_dict = dict(
id=cred.id,
name=cred.name,
credential_type=cred.credential_type.pk,
passwords_needed=cred.passwords_needed
)
for cred in obj.credentials.all()
]
if cred.credential_type.managed_by_tower and 'vault_id' in cred.credential_type.defined_fields:
cred_dict['vault_id'] = cred.inputs.get('vault_id') or None
defaults_dict.setdefault(field_name, []).append(cred_dict)
else:
defaults_dict[field_name] = getattr(obj, field_name)
return defaults_dict
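For illustration (ids and names hypothetical), the launch-defaults entry assembled above takes this shape, with `vault_id` present only for Tower-managed vault credentials:
defaults_dict = {
    'credentials': [
        {'id': 5, 'name': 'prod-machine', 'credential_type': 1, 'passwords_needed': []},
        {'id': 7, 'name': 'prod-vault', 'credential_type': 3,
         'passwords_needed': [], 'vault_id': 'prod'},
    ]
}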
@@ -3506,7 +3717,7 @@ class JobLaunchSerializer(BaseSerializer):
template = self.context.get('template')
accepted, rejected, errors = template._accept_or_ignore_job_kwargs(
_exclude_errors=['prompts', 'required'], # make several error types non-blocking
_exclude_errors=['prompts'], # make several error types non-blocking
**attrs)
self._ignored_fields = rejected
@@ -3520,15 +3731,30 @@ class JobLaunchSerializer(BaseSerializer):
distinct_cred_kinds = []
for cred in accepted.get('credentials', []):
if cred.unique_hash() in distinct_cred_kinds:
errors['credentials'] = _('Cannot assign multiple %s credentials.' % cred.credential_type.name)
errors.setdefault('credentials', []).append(_(
'Cannot assign multiple {} credentials.'
).format(cred.unique_hash(display=True)))
distinct_cred_kinds.append(cred.unique_hash())
# Prohibit removing credentials from the JT list (unsupported for now)
template_credentials = template.credentials.all()
if 'credentials' in attrs:
removed_creds = set(template_credentials) - set(attrs['credentials'])
provided_mapping = Credential.unique_dict(attrs['credentials'])
for cred in removed_creds:
if cred.unique_hash() in provided_mapping.keys():
continue # User replaced credential with new of same type
errors.setdefault('credentials', []).append(_(
'Removing {} credential at launch time without replacement is not supported. '
'Provided list lacked credential(s): {}.'
).format(cred.unique_hash(display=True), ', '.join([str(c) for c in removed_creds])))
# verify that credentials (either provided or existing) don't
# require launch-time passwords that have not been provided
if 'credentials' in accepted:
launch_credentials = accepted['credentials']
else:
launch_credentials = template.credentials.all()
launch_credentials = template_credentials
passwords = attrs.get('credential_passwords', {}) # get from original attrs
passwords_lacking = []
for cred in launch_credentials:
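To make the two rules above concrete, consider hypothetical launch payloads against a template holding machine credential 10 and vault credential 11:
# {"credentials": [10, 11]}      accepted: every kind on the template is present
# {"credentials": [12, 11]}      accepted: 12 replaces 10, same unique_hash kind
# {"credentials": [11]}          400: machine credential removed without replacement
# {"credentials": [10, 12, 11]}  400: two machine credentials of the same kind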
@@ -3618,6 +3844,7 @@ class NotificationTemplateSerializer(BaseSerializer):
res.update(dict(
test = self.reverse('api:notification_template_test', kwargs={'pk': obj.pk}),
notifications = self.reverse('api:notification_template_notification_list', kwargs={'pk': obj.pk}),
copy = self.reverse('api:notification_template_copy', kwargs={'pk': obj.pk}),
))
if obj.organization:
res['organization'] = self.reverse('api:organization_detail', kwargs={'pk': obj.organization.pk})
@@ -3708,7 +3935,70 @@ class LabelSerializer(BaseSerializer):
return res
class ScheduleSerializer(LaunchConfigurationBaseSerializer):
class SchedulePreviewSerializer(BaseSerializer):
class Meta:
model = Schedule
fields = ('rrule',)
# We reject rrules if:
# - DTSTART is not included
# - INTERVAL is not included
# - SECONDLY is used
# - TZID is used
# - BYDAY prefixed with a number (MO is good but not 20MO)
# - BYYEARDAY
# - BYWEEKNO
# - Multiple DTSTART or RRULE elements
# - Can't contain both COUNT and UNTIL
# - COUNT > 999
def validate_rrule(self, value):
rrule_value = value
multi_by_month_day = ".*?BYMONTHDAY[\:\=][0-9]+,-*[0-9]+"
multi_by_month = ".*?BYMONTH[\:\=][0-9]+,[0-9]+"
by_day_with_numeric_prefix = ".*?BYDAY[\:\=][0-9]+[a-zA-Z]{2}"
match_count = re.match(".*?(COUNT\=[0-9]+)", rrule_value)
match_multiple_dtstart = re.findall(".*?(DTSTART(;[^:]+)?\:[0-9]+T[0-9]+Z?)", rrule_value)
match_native_dtstart = re.findall(".*?(DTSTART:[0-9]+T[0-9]+) ", rrule_value)
match_multiple_rrule = re.findall(".*?(RRULE\:)", rrule_value)
if not len(match_multiple_dtstart):
raise serializers.ValidationError(_('Valid DTSTART required in rrule. Value should start with: DTSTART:YYYYMMDDTHHMMSSZ'))
if len(match_native_dtstart):
raise serializers.ValidationError(_('DTSTART cannot be a naive datetime. Specify ;TZINFO= or YYYYMMDDTHHMMSSZZ.'))
if len(match_multiple_dtstart) > 1:
raise serializers.ValidationError(_('Multiple DTSTART is not supported.'))
if not len(match_multiple_rrule):
raise serializers.ValidationError(_('RRULE required in rrule.'))
if len(match_multiple_rrule) > 1:
raise serializers.ValidationError(_('Multiple RRULE is not supported.'))
if 'interval' not in rrule_value.lower():
raise serializers.ValidationError(_('INTERVAL required in rrule.'))
if 'secondly' in rrule_value.lower():
raise serializers.ValidationError(_('SECONDLY is not supported.'))
if re.match(multi_by_month_day, rrule_value):
raise serializers.ValidationError(_('Multiple BYMONTHDAYs not supported.'))
if re.match(multi_by_month, rrule_value):
raise serializers.ValidationError(_('Multiple BYMONTHs not supported.'))
if re.match(by_day_with_numeric_prefix, rrule_value):
raise serializers.ValidationError(_("BYDAY with numeric prefix not supported."))
if 'byyearday' in rrule_value.lower():
raise serializers.ValidationError(_("BYYEARDAY not supported."))
if 'byweekno' in rrule_value.lower():
raise serializers.ValidationError(_("BYWEEKNO not supported."))
if 'COUNT' in rrule_value and 'UNTIL' in rrule_value:
raise serializers.ValidationError(_("RRULE may not contain both COUNT and UNTIL"))
if match_count:
count_val = match_count.groups()[0].strip().split("=")
if int(count_val[1]) > 999:
raise serializers.ValidationError(_("COUNT > 999 is unsupported."))
try:
Schedule.rrulestr(rrule_value)
except Exception as e:
raise serializers.ValidationError(_("rrule parsing failed validation: {}").format(e))
return value
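For reference, example rrule strings (hypothetical dates) as judged by the validator above:
ok = 'DTSTART:20380601T120000Z RRULE:FREQ=DAILY;INTERVAL=1;COUNT=5'
naive = 'DTSTART:20380601T120000 RRULE:FREQ=DAILY;INTERVAL=1'            # naive DTSTART rejected
secondly = 'DTSTART:20380601T120000Z RRULE:FREQ=SECONDLY;INTERVAL=1'     # SECONDLY rejected
bounded_twice = 'DTSTART:20380601T120000Z RRULE:FREQ=DAILY;INTERVAL=1;COUNT=5;UNTIL=20390101T000000Z'  # COUNT with UNTIL rejected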
class ScheduleSerializer(LaunchConfigurationBaseSerializer, SchedulePreviewSerializer):
show_capabilities = ['edit', 'delete']
class Meta:
@@ -3722,6 +4012,15 @@ class ScheduleSerializer(LaunchConfigurationBaseSerializer):
))
if obj.unified_job_template:
res['unified_job_template'] = obj.unified_job_template.get_absolute_url(self.context.get('request'))
try:
if obj.unified_job_template.project:
res['project'] = obj.unified_job_template.project.get_absolute_url(self.context.get('request'))
except ObjectDoesNotExist:
pass
if obj.inventory:
res['inventory'] = obj.inventory.get_absolute_url(self.context.get('request'))
elif obj.unified_job_template and getattr(obj.unified_job_template, 'inventory', None):
res['inventory'] = obj.unified_job_template.inventory.get_absolute_url(self.context.get('request'))
return res
def validate_unified_job_template(self, value):
@@ -3731,60 +4030,8 @@ class ScheduleSerializer(LaunchConfigurationBaseSerializer):
raise serializers.ValidationError(_('Manual Project cannot have a schedule set.'))
elif type(value) == InventorySource and value.source == 'scm' and value.update_on_project_update:
raise serializers.ValidationError(_(
'Inventory sources with `update_on_project_update` cannot be scheduled. '
'Schedule its source project `{}` instead.'.format(value.source_project.name)))
return value
# We reject rrules if:
# - DTSTART is not included
# - INTERVAL is not included
# - SECONDLY is used
# - TZID is used
# - BYDAY prefixed with a number (MO is good but not 20MO)
# - BYYEARDAY
# - BYWEEKNO
# - Multiple DTSTART or RRULE elements
# - COUNT > 999
def validate_rrule(self, value):
rrule_value = value
multi_by_month_day = ".*?BYMONTHDAY[\:\=][0-9]+,-*[0-9]+"
multi_by_month = ".*?BYMONTH[\:\=][0-9]+,[0-9]+"
by_day_with_numeric_prefix = ".*?BYDAY[\:\=][0-9]+[a-zA-Z]{2}"
match_count = re.match(".*?(COUNT\=[0-9]+)", rrule_value)
match_multiple_dtstart = re.findall(".*?(DTSTART\:[0-9]+T[0-9]+Z)", rrule_value)
match_multiple_rrule = re.findall(".*?(RRULE\:)", rrule_value)
if not len(match_multiple_dtstart):
raise serializers.ValidationError(_('DTSTART required in rrule. Value should match: DTSTART:YYYYMMDDTHHMMSSZ'))
if len(match_multiple_dtstart) > 1:
raise serializers.ValidationError(_('Multiple DTSTART is not supported.'))
if not len(match_multiple_rrule):
raise serializers.ValidationError(_('RRULE require in rrule.'))
if len(match_multiple_rrule) > 1:
raise serializers.ValidationError(_('Multiple RRULE is not supported.'))
if 'interval' not in rrule_value.lower():
raise serializers.ValidationError(_('INTERVAL required in rrule.'))
if 'tzid' in rrule_value.lower():
raise serializers.ValidationError(_('TZID is not supported.'))
if 'secondly' in rrule_value.lower():
raise serializers.ValidationError(_('SECONDLY is not supported.'))
if re.match(multi_by_month_day, rrule_value):
raise serializers.ValidationError(_('Multiple BYMONTHDAYs not supported.'))
if re.match(multi_by_month, rrule_value):
raise serializers.ValidationError(_('Multiple BYMONTHs not supported.'))
if re.match(by_day_with_numeric_prefix, rrule_value):
raise serializers.ValidationError(_("BYDAY with numeric prefix not supported."))
if 'byyearday' in rrule_value.lower():
raise serializers.ValidationError(_("BYYEARDAY not supported."))
if 'byweekno' in rrule_value.lower():
raise serializers.ValidationError(_("BYWEEKNO not supported."))
if match_count:
count_val = match_count.groups()[0].strip().split("=")
if int(count_val[1]) > 999:
raise serializers.ValidationError(_("COUNT > 999 is unsupported."))
try:
rrule.rrulestr(rrule_value)
except Exception:
raise serializers.ValidationError(_("rrule parsing failed validation."))
six.text_type('Inventory sources with `update_on_project_update` cannot be scheduled. '
              'Schedule its source project `{}` instead.').format(value.source_project.name)))
return value
@@ -3796,8 +4043,10 @@ class InstanceSerializer(BaseSerializer):
class Meta:
model = Instance
fields = ("id", "type", "url", "related", "uuid", "hostname", "created", "modified",
"version", "capacity", "consumed_capacity", "percent_capacity_remaining", "jobs_running")
read_only_fields = ('uuid', 'hostname', 'version')
fields = ("id", "type", "url", "related", "uuid", "hostname", "created", "modified", 'capacity_adjustment',
"version", "capacity", "consumed_capacity", "percent_capacity_remaining", "jobs_running",
"cpu", "memory", "cpu_capacity", "mem_capacity", "enabled")
def get_related(self, obj):
res = super(InstanceSerializer, self).get_related(obj)
@@ -3820,6 +4069,7 @@ class InstanceSerializer(BaseSerializer):
class InstanceGroupSerializer(BaseSerializer):
committed_capacity = serializers.SerializerMethodField()
consumed_capacity = serializers.SerializerMethodField()
percent_capacity_remaining = serializers.SerializerMethodField()
jobs_running = serializers.SerializerMethodField()
@@ -3827,8 +4077,10 @@ class InstanceGroupSerializer(BaseSerializer):
class Meta:
model = InstanceGroup
fields = ("id", "type", "url", "related", "name", "created", "modified", "capacity", "consumed_capacity",
"percent_capacity_remaining", "jobs_running", "instances", "controller")
fields = ("id", "type", "url", "related", "name", "created", "modified",
"capacity", "committed_capacity", "consumed_capacity",
"percent_capacity_remaining", "jobs_running", "instances", "controller",
"policy_instance_percentage", "policy_instance_minimum", "policy_instance_list")
def get_related(self, obj):
res = super(InstanceGroupSerializer, self).get_related(obj)
@@ -3856,7 +4108,10 @@ class InstanceGroupSerializer(BaseSerializer):
return self.context['capacity_map']
def get_consumed_capacity(self, obj):
return self.get_capacity_dict()[obj.name]['consumed_capacity']
return self.get_capacity_dict()[obj.name]['running_capacity']
def get_committed_capacity(self, obj):
return self.get_capacity_dict()[obj.name]['committed_capacity']
def get_percent_capacity_remaining(self, obj):
if not obj.capacity:
@@ -3954,6 +4209,11 @@ class ActivityStreamSerializer(BaseSerializer):
if fk == 'schedule':
rel['unified_job_template'] = thisItem.unified_job_template.get_absolute_url(self.context.get('request'))
if obj.setting and obj.setting.get('category', None):
rel['setting'] = self.reverse(
'api:setting_singleton_detail',
kwargs={'category_slug': obj.setting['category']}
)
return rel
def _get_rel(self, obj, fk):
@@ -4005,6 +4265,8 @@ class ActivityStreamSerializer(BaseSerializer):
username = obj.actor.username,
first_name = obj.actor.first_name,
last_name = obj.actor.last_name)
if obj.setting:
summary_fields['setting'] = [obj.setting]
return summary_fields

awx/api/swagger.py (new file, 103 lines)
View File

@@ -0,0 +1,103 @@
import json
import warnings
from coreapi.document import Object, Link
from rest_framework import exceptions
from rest_framework.permissions import AllowAny
from rest_framework.renderers import CoreJSONRenderer
from rest_framework.response import Response
from rest_framework.schemas import SchemaGenerator, AutoSchema as DRFAuthSchema
from rest_framework.views import APIView
from rest_framework_swagger import renderers
class AutoSchema(DRFAuthSchema):
def get_link(self, path, method, base_url):
link = super(AutoSchema, self).get_link(path, method, base_url)
try:
serializer = self.view.get_serializer()
except Exception:
serializer = None
warnings.warn('{}.get_serializer() raised an exception during '
'schema generation. Serializer fields will not be '
'generated for {} {}.'
.format(self.view.__class__.__name__, method, path))
link.__dict__['deprecated'] = getattr(self.view, 'deprecated', False)
# auto-generate a topic/tag for the serializer based on its model
if hasattr(self.view, 'swagger_topic'):
link.__dict__['topic'] = str(self.view.swagger_topic).title()
elif serializer and hasattr(serializer, 'Meta'):
link.__dict__['topic'] = str(
serializer.Meta.model._meta.verbose_name_plural
).title()
elif hasattr(self.view, 'model'):
link.__dict__['topic'] = str(self.view.model._meta.verbose_name_plural).title()
else:
warnings.warn('Could not determine a Swagger tag for path {}'.format(path))
return link
def get_description(self, path, method):
self.view._request = self.view.request
setattr(self.view.request, 'swagger_method', method)
description = super(AutoSchema, self).get_description(path, method)
return description
class SwaggerSchemaView(APIView):
_ignore_model_permissions = True
exclude_from_schema = True
permission_classes = [AllowAny]
renderer_classes = [
CoreJSONRenderer,
renderers.OpenAPIRenderer,
renderers.SwaggerUIRenderer
]
def get(self, request):
generator = SchemaGenerator(
title='Ansible Tower API',
patterns=None,
urlconf=None
)
schema = generator.get_schema(request=request)
# python core-api doesn't support the deprecation yet, so track it
# ourselves and return it in a response header
_deprecated = []
# By default, DRF OpenAPI serialization places all endpoints in
# a single node based on their root path (/api). Instead, we want to
# group them by topic/tag so that they're categorized in the rendered
# output
document = schema._data.pop('api')
for path, node in document.items():
if isinstance(node, Object):
for action in node.values():
topic = getattr(action, 'topic', None)
if topic:
schema._data.setdefault(topic, Object())
schema._data[topic]._data[path] = node
if isinstance(action, Object):
for link in action.links.values():
if link.deprecated:
_deprecated.append(link.url)
elif isinstance(node, Link):
topic = getattr(node, 'topic', None)
if topic:
schema._data.setdefault(topic, Object())
schema._data[topic]._data[path] = node
if not schema:
raise exceptions.ValidationError(
'The schema generator did not return a schema Document'
)
return Response(
schema,
headers={'X-Deprecated-Paths': json.dumps(_deprecated)}
)

View File

@@ -1,9 +1,9 @@
The resulting data structure contains:
{
"count": 99,
"next": null,
"previous": null,
"count": 99,
"next": null,
"previous": null,
"results": [
...
]
@@ -60,6 +60,10 @@ _Added in AWX 1.4_
?related__search=findme
Note: If you want to provide more than one search term, use multiple
search fields with the same key, like `?related__search=foo&related__search=bar`.
All search terms with the same key will be ORed together.
## Filtering
Any additional query string parameters may be used to filter the list of
@@ -70,7 +74,7 @@ in the specified value should be url-encoded. For example:
?field=value%20xyz
Fields may also span relations, only for fields and relationships defined in
the database:
the database:
?other__field=value

View File

@@ -1,14 +0,0 @@
{% if not version_label_flag or version_label_flag == 'true' %}
{% if new_in_13 %}> _Added in AWX 1.3_{% endif %}
{% if new_in_14 %}> _Added in AWX 1.4_{% endif %}
{% if new_in_145 %}> _Added in Ansible Tower 1.4.5_{% endif %}
{% if new_in_148 %}> _Added in Ansible Tower 1.4.8_{% endif %}
{% if new_in_200 %}> _Added in Ansible Tower 2.0.0_{% endif %}
{% if new_in_220 %}> _Added in Ansible Tower 2.2.0_{% endif %}
{% if new_in_230 %}> _Added in Ansible Tower 2.3.0_{% endif %}
{% if new_in_240 %}> _Added in Ansible Tower 2.4.0_{% endif %}
{% if new_in_300 %}> _Added in Ansible Tower 3.0.0_{% endif %}
{% if new_in_310 %}> _New in Ansible Tower 3.1.0_{% endif %}
{% if new_in_320 %}> _New in Ansible Tower 3.2.0_{% endif %}
{% if new_in_330 %}> _New in Ansible Tower 3.3.0_{% endif %}
{% endif %}

View File

@@ -0,0 +1,3 @@
Relaunch an Ad Hoc Command:
Make a POST request to this resource to relaunch the ad hoc command. If any passwords are required, they should be passed in via POST data. To determine what values are required to relaunch the command, you may make a GET request to this endpoint.

View File

@@ -1,4 +1,5 @@
Site configuration settings and general information.
{% ifmeth GET %}
# Site configuration settings and general information
Make a GET request to this resource to retrieve the configuration containing
the following fields (some fields may not be visible to all users):
@@ -11,6 +12,10 @@ the following fields (some fields may not be visible to all users):
* `license_info`: Information about the current license.
* `version`: Version of Ansible Tower package installed.
* `eula`: The current End-User License Agreement
{% endifmeth %}
{% ifmeth POST %}
# Install or update an existing license
(_New in Ansible Tower 2.0.0_) Make a POST request to this resource as a super
user to install or update the existing license. The license data itself can
@@ -18,3 +23,11 @@ be POSTed as a normal json data structure.
(_New in Ansible Tower 2.1.1_) The POST must include a `eula_accepted` boolean
element indicating acceptance of the End-User License Agreement.
{% endifmeth %}
{% ifmeth DELETE %}
# Delete an existing license
(_New in Ansible Tower 2.0.0_) Make a DELETE request to this resource as a super
user to delete the existing license
{% endifmeth %}
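As a sketch (hypothetical host, credentials, and license path), the POST body is the license file's JSON plus the acceptance flag:
import json
import requests
license_data = json.load(open('/path/to/license'))   # contents of the license file
license_data['eula_accepted'] = True
requests.post('https://tower.example.com/api/v1/config/',
              json=license_data, auth=('admin', 'password'))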

View File

@@ -1,3 +1 @@
{{ docstring }}
{% include "api/_new_in_awx.md" %}

View File

@@ -1,3 +1,5 @@
{% ifmeth POST %}
# Generate an Auth Token
Make a POST request to this resource with `username` and `password` fields to
obtain an authentication token to use for subsequent requests.
@@ -32,6 +34,10 @@ agent that originally obtained it.
Each request that uses the token for authentication will refresh its expiration
timestamp and keep it from expiring. A token only expires when it is not used
for the configured timeout interval (default 1800 seconds).
{% endifmeth %}
A DELETE request with the token set will cause the token to be invalidated and
no further requests can be made with it.
{% ifmeth DELETE %}
# Delete an Auth Token
A DELETE request with the token header set will cause the token to be
invalidated and no further requests can be made with it.
{% endifmeth %}
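A usage sketch (hypothetical host and credentials, assuming the conventional `Authorization: Token` header):
import requests
resp = requests.post('https://tower.example.com/api/v1/authtoken/',
                     json={'username': 'admin', 'password': 'secret'})
token = resp.json()['token']
# each authenticated request refreshes the token's expiration timestamp
requests.get('https://tower.example.com/api/v1/me/',
             headers={'Authorization': 'Token %s' % token})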

View File

@@ -1,9 +1,13 @@
{% ifmeth GET %}
# Retrieve {{ model_verbose_name|title }} Variable Data:
Make a GET request to this resource to retrieve all variables defined for this
Make a GET request to this resource to retrieve all variables defined for a
{{ model_verbose_name }}.
{% endifmeth %}
{% ifmeth PUT PATCH %}
# Update {{ model_verbose_name|title }} Variable Data:
Make a PUT request to this resource to update variables defined for this
Make a PUT or PATCH request to this resource to update variables defined for a
{{ model_verbose_name }}.
{% endifmeth %}

View File

@@ -38,5 +38,3 @@ Data about failed and successful hosts by inventory will be given as:
"id": 2,
"name": "Test Inventory"
},
{% include "api/_new_in_awx.md" %}

View File

@@ -1,3 +1,5 @@
# View Statistics for Job Runs
Make a GET request to this resource to retrieve aggregate statistics about job runs suitable for graphing.
## Parameters and Filtering
@@ -33,5 +35,3 @@ Data will be returned in the following format:
Each element contains an epoch timestamp represented in seconds and a numerical value indicating
the number of events during that time period
{% include "api/_new_in_awx.md" %}

View File

@@ -1,3 +1 @@
Make a GET request to this resource to retrieve aggregate statistics for Tower.
{% include "api/_new_in_awx.md" %}

View File

@@ -1,4 +1,4 @@
# List All {{ model_verbose_name_plural|title }} for this {{ parent_model_verbose_name|title }}:
# List All {{ model_verbose_name_plural|title }} for {{ parent_model_verbose_name|title|anora }}:
Make a GET request to this resource to retrieve a list of all
{{ model_verbose_name_plural }} directly or indirectly belonging to this

View File

@@ -1,9 +1,7 @@
# List Potential Child Groups for this {{ parent_model_verbose_name|title }}:
# List Potential Child Groups for {{ parent_model_verbose_name|title|anora }}:
Make a GET request to this resource to retrieve a list of
{{ model_verbose_name_plural }} available to be added as children of the
current {{ parent_model_verbose_name }}.
{% include "api/_list_common.md" %}
{% include "api/_new_in_awx.md" %}

View File

@@ -1,4 +1,4 @@
# List All {{ model_verbose_name_plural|title }} for this {{ parent_model_verbose_name|title }}:
# List All {{ model_verbose_name_plural|title }} for {{ parent_model_verbose_name|title|anora }}:
Make a GET request to this resource to retrieve a list of all
{{ model_verbose_name_plural }} of which the selected

View File

@@ -1,3 +1,5 @@
# List Fact Scans for a Specific Host Scan
Make a GET request to this resource to retrieve system tracking data for a particular scan
You may filter by datetime:
@@ -7,5 +9,3 @@ You may filter by datetime:
and module
`?datetime=2015-06-01&module=ansible`
{% include "api/_new_in_awx.md" %}

View File

@@ -1,3 +1,5 @@
# List Fact Scans for a Host by Module and Date
Make a GET request to this resource to retrieve system tracking scans by module and date/time
You may filter scan runs using the `from` and `to` properties:
@@ -7,5 +9,3 @@ You may filter scan runs using the `from` and `to` properties:
You may also filter by module
`?module=packages`
{% include "api/_new_in_awx.md" %}

View File

@@ -0,0 +1 @@
# List Red Hat Insights for a Host

View File

@@ -29,5 +29,3 @@ Response code from this action will be:
- 202 if some inventory source updates were successful, but some failed
- 400 if all of the inventory source updates failed
- 400 if there are no inventory sources in the inventory
{% include "api/_new_in_awx.md" %}

View File

@@ -1,7 +1,9 @@
# List Root {{ model_verbose_name_plural|title }} for this {{ parent_model_verbose_name|title }}:
{% ifmeth GET %}
# List Root {{ model_verbose_name_plural|title }} for {{ parent_model_verbose_name|title|anora }}:
Make a GET request to this resource to retrieve a list of root (top-level)
{{ model_verbose_name_plural }} associated with this
{{ parent_model_verbose_name }}.
{% include "api/_list_common.md" %}
{% endifmeth %}

View File

@@ -9,5 +9,3 @@ cancelled. The response will include the following field:
Make a POST request to this resource to cancel a pending or running inventory
update. The response status code will be 202 if successful, or 405 if the
update cannot be canceled.
{% include "api/_new_in_awx.md" %}

View File

@@ -9,5 +9,3 @@ from its inventory source. The response will include the following field:
Make a POST request to this resource to update the inventory source. If
successful, the response status code will be 202. If the inventory source is
not defined or cannot be updated, a 405 status code will be returned.
{% include "api/_new_in_awx.md" %}

View File

@@ -1,4 +1,4 @@
# Group Tree for this {{ model_verbose_name|title }}:
# Group Tree for {{ model_verbose_name|title|anora }}:
Make a GET request to this resource to retrieve a hierarchical view of groups
associated with the selected {{ model_verbose_name }}.
@@ -11,5 +11,3 @@ also containing a list of its children.
Each group data structure includes the following fields:
{% include "api/_result_fields_common.md" %}
{% include "api/_new_in_awx.md" %}

View File

@@ -1,10 +1,15 @@
# Cancel Job
{% ifmeth GET %}
# Determine if a Job can be cancelled
Make a GET request to this resource to determine if the job can be cancelled.
The response will include the following field:
* `can_cancel`: Indicates whether this job can be canceled (boolean, read-only)
{% endifmeth %}
{% ifmeth POST %}
# Cancel a Job
Make a POST request to this resource to cancel a pending or running job. The
response status code will be 202 if successful, or 405 if the job cannot be
canceled.
{% endifmeth %}

View File

@@ -23,5 +23,3 @@ Will show only failed plays. Alternatively `false` may be used.
?play__icontains=test
Will filter plays matching the substring `test`
{% include "api/_new_in_awx.md" %}

View File

@@ -25,5 +25,3 @@ Will show only failed plays. Alternatively `false` may be used.
?task__icontains=test
Will filter tasks matching the substring `test`
{% include "api/_new_in_awx.md" %}

View File

@@ -1,3 +1,3 @@
Relaunch a job:
Relaunch a Job:
Make a POST request to this resource to launch a job. If any passwords or variables are required, they should be passed in via POST data. To determine what values are required to launch a job based on this job template, you may make a GET request to this endpoint.

View File

@@ -1,4 +1,5 @@
# Start Job
{% ifmeth GET %}
# Determine if a Job can be started
Make a GET request to this resource to determine if the job can be started and
whether any passwords are required to start the job. The response will include
@@ -7,10 +8,14 @@ the following fields:
* `can_start`: Flag indicating if this job can be started (boolean, read-only)
* `passwords_needed_to_start`: Password names required to start the job (array,
read-only)
{% endifmeth %}
{% ifmeth POST %}
# Start a Job
Make a POST request to this resource to start the job. If any passwords are
required, they must be passed via POST data.
If successful, the response status code will be 202. If any required passwords
are not provided, a 400 status code will be returned. If the job cannot be
started, a 405 status code will be returned.
{% endifmeth %}

View File

@@ -1,13 +1,7 @@
{% with 'false' as version_label_flag %}
{% include "api/sub_list_create_api_view.md" %}
{% endwith %}
Labels not associated with any other resources are deleted. A label can become disassociated from a resource as a result of three events:
1. A label is explicitly disassociated with a related job template
2. A job is deleted with labels
3. A cleanup job deletes a job with labels
{% with 'true' as version_label_flag %}
{% include "api/_new_in_awx.md" %}
{% endwith %}

View File

@@ -1,8 +1,8 @@
{% ifmeth GET %}
# List {{ model_verbose_name_plural|title }}:
Make a GET request to this resource to retrieve the list of
{{ model_verbose_name_plural }}.
{% include "api/_list_common.md" %}
{% include "api/_new_in_awx.md" %}
{% endifmeth %}

View File

@@ -1,6 +1,6 @@
{% include "api/list_api_view.md" %}
# Create {{ model_verbose_name_plural|title }}:
# Create {{ model_verbose_name|title|anora }}:
Make a POST request to this resource with the following {{ model_verbose_name }}
fields to create a new {{ model_verbose_name }}:
@@ -8,5 +8,3 @@ fields to create a new {{ model_verbose_name }}:
{% with write_only=1 %}
{% include "api/_result_fields_common.md" with serializer_fields=serializer_create_fields %}
{% endwith %}
{% include "api/_new_in_awx.md" %}

View File

@@ -1,4 +1,4 @@
# Retrieve {{ model_verbose_name|title }} Playbooks:
Make GET request to this resource to retrieve a list of playbooks available
for this {{ model_verbose_name }}.
for {{ model_verbose_name|anora }}.

View File

@@ -9,5 +9,3 @@ cancelled. The response will include the following field:
Make a POST request to this resource to cancel a pending or running project
update. The response status code will be 202 if successful, or 405 if the
update cannot be canceled.
{% include "api/_new_in_awx.md" %}

View File

@@ -8,5 +8,3 @@ from its SCM source. The response will include the following field:
Make a POST request to this resource to update the project. If the project
cannot be updated, a 405 status code will be returned.
{% include "api/_new_in_awx.md" %}

View File

@@ -2,11 +2,9 @@
### Note: starting from api v2, this resource object can be accessed via its named URL.
{% endif %}
# Retrieve {{ model_verbose_name|title }}:
# Retrieve {{ model_verbose_name|title|anora }}:
Make GET request to this resource to retrieve a single {{ model_verbose_name }}
record containing the following fields:
{% include "api/_result_fields_common.md" %}
{% include "api/_new_in_awx.md" %}

View File

@@ -2,15 +2,17 @@
### Note: starting from api v2, this resource object can be accessed via its named URL.
{% endif %}
# Retrieve {{ model_verbose_name|title }}:
{% ifmeth GET %}
# Retrieve {{ model_verbose_name|title|anora }}:
Make GET request to this resource to retrieve a single {{ model_verbose_name }}
record containing the following fields:
{% include "api/_result_fields_common.md" %}
{% endifmeth %}
# Delete {{ model_verbose_name|title }}:
{% ifmeth DELETE %}
# Delete {{ model_verbose_name|title|anora }}:
Make a DELETE request to this resource to delete this {{ model_verbose_name }}.
{% include "api/_new_in_awx.md" %}
{% endifmeth %}

View File

@@ -2,14 +2,17 @@
### Note: starting from api v2, this resource object can be accessed via its named URL.
{% endif %}
# Retrieve {{ model_verbose_name|title }}:
{% ifmeth GET %}
# Retrieve {{ model_verbose_name|title|anora }}:
Make GET request to this resource to retrieve a single {{ model_verbose_name }}
record containing the following fields:
{% include "api/_result_fields_common.md" %}
{% endifmeth %}
# Update {{ model_verbose_name|title }}:
{% ifmeth PUT PATCH %}
# Update {{ model_verbose_name|title|anora }}:
Make a PUT or PATCH request to this resource to update this
{{ model_verbose_name }}. The following fields may be modified:
@@ -17,9 +20,12 @@ Make a PUT or PATCH request to this resource to update this
{% with write_only=1 %}
{% include "api/_result_fields_common.md" with serializer_fields=serializer_update_fields %}
{% endwith %}
{% endifmeth %}
{% ifmeth PUT %}
For a PUT request, include **all** fields in the request.
{% endifmeth %}
{% ifmeth PATCH %}
For a PATCH request, include only the fields that are being modified.
{% include "api/_new_in_awx.md" %}
{% endifmeth %}

View File

@@ -2,14 +2,17 @@
### Note: starting from api v2, this resource object can be accessed via its named URL.
{% endif %}
# Retrieve {{ model_verbose_name|title }}:
{% ifmeth GET %}
# Retrieve {{ model_verbose_name|title|anora }}:
Make GET request to this resource to retrieve a single {{ model_verbose_name }}
record containing the following fields:
{% include "api/_result_fields_common.md" %}
{% endifmeth %}
# Update {{ model_verbose_name|title }}:
{% ifmeth PUT PATCH %}
# Update {{ model_verbose_name|title|anora }}:
Make a PUT or PATCH request to this resource to update this
{{ model_verbose_name }}. The following fields may be modified:
@@ -17,13 +20,18 @@ Make a PUT or PATCH request to this resource to update this
{% with write_only=1 %}
{% include "api/_result_fields_common.md" with serializer_fields=serializer_update_fields %}
{% endwith %}
{% endifmeth %}
{% ifmeth PUT %}
For a PUT request, include **all** fields in the request.
{% endifmeth %}
{% ifmeth PATCH %}
For a PATCH request, include only the fields that are being modified.
{% endifmeth %}
# Delete {{ model_verbose_name|title }}:
{% ifmeth DELETE %}
# Delete {{ model_verbose_name|title|anora }}:
Make a DELETE request to this resource to delete this {{ model_verbose_name }}.
{% include "api/_new_in_awx.md" %}
{% endifmeth %}

View File

@@ -0,0 +1 @@
# Test Logging Configuration

View File

@@ -1,9 +1,9 @@
# List {{ model_verbose_name_plural|title }} for this {{ parent_model_verbose_name|title }}:
{% ifmeth GET %}
# List {{ model_verbose_name_plural|title }} for {{ parent_model_verbose_name|title|anora }}:
Make a GET request to this resource to retrieve a list of
{{ model_verbose_name_plural }} associated with the selected
{{ parent_model_verbose_name }}.
{% include "api/_list_common.md" %}
{% include "api/_new_in_awx.md" %}
{% endifmeth %}

View File

@@ -1,6 +1,6 @@
{% include "api/sub_list_api_view.md" %}
# Create {{ model_verbose_name_plural|title }} for this {{ parent_model_verbose_name|title }}:
# Create {{ model_verbose_name|title|anora }} for {{ parent_model_verbose_name|title|anora }}:
Make a POST request to this resource with the following {{ model_verbose_name }}
fields to create a new {{ model_verbose_name }} associated with this
@@ -25,7 +25,7 @@ delete the associated {{ model_verbose_name }}.
}
{% else %}
# Add {{ model_verbose_name_plural|title }} for this {{ parent_model_verbose_name|title }}:
# Add {{ model_verbose_name_plural|title }} for {{ parent_model_verbose_name|title|anora }}:
Make a POST request to this resource with only an `id` field to associate an
existing {{ model_verbose_name }} with this {{ parent_model_verbose_name }}.
@@ -37,5 +37,3 @@ remove the {{ model_verbose_name }} from this {{ parent_model_verbose_name }}
{% if model_verbose_name != "label" %} without deleting the {{ model_verbose_name }}{% endif %}.
{% endif %}
{% endif %}
{% include "api/_new_in_awx.md" %}

View File

@@ -0,0 +1,6 @@
{% include "api/sub_list_create_api_view.md" %}
# Delete all {{ model_verbose_name_plural }} of this {{ parent_model_verbose_name|title }}:
Make a DELETE request to this resource to delete all {{ model_verbose_name_plural }} shown in the list.
The {{ parent_model_verbose_name|title }} will not be deleted by this request.

View File

@@ -1,12 +1,16 @@
# List Roles for this Team:
# List Roles for a Team:
{% ifmeth GET %}
Make a GET request to this resource to retrieve a list of roles associated with the selected team.
{% include "api/_list_common.md" %}
{% endifmeth %}
{% ifmeth POST %}
# Associate Roles with this Team:
Make a POST request to this resource to add or remove a role from this team. The following fields may be modified:
* `id`: The Role ID to add to the team. (int, required)
* `disassociate`: Provide if you want to remove the role. (any value, optional)
{% endifmeth %}
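Hypothetical POST bodies for the association endpoint above:
    {"id": 42}                          grant the role to the team
    {"id": 42, "disassociate": true}    remove it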

View File

@@ -25,5 +25,3 @@ dark background.
Files over {{ settings.STDOUT_MAX_BYTES_DISPLAY|filesizeformat }} (configurable)
will not display in the browser. Use the `txt_download` or `ansi_download`
formats to download the file directly to view it.
{% include "api/_new_in_awx.md" %}

View File

@@ -1,3 +1,5 @@
# Retrieve Information about the current User
Make a GET request to retrieve user information about the current user.
One result should be returned containing the following fields:

View File

@@ -1,12 +1,16 @@
# List Roles for this User:
# List Roles for a User:
{% ifmeth GET %}
Make a GET request to this resource to retrieve a list of roles associated with the selected user.
{% include "api/_list_common.md" %}
{% endifmeth %}
{% ifmeth POST %}
# Associate Roles with this User:
Make a POST request to this resource to add or remove a role from this user. The following fields may be modified:
* `id`: The Role ID to add to the user. (int, required)
* `disassociate`: Provide if you want to remove the role. (any value, optional)
{% endifmeth %}

View File

@@ -11,6 +11,7 @@ from awx.api.views import (
CredentialObjectRolesList,
CredentialOwnerUsersList,
CredentialOwnerTeamsList,
CredentialCopy,
)
@@ -22,6 +23,7 @@ urls = [
url(r'^(?P<pk>[0-9]+)/object_roles/$', CredentialObjectRolesList.as_view(), name='credential_object_roles_list'),
url(r'^(?P<pk>[0-9]+)/owner_users/$', CredentialOwnerUsersList.as_view(), name='credential_owner_users_list'),
url(r'^(?P<pk>[0-9]+)/owner_teams/$', CredentialOwnerTeamsList.as_view(), name='credential_owner_teams_list'),
url(r'^(?P<pk>[0-9]+)/copy/$', CredentialCopy.as_view(), name='credential_copy'),
]
__all__ = ['urls']

View File

@@ -20,6 +20,7 @@ from awx.api.views import (
InventoryAccessList,
InventoryObjectRolesList,
InventoryInstanceGroupsList,
InventoryCopy,
)
@@ -40,6 +41,7 @@ urls = [
url(r'^(?P<pk>[0-9]+)/access_list/$', InventoryAccessList.as_view(), name='inventory_access_list'),
url(r'^(?P<pk>[0-9]+)/object_roles/$', InventoryObjectRolesList.as_view(), name='inventory_object_roles_list'),
url(r'^(?P<pk>[0-9]+)/instance_groups/$', InventoryInstanceGroupsList.as_view(), name='inventory_instance_groups_list'),
url(r'^(?P<pk>[0-9]+)/copy/$', InventoryCopy.as_view(), name='inventory_copy'),
]
__all__ = ['urls']

View File

@@ -7,6 +7,7 @@ from awx.api.views import (
InventoryScriptList,
InventoryScriptDetail,
InventoryScriptObjectRolesList,
InventoryScriptCopy,
)
@@ -14,6 +15,7 @@ urls = [
url(r'^$', InventoryScriptList.as_view(), name='inventory_script_list'),
url(r'^(?P<pk>[0-9]+)/$', InventoryScriptDetail.as_view(), name='inventory_script_detail'),
url(r'^(?P<pk>[0-9]+)/object_roles/$', InventoryScriptObjectRolesList.as_view(), name='inventory_script_object_roles_list'),
url(r'^(?P<pk>[0-9]+)/copy/$', InventoryScriptCopy.as_view(), name='inventory_script_copy'),
]
__all__ = ['urls']

View File

@@ -9,6 +9,7 @@ from awx.api.views import (
InventoryUpdateCancel,
InventoryUpdateStdout,
InventoryUpdateNotificationsList,
InventoryUpdateEventsList,
)
@@ -18,6 +19,7 @@ urls = [
url(r'^(?P<pk>[0-9]+)/cancel/$', InventoryUpdateCancel.as_view(), name='inventory_update_cancel'),
url(r'^(?P<pk>[0-9]+)/stdout/$', InventoryUpdateStdout.as_view(), name='inventory_update_stdout'),
url(r'^(?P<pk>[0-9]+)/notifications/$', InventoryUpdateNotificationsList.as_view(), name='inventory_update_notifications_list'),
url(r'^(?P<pk>[0-9]+)/events/$', InventoryUpdateEventsList.as_view(), name='inventory_update_events_list'),
]
__all__ = ['urls']

View File

@@ -19,6 +19,7 @@ from awx.api.views import (
JobTemplateAccessList,
JobTemplateObjectRolesList,
JobTemplateLabelList,
JobTemplateCopy,
)
@@ -41,6 +42,7 @@ urls = [
url(r'^(?P<pk>[0-9]+)/access_list/$', JobTemplateAccessList.as_view(), name='job_template_access_list'),
url(r'^(?P<pk>[0-9]+)/object_roles/$', JobTemplateObjectRolesList.as_view(), name='job_template_object_roles_list'),
url(r'^(?P<pk>[0-9]+)/labels/$', JobTemplateLabelList.as_view(), name='job_template_label_list'),
url(r'^(?P<pk>[0-9]+)/copy/$', JobTemplateCopy.as_view(), name='job_template_copy'),
]
__all__ = ['urls']

View File

@@ -8,6 +8,7 @@ from awx.api.views import (
NotificationTemplateDetail,
NotificationTemplateTest,
NotificationTemplateNotificationList,
NotificationTemplateCopy,
)
@@ -16,6 +17,7 @@ urls = [
url(r'^(?P<pk>[0-9]+)/$', NotificationTemplateDetail.as_view(), name='notification_template_detail'),
url(r'^(?P<pk>[0-9]+)/test/$', NotificationTemplateTest.as_view(), name='notification_template_test'),
url(r'^(?P<pk>[0-9]+)/notifications/$', NotificationTemplateNotificationList.as_view(), name='notification_template_notification_list'),
url(r'^(?P<pk>[0-9]+)/copy/$', NotificationTemplateCopy.as_view(), name='notification_template_copy'),
]
__all__ = ['urls']

View File

@@ -19,10 +19,11 @@ from awx.api.views import (
ProjectNotificationTemplatesSuccessList,
ProjectObjectRolesList,
ProjectAccessList,
ProjectCopy,
)
urls = [
url(r'^$', ProjectList.as_view(), name='project_list'),
url(r'^(?P<pk>[0-9]+)/$', ProjectDetail.as_view(), name='project_detail'),
url(r'^(?P<pk>[0-9]+)/playbooks/$', ProjectPlaybooks.as_view(), name='project_playbooks'),
@@ -39,6 +40,7 @@ urls = [
name='project_notification_templates_success_list'),
url(r'^(?P<pk>[0-9]+)/object_roles/$', ProjectObjectRolesList.as_view(), name='project_object_roles_list'),
url(r'^(?P<pk>[0-9]+)/access_list/$', ProjectAccessList.as_view(), name='project_access_list'),
url(r'^(?P<pk>[0-9]+)/copy/$', ProjectCopy.as_view(), name='project_copy'),
]
__all__ = ['urls']

View File

@@ -10,6 +10,7 @@ from awx.api.views import (
ProjectUpdateStdout,
ProjectUpdateScmInventoryUpdates,
ProjectUpdateNotificationsList,
ProjectUpdateEventsList,
)
@@ -20,6 +21,7 @@ urls = [
url(r'^(?P<pk>[0-9]+)/stdout/$', ProjectUpdateStdout.as_view(), name='project_update_stdout'),
url(r'^(?P<pk>[0-9]+)/scm_inventory_updates/$', ProjectUpdateScmInventoryUpdates.as_view(), name='project_update_scm_inventory_updates'),
url(r'^(?P<pk>[0-9]+)/notifications/$', ProjectUpdateNotificationsList.as_view(), name='project_update_notifications_list'),
url(r'^(?P<pk>[0-9]+)/events/$', ProjectUpdateEventsList.as_view(), name='project_update_events_list'),
]
__all__ = ['urls']

View File

@@ -8,6 +8,7 @@ from awx.api.views import (
SystemJobDetail,
SystemJobCancel,
SystemJobNotificationsList,
SystemJobEventsList
)
@@ -16,6 +17,7 @@ urls = [
url(r'^(?P<pk>[0-9]+)/$', SystemJobDetail.as_view(), name='system_job_detail'),
url(r'^(?P<pk>[0-9]+)/cancel/$', SystemJobCancel.as_view(), name='system_job_cancel'),
url(r'^(?P<pk>[0-9]+)/notifications/$', SystemJobNotificationsList.as_view(), name='system_job_notifications_list'),
url(r'^(?P<pk>[0-9]+)/events/$', SystemJobEventsList.as_view(), name='system_job_events_list'),
]
__all__ = ['urls']

View File

@@ -2,6 +2,7 @@
# All Rights Reserved.
from __future__ import absolute_import, unicode_literals
from django.conf import settings
from django.conf.urls import include, url
from awx.api.views import (
@@ -22,6 +23,8 @@ from awx.api.views import (
JobExtraCredentialsList,
JobTemplateCredentialsList,
JobTemplateExtraCredentialsList,
SchedulePreview,
ScheduleZoneInfo,
)
from .organization import urls as organization_urls
@@ -113,11 +116,18 @@ v2_urls = [
url(r'^jobs/(?P<pk>[0-9]+)/credentials/$', JobCredentialsList.as_view(), name='job_credentials_list'),
url(r'^job_templates/(?P<pk>[0-9]+)/extra_credentials/$', JobTemplateExtraCredentialsList.as_view(), name='job_template_extra_credentials_list'),
url(r'^job_templates/(?P<pk>[0-9]+)/credentials/$', JobTemplateCredentialsList.as_view(), name='job_template_credentials_list'),
url(r'^schedules/preview/$', SchedulePreview.as_view(), name='schedule_rrule'),
url(r'^schedules/zoneinfo/$', ScheduleZoneInfo.as_view(), name='schedule_zoneinfo'),
]
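A usage sketch for the new preview route (hypothetical host and credentials; the response shape, upcoming occurrences for the candidate rrule, is assumed from the endpoint's purpose):
import requests
requests.post('https://tower.example.com/api/v2/schedules/preview/',
              json={'rrule': 'DTSTART:20380601T120000Z RRULE:FREQ=DAILY;INTERVAL=1;COUNT=3'},
              auth=('admin', 'password'))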
app_name = 'api'
urlpatterns = [
url(r'^$', ApiRootView.as_view(), name='api_root_view'),
url(r'^(?P<version>(v2))/', include(v2_urls)),
url(r'^(?P<version>(v1|v2))/', include(v1_urls))
url(r'^(?P<version>(v1|v2))/', include(v1_urls)),
]
if settings.SETTINGS_MODULE == 'awx.settings.development':
from awx.api.swagger import SwaggerSchemaView
urlpatterns += [
url(r'^swagger/$', SwaggerSchemaView.as_view(), name='swagger_view'),
]
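A sketch for fetching the generated schema in development (local URL hypothetical; `?format=openapi` assumes django-rest-swagger's OpenAPIRenderer content negotiation):
import json
import requests
resp = requests.get('http://localhost:8013/api/swagger/?format=openapi')
deprecated = json.loads(resp.headers.get('X-Deprecated-Paths', '[]'))
print(deprecated)   # deprecated endpoint URLs collected by SwaggerSchemaView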

File diff suppressed because it is too large.

View File

@@ -1,6 +1,7 @@
# Python
import logging
import urlparse
from collections import OrderedDict
# Django
from django.core.validators import URLValidator
@@ -139,6 +140,8 @@ class KeyValueField(DictField):
ret = super(KeyValueField, self).to_internal_value(data)
for value in data.values():
if not isinstance(value, six.string_types + six.integer_types + (float,)):
if isinstance(value, OrderedDict):
value = dict(value)
self.fail('invalid_child', input=value)
return ret
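An illustration (hypothetical values) of what the field accepts after this change:
from collections import OrderedDict
from awx.conf.fields import KeyValueField   # import path assumed
field = KeyValueField()
field.to_internal_value({'region': 'us-east', 'retries': 3})   # accepted: flat str/int values
field.to_internal_value({'nested': OrderedDict(a=1)})          # fails with 'invalid_child'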

View File

@@ -120,6 +120,9 @@ class SettingsRegistry(object):
def is_setting_read_only(self, setting):
return bool(self._registry.get(setting, {}).get('read_only', False))
def get_setting_category(self, setting):
return self._registry.get(setting, {}).get('category_slug', None)
def get_setting_field(self, setting, mixin_class=None, for_user=False, **kwargs):
from rest_framework.fields import empty
field_kwargs = {}

View File

@@ -1,6 +1,8 @@
# Django REST Framework
from rest_framework import serializers
import six
# Tower
from awx.api.fields import VerbatimField
from awx.api.serializers import BaseSerializer
@@ -45,12 +47,12 @@ class SettingFieldMixin(object):
"""Mixin to use a registered setting field class for API display/validation."""
def to_representation(self, obj):
if getattr(self, 'encrypted', False) and isinstance(obj, basestring) and obj:
if getattr(self, 'encrypted', False) and isinstance(obj, six.string_types) and obj:
return '$encrypted$'
return obj
def to_internal_value(self, value):
if getattr(self, 'encrypted', False) and isinstance(value, basestring) and value.startswith('$encrypted$'):
if getattr(self, 'encrypted', False) and isinstance(value, six.string_types) and value.startswith('$encrypted$'):
raise serializers.SkipField()
obj = super(SettingFieldMixin, self).to_internal_value(value)
return super(SettingFieldMixin, self).to_representation(obj)
@@ -87,8 +89,10 @@ class SettingSingletonSerializer(serializers.Serializer):
if self.instance and not hasattr(self.instance, key):
continue
extra_kwargs = {}
# Make LICENSE read-only here; update via /api/v1/config/ only.
if key == 'LICENSE':
# Make LICENSE and AWX_ISOLATED_KEY_GENERATION read-only here;
# LICENSE is only updated via /api/v1/config/
# AWX_ISOLATED_KEY_GENERATION is only set/unset via the setup playbook
if key in ('LICENSE', 'AWX_ISOLATED_KEY_GENERATION'):
extra_kwargs['read_only'] = True
field = settings_registry.get_setting_field(key, mixin_class=SettingFieldMixin, for_user=bool(category_slug == 'user'), **extra_kwargs)
fields[key] = field

View File

@@ -14,6 +14,7 @@ from django.conf import settings, UserSettingsHolder
from django.core.cache import cache as django_cache
from django.core.exceptions import ImproperlyConfigured
from django.db import ProgrammingError, OperationalError
from django.utils.functional import cached_property
# Django REST Framework
from rest_framework.fields import empty, SkipField
@@ -230,7 +231,8 @@ class SettingsWrapper(UserSettingsHolder):
self.__dict__['cache'] = EncryptedCacheProxy(cache, registry)
self.__dict__['registry'] = registry
def _get_supported_settings(self):
@cached_property
def all_supported_settings(self):
return self.registry.get_registered_settings()
def _preload_cache(self):
@@ -273,7 +275,7 @@ class SettingsWrapper(UserSettingsHolder):
setting_ids[setting.key] = setting.id
try:
value = decrypt_field(setting, 'value')
except ValueError, e:
except ValueError as e:
#TODO: Remove in Tower 3.3
logger.debug('encountered error decrypting field: %s - attempting fallback to old', e)
value = old_decrypt_field(setting, 'value')
@@ -382,7 +384,7 @@ class SettingsWrapper(UserSettingsHolder):
def __getattr__(self, name):
value = empty
if name in self._get_supported_settings():
if name in self.all_supported_settings:
with _log_database_error():
value = self._get_local(name)
if value is not empty:
@@ -414,7 +416,7 @@ class SettingsWrapper(UserSettingsHolder):
# post_save handler will delete from cache when changed.
def __setattr__(self, name, value):
if name in self._get_supported_settings():
if name in self.all_supported_settings:
with _log_database_error():
self._set_local(name, value)
else:
@@ -430,7 +432,7 @@ class SettingsWrapper(UserSettingsHolder):
# pre_delete handler will delete from cache.
def __delattr__(self, name):
if name in self._get_supported_settings():
if name in self.all_supported_settings:
with _log_database_error():
self._del_local(name)
else:
@@ -440,7 +442,7 @@ class SettingsWrapper(UserSettingsHolder):
keys = []
with _log_database_error():
for setting in Setting.objects.filter(
key__in=self._get_supported_settings(), user__isnull=True):
key__in=self.all_supported_settings, user__isnull=True):
# Skip returning settings that have been overridden but are
# considered to be "not set".
if setting.value is None and SETTING_CACHE_NOTSET == SETTING_CACHE_NONE:
@@ -454,7 +456,7 @@ class SettingsWrapper(UserSettingsHolder):
def is_overridden(self, setting):
set_locally = False
if setting in self._get_supported_settings():
if setting in self.all_supported_settings:
with _log_database_error():
set_locally = Setting.objects.filter(key=setting, user__isnull=True).exists()
set_on_default = getattr(self.default_settings, 'is_overridden', lambda s: False)(setting)
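The `cached_property` swap computes the registry scan once per settings-wrapper instance instead of on every attribute access; a minimal sketch of the mechanism:
from django.utils.functional import cached_property

class Example(object):
    @cached_property
    def all_supported_settings(self):
        print('computed once')
        return ['FOO', 'BAR']

e = Example()
e.all_supported_settings   # computes and stores the list on the instance
e.all_supported_settings   # returns the cached value without recomputing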

View File

@@ -6,14 +6,16 @@ import glob
import os
import shutil
# RedBaron
from redbaron import RedBaron, indent
import six
__all__ = ['comment_assignments']
# AWX
from awx.conf.registry import settings_registry
__all__ = ['comment_assignments', 'conf_to_dict']
def comment_assignments(patterns, assignment_names, dry_run=True, backup_suffix='.old'):
if isinstance(patterns, basestring):
if isinstance(patterns, six.string_types):
patterns = [patterns]
diffs = []
for pattern in patterns:
@@ -30,7 +32,9 @@ def comment_assignments(patterns, assignment_names, dry_run=True, backup_suffix=
def comment_assignments_in_file(filename, assignment_names, dry_run=True, backup_filename=None):
if isinstance(assignment_names, basestring):
from redbaron import RedBaron, indent
if isinstance(assignment_names, six.string_types):
assignment_names = [assignment_names]
else:
assignment_names = assignment_names[:]
@@ -103,6 +107,13 @@ def comment_assignments_in_file(filename, assignment_names, dry_run=True, backup
return '\n'.join(diff_lines)
def conf_to_dict(obj):
return {
'category': settings_registry.get_setting_category(obj.key),
'name': obj.key,
}
if __name__ == '__main__':
pattern = os.path.join(os.path.dirname(__file__), '..', 'settings', 'local_*.py')
diffs = comment_assignments(pattern, ['AUTH_LDAP_ORGANIZATION_MAP'])

View File

@@ -21,7 +21,7 @@ from awx.api.generics import * # noqa
from awx.api.permissions import IsSuperUser
from awx.api.versioning import reverse, get_request_version
from awx.main.utils import * # noqa
from awx.main.utils.handlers import BaseHTTPSHandler, LoggingConnectivityException
from awx.main.utils.handlers import BaseHTTPSHandler, UDPHandler, LoggingConnectivityException
from awx.main.tasks import handle_setting_changes
from awx.conf.license import get_licensed_features
from awx.conf.models import Setting
@@ -44,7 +44,6 @@ class SettingCategoryList(ListAPIView):
model = Setting # Not exactly, but needed for the view.
serializer_class = SettingCategorySerializer
filter_backends = []
new_in_310 = True
view_name = _('Setting Categories')
def get_queryset(self):
@@ -69,7 +68,6 @@ class SettingSingletonDetail(RetrieveUpdateDestroyAPIView):
model = Setting # Not exactly, but needed for the view.
serializer_class = SettingSingletonSerializer
filter_backends = []
new_in_310 = True
view_name = _('Setting Detail')
def get_queryset(self):
@@ -170,7 +168,6 @@ class SettingLoggingTest(GenericAPIView):
serializer_class = SettingSingletonSerializer
permission_classes = (IsSuperUser,)
filter_backends = []
new_in_320 = True
def post(self, request, *args, **kwargs):
defaults = dict()
@@ -202,7 +199,11 @@ class SettingLoggingTest(GenericAPIView):
for k, v in serializer.validated_data.items():
setattr(mock_settings, k, v)
mock_settings.LOG_AGGREGATOR_LEVEL = 'DEBUG'
BaseHTTPSHandler.perform_test(mock_settings)
if mock_settings.LOG_AGGREGATOR_PROTOCOL.upper() == 'UDP':
UDPHandler.perform_test(mock_settings)
return Response(status=status.HTTP_201_CREATED)
else:
BaseHTTPSHandler.perform_test(mock_settings)
except LoggingConnectivityException as e:
return Response({'error': str(e)}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
return Response(status=status.HTTP_200_OK)

View File

@@ -29,6 +29,8 @@ import threading
import uuid
import memcache
from six.moves import xrange
__all__ = ['event_context']
@@ -123,6 +125,8 @@ class EventContext(object):
event_data['job_id'] = int(os.getenv('JOB_ID', '0'))
if os.getenv('AD_HOC_COMMAND_ID', ''):
event_data['ad_hoc_command_id'] = int(os.getenv('AD_HOC_COMMAND_ID', '0'))
if os.getenv('PROJECT_UPDATE_ID', ''):
event_data['project_update_id'] = int(os.getenv('PROJECT_UPDATE_ID', '0'))
event_data.setdefault('pid', os.getpid())
event_data.setdefault('uuid', str(uuid.uuid4()))
event_data.setdefault('created', datetime.datetime.utcnow().isoformat())
@@ -145,7 +149,7 @@ class EventContext(object):
event_data['res'] = {}
event_dict = dict(event=event, event_data=event_data)
for key in event_data.keys():
if key in ('job_id', 'ad_hoc_command_id', 'uuid', 'parent_uuid', 'created',):
if key in ('job_id', 'ad_hoc_command_id', 'project_update_id', 'uuid', 'parent_uuid', 'created',):
event_dict[key] = event_data.pop(key)
elif key in ('verbosity', 'pid'):
event_dict[key] = event_data[key]

View File

@@ -25,4 +25,5 @@ import ansible
# Because of the way Ansible loads plugins, it's not possible to import
# ansible.plugins.callback.minimal when being loaded as the minimal plugin. Ugh.
execfile(os.path.join(os.path.dirname(ansible.__file__), 'plugins', 'callback', 'minimal.py'))
with open(os.path.join(os.path.dirname(ansible.__file__), 'plugins', 'callback', 'minimal.py')) as in_file:
exec(in_file.read())

View File

@@ -18,7 +18,11 @@
from __future__ import (absolute_import, division, print_function)
# Python
import codecs
import contextlib
import json
import os
import stat
import sys
import uuid
from copy import copy
@@ -292,10 +296,22 @@ class BaseCallbackModule(CallbackBase):
failures=stats.failures,
ok=stats.ok,
processed=stats.processed,
skipped=stats.skipped,
artifact_data=stats.custom.get('_run', {}) if hasattr(stats, 'custom') else {}
skipped=stats.skipped
)
# write custom set_stat artifact data to the local disk so that it can
# be persisted by awx after the process exits
custom_artifact_data = stats.custom.get('_run', {}) if hasattr(stats, 'custom') else {}
if custom_artifact_data:
# create the directory for custom stats artifacts to live in (if it doesn't exist)
custom_artifacts_dir = os.path.join(os.getenv('AWX_PRIVATE_DATA_DIR'), 'artifacts')
os.makedirs(custom_artifacts_dir, mode=stat.S_IXUSR + stat.S_IWUSR + stat.S_IRUSR)
custom_artifacts_path = os.path.join(custom_artifacts_dir, 'custom')
with codecs.open(custom_artifacts_path, 'w', encoding='utf-8') as f:
os.chmod(custom_artifacts_path, stat.S_IRUSR | stat.S_IWUSR)
json.dump(custom_artifact_data, f)
with self.capture_event_data('playbook_on_stats', **event_data):
super(BaseCallbackModule, self).v2_playbook_on_stats(stats)
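
With this change, custom set_stats data is written under AWX_PRIVATE_DATA_DIR/artifacts/custom instead of riding along in the playbook_on_stats event. A sketch of the same write with one hedged hardening tweak: the hunk's unconditional os.makedirs() raises OSError if the directory already exists, so a guard is added here:

import codecs
import json
import os
import stat

def save_custom_stats(artifact_data, private_data_dir):
    artifacts_dir = os.path.join(private_data_dir, 'artifacts')
    if not os.path.isdir(artifacts_dir):   # guard added; not in the hunk
        os.makedirs(artifacts_dir, mode=stat.S_IRWXU)  # 0o700
    path = os.path.join(artifacts_dir, 'custom')
    with codecs.open(path, 'w', encoding='utf-8') as f:
        # tighten permissions before any artifact data hits the file
        os.chmod(path, stat.S_IRUSR | stat.S_IWUSR)
        json.dump(artifact_data, f)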

View File

@@ -7,7 +7,9 @@ from collections import OrderedDict
import json
import mock
import os
import shutil
import sys
import tempfile
import pytest
@@ -259,3 +261,26 @@ def test_callback_plugin_strips_task_environ_variables(executor, cache, playbook
assert len(cache)
for event in cache.values():
assert os.environ['PATH'] not in json.dumps(event)
@pytest.mark.parametrize('playbook', [
{'custom_set_stat.yml': '''
- name: custom set_stat calls should persist to the local disk so awx can save them
connection: local
hosts: all
tasks:
- set_stats:
data:
foo: "bar"
'''}, # noqa
])
def test_callback_plugin_saves_custom_stats(executor, cache, playbook):
try:
private_data_dir = tempfile.mkdtemp()
with mock.patch.dict(os.environ, {'AWX_PRIVATE_DATA_DIR': private_data_dir}):
executor.run()
artifacts_path = os.path.join(private_data_dir, 'artifacts', 'custom')
with open(artifacts_path, 'r') as f:
assert json.load(f) == {'foo': 'bar'}
finally:
shutil.rmtree(os.path.join(private_data_dir))

Five file diffs suppressed because they are too large to display.

View File

@@ -12,6 +12,7 @@ from django.db.models import Q, Prefetch
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.utils.translation import ugettext_lazy as _
from django.core.exceptions import ObjectDoesNotExist
# Django REST Framework
from rest_framework.exceptions import ParseError, PermissionDenied, ValidationError
@@ -31,7 +32,7 @@ from awx.conf.license import LicenseForbids, feature_enabled
__all__ = ['get_user_queryset', 'check_user_access', 'check_user_access_with_errors',
'user_accessible_objects', 'consumer_access',
'user_admin_role', 'StateConflict',]
'user_admin_role', 'ActiveJobConflict',]
logger = logging.getLogger('awx.main.access')
@@ -71,9 +72,15 @@ def get_object_from_data(field, Model, data, obj=None):
raise ParseError(_("Bad data found in related field %s." % field))
class StateConflict(ValidationError):
class ActiveJobConflict(ValidationError):
status_code = 409
def __init__(self, active_jobs):
super(ActiveJobConflict, self).__init__({
"conflict": _("Resource is being used by running jobs."),
"active_jobs": active_jobs
})
def register_access(model_class, access_class):
access_registry[model_class] = access_class
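
ActiveJobConflict centralizes the "resource is busy" payload that several can_delete methods below used to assemble by hand, so every caller now produces an identical 409 response. A hedged usage sketch:

# mirrors the can_delete methods later in this file
active_jobs = [dict(type="job", id=42)]
if len(active_jobs) > 0:
    raise ActiveJobConflict(active_jobs)

# DRF renders the ValidationError detail with status_code = 409, roughly:
# {"conflict": "Resource is being used by running jobs.",
#  "active_jobs": [{"type": "job", "id": 42}]}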
@@ -301,7 +308,7 @@ class BaseAccess(object):
if check_expiration and validation_info.get('time_remaining', None) is None:
raise PermissionDenied(_("License is missing."))
if check_expiration and validation_info.get("grace_period_remaining") <= 0:
raise PermissionDenied(_("License has expired."))
logger.error(_("License has expired."))
free_instances = validation_info.get('free_instances', 0)
available_instances = validation_info.get('available_instances', 0)
@@ -309,11 +316,11 @@ class BaseAccess(object):
if add_host_name:
host_exists = Host.objects.filter(name=add_host_name).exists()
if not host_exists and free_instances == 0:
raise PermissionDenied(_("License count of %s instances has been reached.") % available_instances)
logger.error(_("License count of %s instances has been reached.") % available_instances)
elif not host_exists and free_instances < 0:
raise PermissionDenied(_("License count of %s instances has been exceeded.") % available_instances)
logger.error(_("License count of %s instances has been exceeded.") % available_instances)
elif not add_host_name and free_instances < 0:
raise PermissionDenied(_("Host count exceeds available instances."))
logger.error(_("Host count exceeds available instances."))
if feature is not None:
if "features" in validation_info and not validation_info["features"].get(feature, False):
@@ -417,6 +424,18 @@ class InstanceAccess(BaseAccess):
return Instance.objects.filter(
rampart_groups__in=self.user.get_queryset(InstanceGroup)).distinct()
def can_attach(self, obj, sub_obj, relationship, data,
skip_sub_obj_read_check=False):
if relationship == 'rampart_groups' and isinstance(sub_obj, InstanceGroup):
return self.user.is_superuser
return super(InstanceAccess, self).can_attach(obj, sub_obj, relationship, data, skip_sub_obj_read_check=skip_sub_obj_read_check)
def can_unattach(self, obj, sub_obj, relationship, data=None):
if relationship == 'rampart_groups' and isinstance(sub_obj, InstanceGroup):
return self.user.is_superuser
return super(InstanceAccess, self).can_unattach(obj, sub_obj, relationship, data=data)
def can_add(self, data):
return False
@@ -437,13 +456,13 @@ class InstanceGroupAccess(BaseAccess):
organization__in=Organization.accessible_pk_qs(self.user, 'admin_role'))
def can_add(self, data):
return False
return self.user.is_superuser
def can_change(self, obj, data):
return False
return self.user.is_superuser
def can_delete(self, obj):
return False
return self.user.is_superuser
class UserAccess(BaseAccess):
@@ -568,8 +587,7 @@ class OrganizationAccess(BaseAccess):
active_jobs.extend([dict(type="inventory_update", id=o.id)
for o in InventoryUpdate.objects.filter(inventory_source__inventory__organization=obj, status__in=ACTIVE_STATES)])
if len(active_jobs) > 0:
raise StateConflict({"conflict": _("Resource is being used by running jobs"),
"active_jobs": active_jobs})
raise ActiveJobConflict(active_jobs)
return True
def can_attach(self, obj, sub_obj, relationship, *args, **kwargs):
@@ -590,6 +608,7 @@ class InventoryAccess(BaseAccess):
I can see inventory when:
- I'm a superuser.
- I'm an org admin of the inventory's org.
- I'm an inventory admin of the inventory's org.
- I have read, write or admin permissions on it.
I can change inventory when:
- I'm a superuser.
@@ -623,9 +642,9 @@ class InventoryAccess(BaseAccess):
def can_add(self, data):
# If no data is specified, just checking for generic add permission?
if not data:
return Organization.accessible_objects(self.user, 'admin_role').exists()
return Organization.accessible_objects(self.user, 'inventory_admin_role').exists()
return self.check_related('organization', Organization, data)
return self.check_related('organization', Organization, data, role_field='inventory_admin_role')
@check_superuser
def can_change(self, obj, data):
@@ -641,7 +660,7 @@ class InventoryAccess(BaseAccess):
# Verify that the user has access to the new organization if moving an
# inventory to a new organization. Otherwise, just check for admin permission.
return (
self.check_related('organization', Organization, data, obj=obj,
self.check_related('organization', Organization, data, obj=obj, role_field='inventory_admin_role',
mandatory=org_admin_mandatory) and
self.user in obj.admin_role
)
@@ -662,8 +681,7 @@ class InventoryAccess(BaseAccess):
active_jobs.extend([dict(type="ad_hoc_command", id=o.id)
for o in AdHocCommand.objects.filter(inventory=obj, status__in=ACTIVE_STATES)])
if len(active_jobs) > 0:
raise StateConflict({"conflict": _("Resource is being used by running jobs"),
"active_jobs": active_jobs})
raise ActiveJobConflict(active_jobs)
return True
def can_run_ad_hoc_commands(self, obj):
@@ -788,8 +806,7 @@ class GroupAccess(BaseAccess):
active_jobs.extend([dict(type="inventory_update", id=o.id)
for o in InventoryUpdate.objects.filter(inventory_source__in=obj.inventory_sources.all(), status__in=ACTIVE_STATES)])
if len(active_jobs) > 0:
raise StateConflict({"conflict": _("Resource is being used by running jobs"),
"active_jobs": active_jobs})
raise ActiveJobConflict(active_jobs)
return True
def can_start(self, obj, validate_license=True):
@@ -839,8 +856,7 @@ class InventorySourceAccess(BaseAccess):
return False
active_jobs_qs = InventoryUpdate.objects.filter(inventory_source=obj, status__in=ACTIVE_STATES)
if active_jobs_qs.exists():
raise StateConflict({"conflict": _("Resource is being used by running jobs"),
"active_jobs": [dict(type="inventory_update", id=o.id) for o in active_jobs_qs.all()]})
raise ActiveJobConflict([dict(type="inventory_update", id=o.id) for o in active_jobs_qs.all()])
return True
@check_superuser
@@ -930,8 +946,12 @@ class CredentialAccess(BaseAccess):
- I'm a superuser.
- It's a user credential and it's my credential.
- It's a user credential and I'm an admin of an organization where that
user is a member of admin of the organization.
user is a member.
- It's a user credential and I'm a credential_admin of an organization
where that user is a member.
- It's a team credential and I'm an admin of the team's organization.
- It's a team credential and I'm a credential admin of the team's
organization.
- It's a team credential and I'm a member of the team.
I can change/delete when:
- I'm a superuser.
@@ -943,7 +963,8 @@ class CredentialAccess(BaseAccess):
model = Credential
select_related = ('created_by', 'modified_by',)
prefetch_related = ('admin_role', 'use_role', 'read_role',
'admin_role__parents', 'admin_role__members',)
'admin_role__parents', 'admin_role__members',
'credential_type', 'organization')
def filtered_queryset(self):
return self.model.accessible_objects(self.user, 'read_role')
@@ -964,7 +985,8 @@ class CredentialAccess(BaseAccess):
return check_user_access(self.user, Team, 'change', team_obj, None)
if data and data.get('organization', None):
organization_obj = get_object_from_data('organization', Organization, data)
return check_user_access(self.user, Organization, 'change', organization_obj, None)
return any([check_user_access(self.user, Organization, 'change', organization_obj, None),
self.user in organization_obj.credential_admin_role])
return False
@check_superuser
@@ -975,7 +997,7 @@ class CredentialAccess(BaseAccess):
def can_change(self, obj, data):
if not obj:
return False
return self.user in obj.admin_role and self.check_related('organization', Organization, data, obj=obj)
return self.user in obj.admin_role and self.check_related('organization', Organization, data, obj=obj, role_field='credential_admin_role')
def can_delete(self, obj):
# Unassociated credentials may be marked deleted by anyone, though we
@@ -1051,6 +1073,7 @@ class ProjectAccess(BaseAccess):
I can see projects when:
- I am a superuser.
- I am an admin in an organization associated with the project.
- I am a project admin in an organization associated with the project.
- I am a user in an organization associated with the project.
- I am on a team associated with the project.
- I have been explicitly granted permission to run/check jobs using the
@@ -1071,12 +1094,12 @@ class ProjectAccess(BaseAccess):
@check_superuser
def can_add(self, data):
if not data: # So the browseable API will work
return Organization.accessible_objects(self.user, 'admin_role').exists()
return self.check_related('organization', Organization, data, mandatory=True)
return Organization.accessible_objects(self.user, 'project_admin_role').exists()
return self.check_related('organization', Organization, data, role_field='project_admin_role', mandatory=True)
@check_superuser
def can_change(self, obj, data):
if not self.check_related('organization', Organization, data, obj=obj):
if not self.check_related('organization', Organization, data, obj=obj, role_field='project_admin_role'):
return False
return self.user in obj.admin_role
@@ -1090,8 +1113,7 @@ class ProjectAccess(BaseAccess):
active_jobs.extend([dict(type="project_update", id=o.id)
for o in ProjectUpdate.objects.filter(project=obj, status__in=ACTIVE_STATES)])
if len(active_jobs) > 0:
raise StateConflict({"conflict": _("Resource is being used by running jobs"),
"active_jobs": active_jobs})
raise ActiveJobConflict(active_jobs)
return True
@check_superuser
@@ -1124,8 +1146,11 @@ class ProjectUpdateAccess(BaseAccess):
def can_start(self, obj, validate_license=True):
# for relaunching
if obj and obj.project:
return self.user in obj.project.update_role
try:
if obj and obj.project:
return self.user in obj.project.update_role
except ObjectDoesNotExist:
pass
return False
@check_superuser
@@ -1142,7 +1167,11 @@ class JobTemplateAccess(BaseAccess):
model = JobTemplate
select_related = ('created_by', 'modified_by', 'inventory', 'project',
'next_schedule',)
prefetch_related = ('credentials__credential_type',)
prefetch_related = (
'instance_groups',
'credentials__credential_type',
Prefetch('labels', queryset=Label.objects.all().order_by('name')),
)
def filtered_queryset(self):
return self.model.accessible_objects(self.user, 'read_role')
@@ -1152,6 +1181,7 @@ class JobTemplateAccess(BaseAccess):
a user can create a job template if
- they are a superuser
- an org admin of any org that the project is a member of
- if they are a project_admin for any org that the project is a member of
- if they have user or team
based permissions tying the project to the inventory source for the
given action as well as the 'create' deploy permission.
@@ -1265,8 +1295,7 @@ class JobTemplateAccess(BaseAccess):
active_jobs = [dict(type="job", id=o.id)
for o in obj.jobs.filter(status__in=ACTIVE_STATES)]
if len(active_jobs) > 0:
raise StateConflict({"conflict": _("Resource is being used by running jobs"),
"active_jobs": active_jobs})
raise ActiveJobConflict(active_jobs)
return True
@check_superuser
@@ -1305,7 +1334,7 @@ class JobAccess(BaseAccess):
model = Job
select_related = ('created_by', 'modified_by', 'job_template', 'inventory',
'project', 'job_template',)
'project', 'project_update',)
prefetch_related = (
'unified_job_template',
'instance_group',
@@ -1411,7 +1440,7 @@ class JobAccess(BaseAccess):
elif not jt_access:
return False
org_access = obj.inventory and self.user in obj.inventory.organization.admin_role
org_access = obj.inventory and self.user in obj.inventory.organization.inventory_admin_role
project_access = obj.project is None or self.user in obj.project.admin_role
credential_access = all([self.user in cred.use_role for cred in obj.credentials.all()])
@@ -1704,13 +1733,14 @@ class WorkflowJobTemplateAccess(BaseAccess):
Users who are able to create deploy jobs can also run normal and check (dry run) jobs.
'''
if not data: # So the browseable API will work
return Organization.accessible_objects(self.user, 'admin_role').exists()
return Organization.accessible_objects(self.user, 'workflow_admin_role').exists()
# will check this if surveys are added to WFJT
if 'survey_enabled' in data and data['survey_enabled']:
self.check_license(feature='surveys')
return self.check_related('organization', Organization, data, mandatory=True)
return self.check_related('organization', Organization, data, role_field='workflow_admin_role',
mandatory=True)
def can_copy(self, obj):
if self.save_messages:
@@ -1737,7 +1767,8 @@ class WorkflowJobTemplateAccess(BaseAccess):
if missing_inventories:
self.messages['inventories_unable_to_copy'] = missing_inventories
return self.check_related('organization', Organization, {'reference_obj': obj}, mandatory=True)
return self.check_related('organization', Organization, {'reference_obj': obj}, role_field='workflow_admin_role',
mandatory=True)
def can_start(self, obj, validate_license=True):
if validate_license:
@@ -1762,7 +1793,8 @@ class WorkflowJobTemplateAccess(BaseAccess):
if self.user.is_superuser:
return True
return self.check_related('organization', Organization, data, obj=obj) and self.user in obj.admin_role
return (self.check_related('organization', Organization, data, role_field='workflow_admin_role', obj=obj) and
self.user in obj.admin_role)
def can_delete(self, obj):
is_delete_allowed = self.user.is_superuser or self.user in obj.admin_role
@@ -1771,8 +1803,7 @@ class WorkflowJobTemplateAccess(BaseAccess):
active_jobs = [dict(type="workflow_job", id=o.id)
for o in obj.workflow_jobs.filter(status__in=ACTIVE_STATES)]
if len(active_jobs) > 0:
raise StateConflict({"conflict": _("Resource is being used by running jobs"),
"active_jobs": active_jobs})
raise ActiveJobConflict(active_jobs)
return True
@@ -1804,7 +1835,7 @@ class WorkflowJobAccess(BaseAccess):
def can_delete(self, obj):
return (obj.workflow_job_template and
obj.workflow_job_template.organization and
self.user in obj.workflow_job_template.organization.admin_role)
self.user in obj.workflow_job_template.organization.workflow_admin_role)
def get_method_capability(self, method, obj, parent_obj):
if method == 'start':
@@ -1979,6 +2010,64 @@ class JobEventAccess(BaseAccess):
return False
class ProjectUpdateEventAccess(BaseAccess):
'''
I can see project update event records whenever I can access the project update
'''
model = ProjectUpdateEvent
def filtered_queryset(self):
return self.model.objects.filter(
Q(project_update__in=ProjectUpdate.accessible_pk_qs(self.user, 'read_role')))
def can_add(self, data):
return False
def can_change(self, obj, data):
return False
def can_delete(self, obj):
return False
class InventoryUpdateEventAccess(BaseAccess):
'''
I can see inventory update event records whenever I can access the inventory update
'''
model = InventoryUpdateEvent
def filtered_queryset(self):
return self.model.objects.filter(
Q(inventory_update__in=InventoryUpdate.accessible_pk_qs(self.user, 'read_role')))
def can_add(self, data):
return False
def can_change(self, obj, data):
return False
def can_delete(self, obj):
return False
class SystemJobEventAccess(BaseAccess):
'''
I can only see System Job event records if I'm a superuser
'''
model = SystemJobEvent
def can_add(self, data):
return False
def can_change(self, obj, data):
return False
def can_delete(self, obj):
return False
class UnifiedJobTemplateAccess(BaseAccess):
'''
I can see a unified job template whenever I can see the same project,
@@ -2081,13 +2170,9 @@ class ScheduleAccess(BaseAccess):
prefetch_related = ('unified_job_template', 'credentials',)
def filtered_queryset(self):
qs = self.model.objects.all()
unified_pk_qs = UnifiedJobTemplate.accessible_pk_qs(self.user, 'read_role')
inv_src_qs = InventorySource.objects.filter(inventory_id=Inventory._accessible_pk_qs(Inventory, self.user, 'read_role'))
return qs.filter(
Q(unified_job_template_id__in=unified_pk_qs) |
Q(unified_job_template_id__in=inv_src_qs.values_list('pk', flat=True)))
return self.model.objects.filter(
unified_job_template__in=UnifiedJobTemplateAccess(self.user).filtered_queryset()
)
@check_superuser
def can_add(self, data):
@@ -2130,7 +2215,7 @@ class NotificationTemplateAccess(BaseAccess):
def filtered_queryset(self):
return self.model.objects.filter(
Q(organization__in=self.user.admin_of_organizations) |
Q(organization__in=Organization.accessible_objects(self.user, 'notification_admin_role')) |
Q(organization__in=self.user.auditor_of_organizations)
).distinct()
@@ -2138,22 +2223,22 @@ class NotificationTemplateAccess(BaseAccess):
if self.user.is_superuser or self.user.is_system_auditor:
return True
if obj.organization is not None:
if self.user in obj.organization.admin_role or self.user in obj.organization.auditor_role:
if self.user in obj.organization.notification_admin_role or self.user in obj.organization.auditor_role:
return True
return False
@check_superuser
def can_add(self, data):
if not data:
return Organization.accessible_objects(self.user, 'admin_role').exists()
return self.check_related('organization', Organization, data, mandatory=True)
return Organization.accessible_objects(self.user, 'notification_admin_role').exists()
return self.check_related('organization', Organization, data, role_field='notification_admin_role', mandatory=True)
@check_superuser
def can_change(self, obj, data):
if obj.organization is None:
# only superusers are allowed to edit orphan notification templates
return False
return self.check_related('organization', Organization, data, obj=obj, mandatory=True)
return self.check_related('organization', Organization, data, obj=obj, role_field='notification_admin_role', mandatory=True)
def can_admin(self, obj, data):
return self.can_change(obj, data)
@@ -2165,7 +2250,7 @@ class NotificationTemplateAccess(BaseAccess):
def can_start(self, obj, validate_license=True):
if obj.organization is None:
return False
return self.user in obj.organization.admin_role
return self.user in obj.organization.notification_admin_role
class NotificationAccess(BaseAccess):
@@ -2177,7 +2262,7 @@ class NotificationAccess(BaseAccess):
def filtered_queryset(self):
return self.model.objects.filter(
Q(notification_template__organization__in=self.user.admin_of_organizations) |
Q(notification_template__organization__in=Organization.accessible_objects(self.user, 'notification_admin_role')) |
Q(notification_template__organization__in=self.user.auditor_of_organizations)
).distinct()
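
The thread running through this whole file: checks that used to require the organization's blanket admin_role now accept a role_field naming a scoped role (credential_admin_role, project_admin_role, notification_admin_role, workflow_admin_role), letting an organization delegate one resource type without handing out full admin. check_related's real logic lives in BaseAccess; reduced to an assumption-level sketch, the delegation looks like:

def user_can_use_related_org(user, organization, role_field='admin_role'):
    # membership in the named scoped role (or superuser) is sufficient;
    # full organization admin is no longer a prerequisite
    if user.is_superuser:
        return True
    return user in getattr(organization, role_field)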

View File

@@ -5,7 +5,7 @@ import re
from django.utils.translation import ugettext_lazy as _
CLOUD_PROVIDERS = ('azure_rm', 'ec2', 'gce', 'vmware', 'openstack', 'satellite6', 'cloudforms')
CLOUD_PROVIDERS = ('azure_rm', 'ec2', 'gce', 'vmware', 'openstack', 'rhv', 'satellite6', 'cloudforms', 'tower')
SCHEDULEABLE_PROVIDERS = CLOUD_PROVIDERS + ('custom', 'scm',)
PRIVILEGE_ESCALATION_METHODS = [
('sudo', _('Sudo')), ('su', _('Su')), ('pbrun', _('Pbrun')), ('pfexec', _('Pfexec')),

View File

@@ -1,24 +1,36 @@
class AwxTaskError(Exception):
"""Base exception for errors in unified job runs"""
def __init__(self, task, message=None):
# Copyright (c) 2018 Ansible by Red Hat
# All Rights Reserved.
# Celery does not respect exception type when using a serializer different than pickle;
# and awx uses the json serializer
# https://github.com/celery/celery/issues/3586
class _AwxTaskError():
def build_exception(self, task, message=None):
if message is None:
message = "Execution error running {}".format(task.log_format)
super(AwxTaskError, self).__init__(message)
self.task = task
class TaskCancel(AwxTaskError):
"""Canceled flag caused run_pexpect to kill the job run"""
def __init__(self, task, rc):
super(TaskCancel, self).__init__(
task, message="{} was canceled (rc={})".format(task.log_format, rc))
self.rc = rc
e = Exception(message)
e.task = task
e.is_awx_task_error = True
return e
def TaskCancel(self, task, rc):
"""Canceled flag caused run_pexpect to kill the job run"""
message="{} was canceled (rc={})".format(task.log_format, rc)
e = self.build_exception(task, message)
e.rc = rc
e.awx_task_error_type = "TaskCancel"
return e
def TaskError(self, task, rc):
"""Userspace error (non-zero exit code) in run_pexpect subprocess"""
message = "{} encountered an error (rc={}), please see task stdout for details.".format(task.log_format, rc)
e = self.build_exception(task, message)
e.rc = rc
e.awx_task_error_type = "TaskError"
return e
class TaskError(AwxTaskError):
"""Userspace error (non-zero exit code) in run_pexpect subprocess"""
def __init__(self, task, rc):
super(TaskError, self).__init__(
task, message="%s encountered an error (rc=%s), please see task stdout for details.".format(task.log_format, rc))
self.rc = rc
AwxTaskError = _AwxTaskError()
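
The rewrite above works around celery/celery#3586: with the json task serializer, a raised TaskError subclass is reconstructed on the other side as a bare Exception, so isinstance() dispatch silently stops matching. Tagging plain Exception instances with marker attributes keeps dispatch working wherever the exception object itself is still in hand. A hedged sketch of the consuming side:

def classify_task_failure(exc):
    # attribute checks instead of isinstance(): after a JSON round-trip
    # the class identity is unreliable, so the factory stamps markers on
    # a plain Exception and consumers look for those instead
    if not getattr(exc, 'is_awx_task_error', False):
        raise exc  # not produced by AwxTaskError.TaskCancel / .TaskError
    kind = getattr(exc, 'awx_task_error_type', 'TaskError')
    return kind, getattr(exc, 'rc', None)

# raising side, exactly as the factory defines it:
#   raise AwxTaskError.TaskError(task, rc)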

View File

@@ -1,5 +1,4 @@
import base64
import cStringIO
import codecs
import StringIO
import json
@@ -15,7 +14,7 @@ from django.conf import settings
import awx
from awx.main.expect import run
from awx.main.utils import OutputEventFilter
from awx.main.utils import OutputEventFilter, get_system_task_capacity
from awx.main.queue import CallbackQueueDispatcher
logger = logging.getLogger('awx.isolated.manager')
@@ -143,7 +142,7 @@ class IsolatedManager(object):
# if an ssh private key fifo exists, read its contents and delete it
if self.ssh_key_path:
buff = cStringIO.StringIO()
buff = StringIO.StringIO()
with open(self.ssh_key_path, 'r') as fifo:
for line in fifo:
buff.write(line)
@@ -183,7 +182,7 @@ class IsolatedManager(object):
job_timeout=settings.AWX_ISOLATED_LAUNCH_TIMEOUT,
pexpect_timeout=5
)
output = buff.getvalue()
output = buff.getvalue().encode('utf-8')
playbook_logger.info('Isolated job {} dispatch:\n{}'.format(self.instance.id, output))
if status != 'successful':
self.stdout_handle.write(output)
@@ -283,7 +282,7 @@ class IsolatedManager(object):
status = 'failed'
output = ''
rc = None
buff = cStringIO.StringIO()
buff = StringIO.StringIO()
last_check = time.time()
seek = 0
job_timeout = remaining = self.job_timeout
@@ -304,7 +303,7 @@ class IsolatedManager(object):
time.sleep(1)
continue
buff = cStringIO.StringIO()
buff = StringIO.StringIO()
logger.debug('Checking on isolated job {} with `check_isolated.yml`.'.format(self.instance.id))
status, rc = IsolatedManager.run_pexpect(
args, self.awx_playbook_path(), self.management_env, buff,
@@ -314,7 +313,7 @@ class IsolatedManager(object):
pexpect_timeout=5,
proot_cmd=self.proot_cmd
)
output = buff.getvalue()
output = buff.getvalue().encode('utf-8')
playbook_logger.info('Isolated job {} check:\n{}'.format(self.instance.id, output))
path = self.path_to('artifacts', 'stdout')
@@ -356,14 +355,14 @@ class IsolatedManager(object):
}
args = self._build_args('clean_isolated.yml', '%s,' % self.host, extra_vars)
logger.debug('Cleaning up job {} on isolated host with `clean_isolated.yml` playbook.'.format(self.instance.id))
buff = cStringIO.StringIO()
buff = StringIO.StringIO()
timeout = max(60, 2 * settings.AWX_ISOLATED_CONNECTION_TIMEOUT)
status, rc = IsolatedManager.run_pexpect(
args, self.awx_playbook_path(), self.management_env, buff,
idle_timeout=timeout, job_timeout=timeout,
pexpect_timeout=5
)
output = buff.getvalue()
output = buff.getvalue().encode('utf-8')
playbook_logger.info('Isolated job {} cleanup:\n{}'.format(self.instance.id, output))
if status != 'successful':
@@ -382,10 +381,14 @@ class IsolatedManager(object):
logger.error(err_template.format(instance.hostname, instance.version, awx_application_version))
instance.capacity = 0
else:
if instance.capacity == 0 and task_result['capacity']:
if instance.capacity == 0 and task_result['capacity_cpu']:
logger.warning('Isolated instance {} has re-joined.'.format(instance.hostname))
instance.capacity = int(task_result['capacity'])
instance.save(update_fields=['capacity', 'version', 'modified'])
instance.cpu_capacity = int(task_result['capacity_cpu'])
instance.mem_capacity = int(task_result['capacity_mem'])
instance.capacity = get_system_task_capacity(scale=instance.capacity_adjustment,
cpu_capacity=int(task_result['capacity_cpu']),
mem_capacity=int(task_result['capacity_mem']))
instance.save(update_fields=['cpu_capacity', 'mem_capacity', 'capacity', 'version', 'modified'])
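
Isolated nodes now report CPU- and memory-derived capacities separately, and the controller blends them through get_system_task_capacity with the instance's capacity_adjustment. The exact formula belongs to awx.main.utils; a plausible reading (an assumption, not a quote of that function) is a linear interpolation between the two figures:

def blended_capacity(cpu_capacity, mem_capacity, scale):
    # scale (capacity_adjustment) slides the instance between
    # memory-bound sizing (0.0) and CPU-bound sizing (1.0) --
    # an assumed reading of get_system_task_capacity
    return int(mem_capacity + scale * (cpu_capacity - mem_capacity))

blended_capacity(cpu_capacity=16, mem_capacity=40, scale=0.5)  # -> 28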
@classmethod
def health_check(cls, instance_qs, awx_application_version):
@@ -406,14 +409,14 @@ class IsolatedManager(object):
env = cls._base_management_env()
env['ANSIBLE_STDOUT_CALLBACK'] = 'json'
buff = cStringIO.StringIO()
buff = StringIO.StringIO()
timeout = max(60, 2 * settings.AWX_ISOLATED_CONNECTION_TIMEOUT)
status, rc = IsolatedManager.run_pexpect(
args, cls.awx_playbook_path(), env, buff,
idle_timeout=timeout, job_timeout=timeout,
pexpect_timeout=5
)
output = buff.getvalue()
output = buff.getvalue().encode('utf-8')
buff.close()
try:
@@ -429,7 +432,7 @@ class IsolatedManager(object):
task_result = result['plays'][0]['tasks'][0]['hosts'][instance.hostname]
except (KeyError, IndexError):
task_result = {}
if 'capacity' in task_result:
if 'capacity_cpu' in task_result and 'capacity_mem' in task_result:
cls.update_capacity(instance, task_result, awx_application_version)
elif instance.capacity == 0:
logger.debug('Isolated instance {} previously marked as lost, could not re-join.'.format(
@@ -445,7 +448,7 @@ class IsolatedManager(object):
instance.hostname, instance.modified))
@staticmethod
def wrap_stdout_handle(instance, private_data_dir, stdout_handle, event_data_key='job_id'):
def get_stdout_handle(instance, private_data_dir, event_data_key='job_id'):
dispatcher = CallbackQueueDispatcher()
def job_event_callback(event_data):
@@ -463,7 +466,7 @@ class IsolatedManager(object):
event_data.get('event', ''), event_data['uuid'], instance.id, event_data))
dispatcher.dispatch(event_data)
return OutputEventFilter(stdout_handle, job_event_callback)
return OutputEventFilter(job_event_callback)
def run(self, instance, host, private_data_dir, proot_temp_dir):
"""

View File

@@ -47,7 +47,7 @@ def open_fifo_write(path, data):
This blocks the thread until an external process (such as ssh-agent)
reads data from the pipe.
'''
os.mkfifo(path, 0600)
os.mkfifo(path, 0o600)
thread.start_new_thread(lambda p, d: open(p, 'w').write(d), (path, data))
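
The mode fix matters because a bare leading-zero octal literal (0600) is a syntax error on Python 3; the FIFO technique itself is unchanged. The secret only ever exists in a kernel pipe with owner-only permissions, and the write blocks until a reader (ssh-add, in AWX's case) drains it, hence the background thread. A self-contained equivalent using the Python 3 threading module:

import os
import threading

def open_fifo_write(path, data):
    os.mkfifo(path, 0o600)  # owner read/write only, never wider
    # open(path, 'w') blocks until a reader attaches to the FIFO,
    # so the write happens on a background thread
    threading.Thread(target=lambda: open(path, 'w').write(data)).start()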
@@ -99,7 +99,6 @@ def run_pexpect(args, cwd, env, logfile,
password_patterns = expect_passwords.keys()
password_values = expect_passwords.values()
logfile_pos = logfile.tell()
child = pexpect.spawn(
args[0], args[1:], cwd=cwd, env=env, ignore_sighup=True,
encoding='utf-8', echo=False,
@@ -116,8 +115,6 @@ def run_pexpect(args, cwd, env, logfile,
password = password_values[result_id]
if password is not None:
child.sendline(password)
if logfile_pos != logfile.tell():
logfile_pos = logfile.tell()
last_stdout_update = time.time()
if cancelled_callback:
try:

View File

@@ -6,6 +6,7 @@ import copy
import json
import re
import six
import urllib
from jinja2 import Environment, StrictUndefined
from jinja2.exceptions import UndefinedError
@@ -73,7 +74,7 @@ class JSONField(upstream_JSONField):
class JSONBField(upstream_JSONBField):
def get_prep_lookup(self, lookup_type, value):
if isinstance(value, basestring) and value == "null":
if isinstance(value, six.string_types) and value == "null":
return 'null'
return super(JSONBField, self).get_prep_lookup(lookup_type, value)
@@ -352,9 +353,10 @@ class SmartFilterField(models.TextField):
# https://docs.python.org/2/library/stdtypes.html#truth-value-testing
if not value:
return None
value = urllib.unquote(value)
try:
SmartFilter().query_from_string(value)
except RuntimeError, e:
except RuntimeError as e:
raise models.base.ValidationError(e)
return super(SmartFilterField, self).get_prep_value(value)
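
Smart filter strings arrive percent-encoded from the API layer, so the field now unquotes them before handing them to SmartFilter().query_from_string(); the except-as change is the usual Python 3 syntax fix. A tiny illustration, using the six spelling so it runs on either interpreter:

from six.moves.urllib.parse import unquote

unquote('name__icontains=%22web%22')
# -> 'name__icontains="web"'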
@@ -504,6 +506,12 @@ class CredentialInputField(JSONSchemaField):
v != '$encrypted$',
model_instance.pk
]):
if not isinstance(getattr(model_instance, k), six.string_types):
raise django_exceptions.ValidationError(
_('secret values must be of type string, not {}').format(type(v).__name__),
code='invalid',
params={'value': v},
)
decrypted_values[k] = utils.decrypt_field(model_instance, k)
else:
decrypted_values[k] = v
@@ -693,11 +701,10 @@ class CredentialTypeInjectorField(JSONSchemaField):
'properties': {
'file': {
'type': 'object',
'properties': {
'template': {'type': 'string'},
'patternProperties': {
'^template(\.[a-zA-Z_]+[a-zA-Z0-9_]*)?$': {'type': 'string'},
},
'additionalProperties': False,
'required': ['template'],
},
'env': {
'type': 'object',
@@ -747,8 +754,22 @@ class CredentialTypeInjectorField(JSONSchemaField):
class TowerNamespace:
filename = None
valid_namespace['tower'] = TowerNamespace()
# ensure either single file or multi-file syntax is used (but not both)
template_names = [x for x in value.get('file', {}).keys() if x.startswith('template')]
if 'template' in template_names and len(template_names) > 1:
raise django_exceptions.ValidationError(
_('Must use multi-file syntax when injecting multiple files'),
code='invalid',
params={'value': value},
)
if 'template' not in template_names:
valid_namespace['tower'].filename = TowerNamespace()
for template_name in template_names:
template_name = template_name.split('.')[1]
setattr(valid_namespace['tower'].filename, template_name, 'EXAMPLE')
for type_, injector in value.items():
for key, tmpl in injector.items():
try:
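
The schema swap above (a fixed 'template' property giving way to patternProperties) is what enables multi-file injectors: each file.template.<suffix> entry is rendered to its own temp file and addressed as tower.filename.<suffix>, while the single-file spelling keeps working with a bare tower.filename. A hedged example of the two shapes this validator accepts (key names mirror the hunk; the template values are illustrative):

# single-file syntax: one rendered file, referenced as tower.filename
single = {
    'file': {'template': '{{ private_key }}'},
    'env': {'MY_KEY_FILE': '{{ tower.filename }}'},
}

# multi-file syntax: template.cert / template.key are referenced as
# tower.filename.cert / tower.filename.key (mixing the two is rejected)
multi = {
    'file': {
        'template.cert': '{{ client_cert }}',
        'template.key': '{{ client_key }}',
    },
    'env': {
        'MY_CERT_FILE': '{{ tower.filename.cert }}',
        'MY_KEY_FILE': '{{ tower.filename.key }}',
    },
}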

View File

@@ -5,6 +5,8 @@
import datetime
import logging
import six
# Django
from django.core.management.base import BaseCommand
from django.utils.timezone import now
@@ -41,7 +43,7 @@ class Command(BaseCommand):
n_deleted_items = 0
pks_to_delete = set()
for asobj in ActivityStream.objects.iterator():
asobj_disp = '"%s" id: %s' % (unicode(asobj), asobj.id)
asobj_disp = '"%s" id: %s' % (six.text_type(asobj), asobj.id)
if asobj.timestamp >= self.cutoff:
if self.dry_run:
self.logger.info("would skip %s" % asobj_disp)

View File

@@ -5,6 +5,8 @@
import datetime
import logging
import six
# Django
from django.core.management.base import BaseCommand, CommandError
from django.db import transaction
@@ -66,7 +68,7 @@ class Command(BaseCommand):
jobs = Job.objects.filter(created__lt=self.cutoff)
for job in jobs.iterator():
job_display = '"%s" (%d host summaries, %d events)' % \
(unicode(job),
(six.text_type(job),
job.job_host_summaries.count(), job.job_events.count())
if job.status in ('pending', 'waiting', 'running'):
action_text = 'would skip' if self.dry_run else 'skipping'
@@ -87,7 +89,7 @@ class Command(BaseCommand):
ad_hoc_commands = AdHocCommand.objects.filter(created__lt=self.cutoff)
for ad_hoc_command in ad_hoc_commands.iterator():
ad_hoc_command_display = '"%s" (%d events)' % \
(unicode(ad_hoc_command),
(six.text_type(ad_hoc_command),
ad_hoc_command.ad_hoc_command_events.count())
if ad_hoc_command.status in ('pending', 'waiting', 'running'):
action_text = 'would skip' if self.dry_run else 'skipping'
@@ -107,7 +109,7 @@ class Command(BaseCommand):
skipped, deleted = 0, 0
project_updates = ProjectUpdate.objects.filter(created__lt=self.cutoff)
for pu in project_updates.iterator():
pu_display = '"%s" (type %s)' % (unicode(pu), unicode(pu.launch_type))
pu_display = '"%s" (type %s)' % (six.text_type(pu), six.text_type(pu.launch_type))
if pu.status in ('pending', 'waiting', 'running'):
action_text = 'would skip' if self.dry_run else 'skipping'
self.logger.debug('%s %s project update %s', action_text, pu.status, pu_display)
@@ -130,7 +132,7 @@ class Command(BaseCommand):
skipped, deleted = 0, 0
inventory_updates = InventoryUpdate.objects.filter(created__lt=self.cutoff)
for iu in inventory_updates.iterator():
iu_display = '"%s" (source %s)' % (unicode(iu), unicode(iu.source))
iu_display = '"%s" (source %s)' % (six.text_type(iu), six.text_type(iu.source))
if iu.status in ('pending', 'waiting', 'running'):
action_text = 'would skip' if self.dry_run else 'skipping'
self.logger.debug('%s %s inventory update %s', action_text, iu.status, iu_display)
@@ -153,7 +155,7 @@ class Command(BaseCommand):
skipped, deleted = 0, 0
system_jobs = SystemJob.objects.filter(created__lt=self.cutoff)
for sj in system_jobs.iterator():
sj_display = '"%s" (type %s)' % (unicode(sj), unicode(sj.job_type))
sj_display = '"%s" (type %s)' % (six.text_type(sj), six.text_type(sj.job_type))
if sj.status in ('pending', 'waiting', 'running'):
action_text = 'would skip' if self.dry_run else 'skipping'
self.logger.debug('%s %s system_job %s', action_text, sj.status, sj_display)
@@ -183,7 +185,7 @@ class Command(BaseCommand):
workflow_jobs = WorkflowJob.objects.filter(created__lt=self.cutoff)
for workflow_job in workflow_jobs.iterator():
workflow_job_display = '"{}" ({} nodes)'.format(
unicode(workflow_job),
six.text_type(workflow_job),
workflow_job.workflow_nodes.count())
if workflow_job.status in ('pending', 'waiting', 'running'):
action_text = 'would skip' if self.dry_run else 'skipping'
@@ -204,7 +206,7 @@ class Command(BaseCommand):
notifications = Notification.objects.filter(created__lt=self.cutoff)
for notification in notifications.iterator():
notification_display = '"{}" (started {}, {} type, {} sent)'.format(
unicode(notification), unicode(notification.created),
six.text_type(notification), six.text_type(notification.created),
notification.notification_type, notification.notifications_sent)
if notification.status in ('pending',):
action_text = 'would skip' if self.dry_run else 'skipping'
@@ -246,4 +248,3 @@ class Command(BaseCommand):
self.logger.log(99, '%s: %d would be deleted, %d would be skipped.', m.replace('_', ' '), deleted, skipped)
else:
self.logger.log(99, '%s: %d deleted, %d skipped.', m.replace('_', ' '), deleted, skipped)

View File

@@ -17,7 +17,7 @@ class Command(BaseCommand):
def handle(self, *args, **kwargs):
if getattr(settings, 'AWX_ISOLATED_PRIVATE_KEY', False):
print settings.AWX_ISOLATED_PUBLIC_KEY
print(settings.AWX_ISOLATED_PUBLIC_KEY)
return
key = rsa.generate_private_key(
@@ -41,4 +41,4 @@ class Command(BaseCommand):
) + " generated-by-awx@%s" % datetime.datetime.utcnow().isoformat()
)
pemfile.save()
print pemfile.value
print(pemfile.value)

View File

@@ -173,6 +173,7 @@ class AnsibleInventoryLoader(object):
def load(self):
base_args = self.get_base_args()
logger.info('Reading Ansible inventory source: %s', self.source)
data = self.command_to_json(base_args + ['--list'])
# TODO: remove after we run custom scripts through ansible-inventory
@@ -225,6 +226,7 @@ def load_inventory_source(source, group_filter_re=None,
'''
# Sanity check: We sanitize these module names for our API but Ansible proper doesn't follow
# good naming conventions
source = source.replace('rhv.py', 'ovirt4.py')
source = source.replace('satellite6.py', 'foreman.py')
source = source.replace('vmware.py', 'vmware_inventory.py')
if not os.path.exists(source):
@@ -600,27 +602,20 @@ class Command(BaseCommand):
def _update_inventory(self):
'''
Update/overwrite variables from "all" group. If importing from a
cloud source attached to a specific group, variables will be set on
the base group, otherwise they will be set on the whole inventory.
Update inventory variables from "all" group.
'''
# FIXME: figure out how "all" variables are handled in the new inventory source system
# TODO: We disable variable overwrite here in case user-defined inventory variables get
# mangled. But we still need to figure out a better way of processing multiple inventory
# update variables mixing with each other.
all_obj = self.inventory
all_name = 'inventory'
db_variables = all_obj.variables_dict
if self.overwrite_vars:
db_variables = self.all_group.variables
else:
db_variables.update(self.all_group.variables)
db_variables.update(self.all_group.variables)
if db_variables != all_obj.variables_dict:
all_obj.variables = json.dumps(db_variables)
all_obj.save(update_fields=['variables'])
if self.overwrite_vars:
logger.info('%s variables replaced from "all" group', all_name.capitalize())
else:
logger.info('%s variables updated from "all" group', all_name.capitalize())
logger.info('Inventory variables updated from "all" group')
else:
logger.info('%s variables unmodified', all_name.capitalize())
logger.info('Inventory variables unmodified')
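
The rewrite drops the overwrite_vars branch: imported "all"-group variables are now always merged over the inventory's stored variables rather than ever replacing them wholesale. In plain dict terms:

db_variables = {'keep_me': 1, 'shared': 'old'}
imported = {'shared': 'new', 'added': True}

db_variables.update(imported)
# -> {'keep_me': 1, 'shared': 'new', 'added': True}
# imported values win on conflict; unrelated stored keys survive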
def _create_update_groups(self):
'''
@@ -909,7 +904,6 @@ class Command(BaseCommand):
new_count = Host.objects.active_count()
if time_remaining <= 0 and not license_info.get('demo', False):
logger.error(LICENSE_EXPIRED_MESSAGE)
raise CommandError("License has expired!")
if free_instances < 0:
d = {
'new_count': new_count,
@@ -919,7 +913,6 @@ class Command(BaseCommand):
logger.error(DEMO_LICENSE_MESSAGE % d)
else:
logger.error(LICENSE_MESSAGE % d)
raise CommandError('License count exceeded!')
def mark_license_failure(self, save=True):
self.inventory_update.license_error = True

View File

@@ -17,6 +17,10 @@ class Command(BaseCommand):
help='Comma-Delimited Hosts to add to the Queue')
parser.add_argument('--controller', dest='controller', type=str,
default='', help='The controlling group (makes this an isolated group)')
parser.add_argument('--instance_percent', dest='instance_percent', type=int, default=0,
help='The percentage of active instances that will be assigned to this group'),
parser.add_argument('--instance_minimum', dest='instance_minimum', type=int, default=0,
help='The minimum number of instances that will be retained for this group from the available instances')
def handle(self, **options):
queuename = options.get('queuename')
@@ -38,7 +42,9 @@ class Command(BaseCommand):
changed = True
else:
print("Creating instance group {}".format(queuename))
ig = InstanceGroup(name=queuename)
ig = InstanceGroup(name=queuename,
policy_instance_percentage=options.get('instance_percent'),
policy_instance_minimum=options.get('instance_minimum'))
if control_ig:
ig.controller = control_ig
ig.save()
@@ -60,5 +66,7 @@ class Command(BaseCommand):
sys.exit(1)
else:
print("Instance already registered {}".format(instance[0].hostname))
ig.policy_instance_list = instance_list
ig.save()
if changed:
print('(changed: True)')
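
With the two new flags, registering a queue also pins its scaling policy. A hedged programmatic invocation -- option names are taken from the dest= values above, and 'hostnames' is inferred from its help text since that add_argument line is outside the hunk:

from django.core.management import call_command

call_command(
    'register_queue',
    queuename='thrash',
    hostnames='iso1.example.org,iso2.example.org',
    controller='tower',    # non-empty controller makes this an isolated group
    instance_percent=50,   # stored as policy_instance_percentage
    instance_minimum=1,    # stored as policy_instance_minimum
)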

View File

@@ -12,11 +12,17 @@ from awx.main.models import (
UnifiedJob,
Job,
AdHocCommand,
ProjectUpdate,
InventoryUpdate,
SystemJob
)
from awx.main.consumers import emit_channel_notification
from awx.api.serializers import (
JobEventWebSocketSerializer,
AdHocCommandEventWebSocketSerializer,
ProjectUpdateEventWebSocketSerializer,
InventoryUpdateEventWebSocketSerializer,
SystemJobEventWebSocketSerializer
)
@@ -60,7 +66,16 @@ class ReplayJobEvents():
return self.replay_elapsed().total_seconds() - (self.recording_elapsed(created).total_seconds() * (1.0 / speed))
def get_job_events(self, job):
job_events = job.job_events.order_by('created')
if type(job) is Job:
job_events = job.job_events.order_by('created')
elif type(job) is AdHocCommand:
job_events = job.ad_hoc_command_events.order_by('created')
elif type(job) is ProjectUpdate:
job_events = job.project_update_events.order_by('created')
elif type(job) is InventoryUpdate:
job_events = job.inventory_update_events.order_by('created')
elif type(job) is SystemJob:
job_events = job.system_job_events.order_by('created')
if job_events.count() == 0:
raise RuntimeError("No events for job id {}".format(job.id))
return job_events
@@ -70,6 +85,12 @@ class ReplayJobEvents():
return JobEventWebSocketSerializer
elif type(job) is AdHocCommand:
return AdHocCommandEventWebSocketSerializer
elif type(job) is ProjectUpdate:
return ProjectUpdateEventWebSocketSerializer
elif type(job) is InventoryUpdate:
return InventoryUpdateEventWebSocketSerializer
elif type(job) is SystemJob:
return SystemJobEventWebSocketSerializer
else:
raise RuntimeError("Job is of type {} and replay is not yet supported.".format(type(job)))
sys.exit(1)

View File

@@ -3,13 +3,14 @@
# Python
import logging
import os
import signal
import time
from uuid import UUID
from multiprocessing import Process
from multiprocessing import Queue as MPQueue
from Queue import Empty as QueueEmpty
from Queue import Full as QueueFull
import os
from kombu import Connection, Exchange, Queue
from kombu.mixins import ConsumerMixin
@@ -18,11 +19,13 @@ from kombu.mixins import ConsumerMixin
from django.conf import settings
from django.core.management.base import BaseCommand
from django.db import connection as django_connection
from django.db import DatabaseError
from django.db import DatabaseError, OperationalError
from django.db.utils import InterfaceError, InternalError
from django.core.cache import cache as django_cache
# AWX
from awx.main.models import * # noqa
from awx.main.consumers import emit_channel_notification
logger = logging.getLogger('awx.main.commands.run_callback_receiver')
@@ -39,6 +42,9 @@ class WorkerSignalHandler:
class CallbackBrokerWorker(ConsumerMixin):
MAX_RETRIES = 2
def __init__(self, connection, use_workers=True):
self.connection = connection
self.worker_queues = []
@@ -123,8 +129,17 @@ class CallbackBrokerWorker(ConsumerMixin):
logger.error("Exception on worker thread, restarting: " + str(e))
continue
try:
if 'job_id' not in body and 'ad_hoc_command_id' not in body:
raise Exception('Payload does not have a job_id or ad_hoc_command_id')
event_map = {
'job_id': JobEvent,
'ad_hoc_command_id': AdHocCommandEvent,
'project_update_id': ProjectUpdateEvent,
'inventory_update_id': InventoryUpdateEvent,
'system_job_id': SystemJobEvent,
}
if not any([key in body for key in event_map]):
raise Exception('Payload does not have a job identifier')
if settings.DEBUG:
from pygments import highlight
from pygments.lexers import PythonLexer
@@ -132,14 +147,51 @@ class CallbackBrokerWorker(ConsumerMixin):
from pprint import pformat
logger.info('Body: {}'.format(
highlight(pformat(body, width=160), PythonLexer(), Terminal256Formatter(style='friendly'))
))
try:
if 'job_id' in body:
JobEvent.create_from_data(**body)
elif 'ad_hoc_command_id' in body:
AdHocCommandEvent.create_from_data(**body)
except DatabaseError as e:
logger.error('Database Error Saving Job Event: {}'.format(e))
)[:1024 * 4])
def _save_event_data():
for key, cls in event_map.items():
if key in body:
cls.create_from_data(**body)
job_identifier = 'unknown job'
for key in event_map.keys():
if key in body:
job_identifier = body[key]
break
if body.get('event') == 'EOF':
# EOF events are sent when stdout for the running task is
# closed. don't actually persist them to the database; we
# just use them to report `summary` websocket events as an
# approximation for when a job is "done"
emit_channel_notification(
'jobs-summary',
dict(group_name='jobs', unified_job_id=job_identifier)
)
continue
retries = 0
while retries <= self.MAX_RETRIES:
try:
_save_event_data()
break
except (OperationalError, InterfaceError, InternalError) as e:
if retries >= self.MAX_RETRIES:
logger.exception('Worker could not re-establish database connectivity, shutting down gracefully: Job {}'.format(job_identifier))
os.kill(os.getppid(), signal.SIGINT)
return
delay = 60 * retries
logger.exception('Database Error Saving Job Event, retry #{i} in {delay} seconds:'.format(
i=retries + 1,
delay=delay
))
django_connection.close()
time.sleep(delay)
retries += 1
except DatabaseError as e:
logger.exception('Database Error Saving Job Event for Job {}'.format(job_identifier))
break
except Exception as exc:
import traceback
tb = traceback.format_exc()
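
The worker now separates transient connectivity failures (OperationalError, InterfaceError, InternalError) from other database errors: the former are retried up to MAX_RETRIES times with a growing delay and a freshly opened connection, and only after exhausting retries does the worker SIGINT its parent to shut down gracefully. The control flow, reduced to a standalone sketch:

import time

class TransientDBError(Exception):
    """Stand-in for OperationalError / InterfaceError / InternalError."""

def save_with_retries(save, reconnect, max_retries=2):
    retries = 0
    while retries <= max_retries:
        try:
            save()
            return True
        except TransientDBError:
            if retries >= max_retries:
                return False          # caller escalates (AWX SIGINTs parent)
            time.sleep(60 * retries)  # immediate retry, then 60s, then 120s
            reconnect()               # drop the dead connection first
            retries += 1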

View File

@@ -0,0 +1,49 @@
import os
import shutil
import subprocess
import sys
import tempfile
from optparse import make_option
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from awx.main.expect import run
class Command(BaseCommand):
"""Tests SSH connectivity between a controller and target isolated node"""
help = 'Tests SSH connectivity between a controller and target isolated node'
option_list = BaseCommand.option_list + (
make_option('--hostname', dest='hostname', type='string',
help='Hostname of an isolated node'),
)
def handle(self, *args, **options):
hostname = options.get('hostname')
if not hostname:
raise CommandError("--hostname is a required argument")
try:
path = tempfile.mkdtemp(prefix='awx_isolated_ssh', dir=settings.AWX_PROOT_BASE_PATH)
args = [
'ansible', 'all', '-i', '{},'.format(hostname), '-u',
settings.AWX_ISOLATED_USERNAME, '-T5', '-m', 'shell',
'-a', 'hostname', '-vvv'
]
if all([
getattr(settings, 'AWX_ISOLATED_KEY_GENERATION', False) is True,
getattr(settings, 'AWX_ISOLATED_PRIVATE_KEY', None)
]):
ssh_key_path = os.path.join(path, '.isolated')
ssh_auth_sock = os.path.join(path, 'ssh_auth.sock')
run.open_fifo_write(ssh_key_path, settings.AWX_ISOLATED_PRIVATE_KEY)
args = run.wrap_args_with_ssh_agent(args, ssh_key_path, ssh_auth_sock)
try:
print(' '.join(args))
subprocess.check_call(args)
except subprocess.CalledProcessError as e:
sys.exit(e.returncode)
finally:
shutil.rmtree(path)
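
A hedged invocation of the new command, the programmatic twin of awx-manage test_isolated_connection --hostname=... (the hostname is a placeholder):

from django.core.management import call_command

# runs `ansible all -i 'iso1.example.org,' -m shell -a hostname` from the
# controller, wrapped in an ssh-agent when isolated key generation is enabled
call_command('test_isolated_connection', hostname='iso1.example.org')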

View File

@@ -2,12 +2,9 @@
# All Rights Reserved.
import sys
from datetime import timedelta
import logging
from django.db import models
from django.utils.timezone import now
from django.db.models import Sum
from django.conf import settings
from awx.main.utils.filters import SmartFilter
@@ -21,11 +18,15 @@ class HostManager(models.Manager):
"""Custom manager class for Hosts model."""
def active_count(self):
"""Return count of active, unique hosts for licensing."""
try:
return self.order_by('name').distinct('name').count()
except NotImplementedError: # For unit tests only, SQLite doesn't support distinct('name')
return len(set(self.values_list('name', flat=True)))
"""Return count of active, unique hosts for licensing.
Construction of query involves:
- remove any ordering specified in model's Meta
- Exclude hosts sourced from another Tower
- Restrict the query to only return the name column
- Only consider results that are unique
- Return the count of this query
"""
return self.order_by().exclude(inventory_sources__source='tower').values('name').distinct().count()
def get_queryset(self):
"""When the parent instance of the host query set has a `kind=smart` and a `host_filter`
@@ -89,11 +90,6 @@ class InstanceManager(models.Manager):
"""Return count of active Tower nodes for licensing."""
return self.all().count()
def total_capacity(self):
sumval = self.filter(modified__gte=now() - timedelta(seconds=settings.AWX_ACTIVE_NODE_TIME)) \
.aggregate(total_capacity=Sum('capacity'))['total_capacity']
return max(50, sumval)
def my_role(self):
# NOTE: TODO: Likely to repurpose this once standalone ramparts are a thing
return "tower"

View File

@@ -5,6 +5,10 @@ import logging
import threading
import uuid
import six
import time
import cProfile
import pstats
import os
from django.conf import settings
from django.contrib.auth.models import User
@@ -25,6 +29,40 @@ from awx.conf import fields, register
logger = logging.getLogger('awx.main.middleware')
analytics_logger = logging.getLogger('awx.analytics.activity_stream')
perf_logger = logging.getLogger('awx.analytics.performance')
class TimingMiddleware(threading.local):
dest = '/var/lib/awx/profile'
def process_request(self, request):
self.start_time = time.time()
if settings.AWX_REQUEST_PROFILE:
self.prof = cProfile.Profile()
self.prof.enable()
def process_response(self, request, response):
if not hasattr(self, 'start_time'): # some tools may not invoke process_request
return response
total_time = time.time() - self.start_time
response['X-API-Total-Time'] = '%0.3fs' % total_time
if settings.AWX_REQUEST_PROFILE:
self.prof.disable()
cprofile_file = self.save_profile_file(request)
response['cprofile_file'] = cprofile_file
perf_logger.info('api response times', extra=dict(python_objects=dict(request=request, response=response)))
return response
def save_profile_file(self, request):
if not os.path.isdir(self.dest):
os.makedirs(self.dest)
filename = '%.3fs-%s' % (pstats.Stats(self.prof).total_tt, uuid.uuid4())
filepath = os.path.join(self.dest, filename)
with open(filepath, 'w') as f:
f.write('%s %s\n' % (request.method, request.get_full_path()))
pstats.Stats(self.prof, stream=f).sort_stats('cumulative').print_stats()
return filepath
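
Every API response now carries an X-API-Total-Time header, and when AWX_REQUEST_PROFILE is enabled the path of the cProfile dump comes back in a cprofile_file header. A hedged client-side check (the URL is a placeholder):

import requests

resp = requests.get('https://awx.example.org/api/v2/ping/')
print(resp.headers.get('X-API-Total-Time'))  # e.g. '0.034s'
print(resp.headers.get('cprofile_file'))     # set only while profiling is on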
class ActivityStreamMiddleware(threading.local):

View File

@@ -8,14 +8,9 @@ from __future__ import unicode_literals
from django.db import migrations, models
from django.conf import settings
import awx.main.fields
import jsonfield.fields
def update_dashed_host_variables(apps, schema_editor):
Host = apps.get_model('main', 'Host')
for host in Host.objects.filter(variables='---'):
host.variables = ''
host.save()
import _squashed
from _squashed_30 import SQUASHED_30
class Migration(migrations.Migration):
@@ -27,13 +22,7 @@ class Migration(migrations.Migration):
(b'main', '0025_v300_update_rbac_parents'),
(b'main', '0026_v300_credential_unique'),
(b'main', '0027_v300_team_migrations'),
(b'main', '0028_v300_org_team_cascade'),
(b'main', '0029_v302_add_ask_skip_tags'),
(b'main', '0030_v302_job_survey_passwords'),
(b'main', '0031_v302_migrate_survey_passwords'),
(b'main', '0032_v302_credential_permissions_update'),
(b'main', '0033_v303_v245_host_variable_fix'),]
(b'main', '0028_v300_org_team_cascade')] + _squashed.replaces(SQUASHED_30, applied=True)
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
@@ -130,27 +119,4 @@ class Migration(migrations.Migration):
field=models.ForeignKey(related_name='teams', to='main.Organization'),
preserve_default=False,
),
# add ask skip tags
migrations.AddField(
model_name='jobtemplate',
name='ask_skip_tags_on_launch',
field=models.BooleanField(default=False),
),
# job survery passwords
migrations.AddField(
model_name='job',
name='survey_passwords',
field=jsonfield.fields.JSONField(default={}, editable=False, blank=True),
),
# RBAC credential permission updates
migrations.AlterField(
model_name='credential',
name='admin_role',
field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'singleton:system_administrator', b'organization.admin_role'], to='main.Role', null=b'True'),
),
migrations.AlterField(
model_name='credential',
name='use_role',
field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'admin_role'], to='main.Role', null=b'True'),
),
]
] + _squashed.operations(SQUASHED_30, applied=True)

View File

@@ -8,6 +8,9 @@ import django.db.models.deletion
import awx.main.models.workflow
import awx.main.fields
import _squashed
from _squashed_30 import SQUASHED_30
class Migration(migrations.Migration):
@@ -15,11 +18,11 @@ class Migration(migrations.Migration):
('main', '0003_squashed_v300_v303_updates'),
]
replaces = [
replaces = _squashed.replaces(SQUASHED_30) + [
(b'main', '0034_v310_release'),
]
operations = [
operations = _squashed.operations(SQUASHED_30) + [
# Create ChannelGroup table
migrations.CreateModel(
name='ChannelGroup',

View File

@@ -1,7 +1,9 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
from django.db import migrations, models
import _squashed
from _squashed_31 import SQUASHED_31
class Migration(migrations.Migration):
@@ -10,28 +12,5 @@ class Migration(migrations.Migration):
('main', '0004_squashed_v310_release'),
]
replaces = [
(b'main', '0035_v310_remove_tower_settings'),
]
operations = [
# Remove Tower settings, these settings are now in separate awx.conf app.
migrations.RemoveField(
model_name='towersettings',
name='user',
),
migrations.DeleteModel(
name='TowerSettings',
),
migrations.AlterField(
model_name='project',
name='scm_type',
field=models.CharField(default=b'', choices=[(b'', 'Manual'), (b'git', 'Git'), (b'hg', 'Mercurial'), (b'svn', 'Subversion'), (b'insights', 'Red Hat Insights')], max_length=8, blank=True, help_text='Specifies the source control system used to store the project.', verbose_name='SCM Type'),
),
migrations.AlterField(
model_name='projectupdate',
name='scm_type',
field=models.CharField(default=b'', choices=[(b'', 'Manual'), (b'git', 'Git'), (b'hg', 'Mercurial'), (b'svn', 'Subversion'), (b'insights', 'Red Hat Insights')], max_length=8, blank=True, help_text='Specifies the source control system used to store the project.', verbose_name='SCM Type'),
),
]
replaces = _squashed.replaces(SQUASHED_31)
operations = _squashed.operations(SQUASHED_31)
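
All three migration rewrites above trade hand-maintained replaces/operations lists for helpers driven by the SQUASHED_30/SQUASHED_31 tables, so each squashed migration is declared exactly once. The real helpers live in awx/main/migrations/_squashed.py; the following shape is a hypothetical reconstruction for illustration only:

# hypothetical: the real SQUASHED_30 is defined in _squashed_30.py
SQUASHED_30 = {
    '0029_v302_add_ask_skip_tags': [],  # migration name -> its operations
    '0030_v302_job_survey_passwords': [],
}

def replaces(squashed, applied=False):
    # the `replaces` entries a squashing migration must declare
    return [(b'main', name) for name in squashed]

def operations(squashed, applied=False):
    # flatten every squashed migration's operations into one list;
    # `applied` selects the slice to emit (semantics per the real module)
    ops = []
    for migration_ops in squashed.values():
        ops.extend(migration_ops)
    return ops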

Some files were not shown because too many files have changed in this diff.