mirror of
https://github.com/ansible/awx.git
synced 2026-02-06 20:14:44 -03:30
Compare commits
29 Commits
| SHA1 |
|---|
| b75f8ceca6 |
| bfc74497b0 |
| c8c982428d |
| e6dbf71252 |
| 3701567ad7 |
| 86140dec08 |
| 50fe0392ed |
| f18c965a8a |
| f874e55051 |
| 1dcd2b1883 |
| 7684579464 |
| 16e89ed081 |
| 62e3b9e3b6 |
| dc3f81920e |
| 8a66213dbe |
| 23d4122574 |
| 5900af726b |
| 9fc4c03e5b |
| 0bb1b0ed45 |
| 3b11219fff |
| 1b4c3f56fa |
| 6c5334c7d3 |
| 1371e394de |
| ec67feef2f |
| 89e656b2a4 |
| 5910b8c562 |
| aa717a2728 |
| 42f01b7f05 |
| 6cf1fb3c10 |
.gitignore (vendored) — 1 change

@@ -112,7 +112,6 @@ local/
 *.mo
 requirements/vendor
 .i18n_built
-VERSION
 .idea/*
 
 # AWX python libs populated by requirements.txt
CONTRIBUTING.md

@@ -2,11 +2,11 @@
 
 Hi there! We're excited to have you as a contributor.
 
-Have questions about this document or anything not covered here? Come chat with us at `#ansible-awx` on irc.freenode.net, or submit your question to the [mailing list](https://groups.google.com/forum/#!forum/awx-project) .
+Have questions about this document or anything not covered here? Come chat with us at `#ansible-awx` on irc.freenode.net, or submit your question to the [mailing list](https://groups.google.com/forum/#!forum/awx-project).
 
 ## Table of contents
 
-* [Things to know prior to submitting code](#things-to-know-prior-to-contributing-code)
+* [Things to know prior to submitting code](#things-to-know-prior-to-submitting-code)
 * [Setting up your development environment](#setting-up-your-development-environment)
   * [Prerequisites](#prerequisites)
     * [Docker](#docker)
@@ -17,14 +17,14 @@ Have questions about this document or anything not covered here? Come chat with
   * [Create local settings](#create-local-settings)
   * [Build the base image](#build-the-base-image)
   * [Build the user interface](#build-the-user-interface)
-# [Running the environment](#running-the-environment)
+* [Running the environment](#running-the-environment)
   * [Start the containers](#start-the-containers)
   * [Start from the container shell](#start-from-the-container-shell)
 * [Post Build Steps](#post-build-steps)
-  * [Start a shell](#start-the-shell)
-  * [Create a superuser](#create-a-superuser)
-  * [Load the data](#load-the-data)
-  * [Building API Documentation](#build-documentation)
+  * [Start a shell](#start-a-shell)
+  * [Create a superuser](#create-a-superuser)
+  * [Load the data](#load-the-data)
+  * [Building API Documentation](#build-api-documentation)
 * [Accessing the AWX web interface](#accessing-the-awx-web-interface)
 * [Purging containers and images](#purging-containers-and-images)
 * [What should I work on?](#what-should-i-work-on)
@@ -62,8 +62,8 @@ Before you can run a deployment, you'll need the following installed in your loc
 - [docker-py](https://github.com/docker/docker-py) Python module
 - [GNU Make](https://www.gnu.org/software/make/)
 - [Git](https://git-scm.com/) Requires Version 1.8.4+
-- [Node 6.x LTS version](https://nodejs.org/en/download/)
-- [NPM 3.x LTS](https://docs.npmjs.com/)
+- [Node 8.x LTS version](https://nodejs.org/en/download/)
+- [NPM 6.x LTS](https://docs.npmjs.com/)
 
 ### System Requirements
Makefile — 28 changes
@@ -12,10 +12,7 @@ MANAGEMENT_COMMAND ?= awx-manage
 IMAGE_REPOSITORY_AUTH ?=
 IMAGE_REPOSITORY_BASE ?= https://gcr.io
 
-VERSION=$(shell git describe --long --first-parent)
-VERSION3=$(shell git describe --long --first-parent | sed 's/\-g.*//')
-VERSION3DOT=$(shell git describe --long --first-parent | sed 's/\-g.*//' | sed 's/\-/\./')
-RELEASE_VERSION=$(shell git describe --long --first-parent | sed 's@\([0-9.]\{1,\}\).*@\1@')
+VERSION := $(shell cat VERSION)
 
 # NOTE: This defaults the container image version to the branch that's active
 COMPOSE_TAG ?= $(GIT_BRANCH)
@@ -30,8 +27,6 @@ DEV_DOCKER_TAG_BASE ?= gcr.io/ansible-tower-engineering
 # Comma separated list
 SRC_ONLY_PKGS ?= cffi,pycparser,psycopg2,twilio
 
-CURWD = $(shell pwd)
-
 # Determine appropriate shasum command
 UNAME_S := $(shell uname -s)
 ifeq ($(UNAME_S),Linux)
@@ -48,20 +43,9 @@ DATE := $(shell date -u +%Y%m%d%H%M)
 NAME ?= awx
 GIT_REMOTE_URL = $(shell git config --get remote.origin.url)
 
-ifeq ($(OFFICIAL),yes)
-    VERSION_TARGET ?= $(RELEASE_VERSION)
-else
-    VERSION_TARGET ?= $(VERSION3DOT)
-endif
-
 # TAR build parameters
-ifeq ($(OFFICIAL),yes)
-    SDIST_TAR_NAME=$(NAME)-$(RELEASE_VERSION)
-    WHEEL_NAME=$(NAME)-$(RELEASE_VERSION)
-else
-    SDIST_TAR_NAME=$(NAME)-$(VERSION3DOT)
-    WHEEL_NAME=$(NAME)-$(VERSION3DOT)
-endif
+SDIST_TAR_NAME=$(NAME)-$(VERSION)
+WHEEL_NAME=$(NAME)-$(VERSION)
 
 SDIST_COMMAND ?= sdist
 WHEEL_COMMAND ?= bdist_wheel
@@ -112,7 +96,6 @@ clean: clean-ui clean-dist
     rm -rf requirements/vendor
    rm -rf tmp
    rm -rf $(I18N_FLAG_FILE)
-   rm -f VERSION
    mkdir tmp
    rm -rf build $(NAME)-$(VERSION) *.egg-info
    find . -type f -regex ".*\.py[co]$$" -delete
@@ -611,7 +594,7 @@ docker-compose-cluster-elk: docker-auth
    TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose -f tools/docker-compose-cluster.yml -f tools/elastic/docker-compose.logstash-link-cluster.yml -f tools/elastic/docker-compose.elastic-override.yml up --no-recreate
 
 minishift-dev:
-   ansible-playbook -i localhost, -e devtree_directory=$(CURWD) tools/clusterdevel/start_minishift_dev.yml
+   ansible-playbook -i localhost, -e devtree_directory=$(CURDIR) tools/clusterdevel/start_minishift_dev.yml
 
 
 clean-elk:
@@ -626,5 +609,4 @@ psql-container:
    docker run -it --net tools_default --rm postgres:9.6 sh -c 'exec psql -h "postgres" -p "5432" -U postgres'
 
 VERSION:
-   @echo $(VERSION_TARGET) > $@
-   @echo "awx: $(VERSION_TARGET)"
+   @echo "awx: $(VERSION)"
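Taken together, the Makefile hunks above replace every git-describe-derived variable (VERSION, VERSION3, VERSION3DOT, RELEASE_VERSION, VERSION_TARGET) with a single `VERSION := $(shell cat VERSION)` read from a checked-in file. A rough Python sketch (not repo code) of what the removed `VERSION3DOT` pipeline computed versus the new single source of truth:

```python
import re
import subprocess

def old_version3dot():
    # Removed behavior: "1.0.6-3-gdeadbeef" -> "1.0.6-3" -> "1.0.6.3"
    described = subprocess.check_output(
        ['git', 'describe', '--long', '--first-parent']
    ).decode().strip()
    without_hash = re.sub(r'-g.*', '', described)    # sed 's/\-g.*//'
    return without_hash.replace('-', '.', 1)         # sed 's/\-/\./' (first dash only)

def new_version():
    # New behavior: the committed VERSION file is the only source of truth.
    with open('VERSION') as f:
        return f.read().strip()
```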
awx/api/views.py

@@ -18,7 +18,7 @@ import six
 # Django
 from django.conf import settings
 from django.core.exceptions import FieldError, ObjectDoesNotExist
-from django.db.models import Q, Count, F
+from django.db.models import Q, Count
 from django.db import IntegrityError, transaction
 from django.shortcuts import get_object_or_404
 from django.utils.encoding import smart_text
@@ -92,7 +92,15 @@ from awx.api.serializers import * # noqa
 from awx.api.metadata import RoleMetadata, JobTypeMetadata
 from awx.main.constants import ACTIVE_STATES
 from awx.main.scheduler.tasks import run_job_complete
-from awx.api.exceptions import ActiveJobConflict
+from awx.api.views.mixin import (
+    ActivityStreamEnforcementMixin,
+    SystemTrackingEnforcementMixin,
+    WorkflowsEnforcementMixin,
+    UnifiedJobDeletionMixin,
+    InstanceGroupMembershipMixin,
+    RelatedJobsPreventDeleteMixin,
+    OrganizationCountsMixin,
+)
 
 logger = logging.getLogger('awx.api.views')
 
@@ -110,157 +118,6 @@ def api_exception_handler(exc, context):
     return exception_handler(exc, context)
 
 
-class ActivityStreamEnforcementMixin(object):
-    '''
-    Mixin to check that license supports activity streams.
-    '''
-    def check_permissions(self, request):
-        ret = super(ActivityStreamEnforcementMixin, self).check_permissions(request)
-        if not feature_enabled('activity_streams'):
-            raise LicenseForbids(_('Your license does not allow use of the activity stream.'))
-        return ret
-
-
-class SystemTrackingEnforcementMixin(object):
-    '''
-    Mixin to check that license supports system tracking.
-    '''
-    def check_permissions(self, request):
-        ret = super(SystemTrackingEnforcementMixin, self).check_permissions(request)
-        if not feature_enabled('system_tracking'):
-            raise LicenseForbids(_('Your license does not permit use of system tracking.'))
-        return ret
-
-
-class WorkflowsEnforcementMixin(object):
-    '''
-    Mixin to check that license supports workflows.
-    '''
-    def check_permissions(self, request):
-        ret = super(WorkflowsEnforcementMixin, self).check_permissions(request)
-        if not feature_enabled('workflows') and request.method not in ('GET', 'OPTIONS', 'DELETE'):
-            raise LicenseForbids(_('Your license does not allow use of workflows.'))
-        return ret
-
-
-class UnifiedJobDeletionMixin(object):
-    '''
-    Special handling when deleting a running unified job object.
-    '''
-    def destroy(self, request, *args, **kwargs):
-        obj = self.get_object()
-        if not request.user.can_access(self.model, 'delete', obj):
-            raise PermissionDenied()
-        try:
-            if obj.unified_job_node.workflow_job.status in ACTIVE_STATES:
-                raise PermissionDenied(detail=_('Cannot delete job resource when associated workflow job is running.'))
-        except self.model.unified_job_node.RelatedObjectDoesNotExist:
-            pass
-        # Still allow deletion of new status, because these can be manually created
-        if obj.status in ACTIVE_STATES and obj.status != 'new':
-            raise PermissionDenied(detail=_("Cannot delete running job resource."))
-        elif not obj.event_processing_finished:
-            # Prohibit deletion if job events are still coming in
-            if obj.finished and now() < obj.finished + dateutil.relativedelta.relativedelta(minutes=1):
-                # less than 1 minute has passed since job finished and events are not in
-                return Response({"error": _("Job has not finished processing events.")},
-                                status=status.HTTP_400_BAD_REQUEST)
-            else:
-                # if it has been > 1 minute, events are probably lost
-                logger.warning('Allowing deletion of {} through the API without all events '
-                               'processed.'.format(obj.log_format))
-        obj.delete()
-        return Response(status=status.HTTP_204_NO_CONTENT)
-
-
-class InstanceGroupMembershipMixin(object):
-    '''
-    Manages signaling celery to reload its queue configuration on Instance Group membership changes
-    '''
-    def attach(self, request, *args, **kwargs):
-        response = super(InstanceGroupMembershipMixin, self).attach(request, *args, **kwargs)
-        sub_id, res = self.attach_validate(request)
-        if status.is_success(response.status_code):
-            if self.parent_model is Instance:
-                inst_name = ig_obj.hostname
-            else:
-                inst_name = get_object_or_400(self.model, pk=sub_id).hostname
-            with transaction.atomic():
-                ig_qs = InstanceGroup.objects.select_for_update()
-                if self.parent_model is Instance:
-                    ig_obj = get_object_or_400(ig_qs, pk=sub_id)
-                else:
-                    # similar to get_parent_object, but selected for update
-                    parent_filter = {
-                        self.lookup_field: self.kwargs.get(self.lookup_field, None),
-                    }
-                    ig_obj = get_object_or_404(ig_qs, **parent_filter)
-                if inst_name not in ig_obj.policy_instance_list:
-                    ig_obj.policy_instance_list.append(inst_name)
-                    ig_obj.save(update_fields=['policy_instance_list'])
-        return response
-
-    def is_valid_relation(self, parent, sub, created=False):
-        if sub.is_isolated():
-            return {'error': _('Isolated instances may not be added or removed from instances groups via the API.')}
-        if self.parent_model is InstanceGroup:
-            ig_obj = self.get_parent_object()
-            if ig_obj.controller_id is not None:
-                return {'error': _('Isolated instance group membership may not be managed via the API.')}
-        return None
-
-    def unattach_validate(self, request):
-        (sub_id, res) = super(InstanceGroupMembershipMixin, self).unattach_validate(request)
-        if res:
-            return (sub_id, res)
-        sub = get_object_or_400(self.model, pk=sub_id)
-        attach_errors = self.is_valid_relation(None, sub)
-        if attach_errors:
-            return (sub_id, Response(attach_errors, status=status.HTTP_400_BAD_REQUEST))
-        return (sub_id, res)
-
-    def unattach(self, request, *args, **kwargs):
-        response = super(InstanceGroupMembershipMixin, self).unattach(request, *args, **kwargs)
-        if status.is_success(response.status_code):
-            sub_id = request.data.get('id', None)
-            if self.parent_model is Instance:
-                inst_name = self.get_parent_object().hostname
-            else:
-                inst_name = get_object_or_400(self.model, pk=sub_id).hostname
-            with transaction.atomic():
-                ig_qs = InstanceGroup.objects.select_for_update()
-                if self.parent_model is Instance:
-                    ig_obj = get_object_or_400(ig_qs, pk=sub_id)
-                else:
-                    # similar to get_parent_object, but selected for update
-                    parent_filter = {
-                        self.lookup_field: self.kwargs.get(self.lookup_field, None),
-                    }
-                    ig_obj = get_object_or_404(ig_qs, **parent_filter)
-                if inst_name in ig_obj.policy_instance_list:
-                    ig_obj.policy_instance_list.pop(ig_obj.policy_instance_list.index(inst_name))
-                    ig_obj.save(update_fields=['policy_instance_list'])
-        return response
-
-
-class RelatedJobsPreventDeleteMixin(object):
-    def perform_destroy(self, obj):
-        self.check_related_active_jobs(obj)
-        return super(RelatedJobsPreventDeleteMixin, self).perform_destroy(obj)
-
-    def check_related_active_jobs(self, obj):
-        active_jobs = obj.get_active_jobs()
-        if len(active_jobs) > 0:
-            raise ActiveJobConflict(active_jobs)
-        time_cutoff = now() - dateutil.relativedelta.relativedelta(minutes=1)
-        recent_jobs = obj._get_related_jobs().filter(finished__gte = time_cutoff)
-        for unified_job in recent_jobs.get_real_instances():
-            if not unified_job.event_processing_finished:
-                raise PermissionDenied(_(
-                    'Related job {} is still processing events.'
-                ).format(unified_job.log_format))
-
-
 class ApiRootView(APIView):
 
     permission_classes = (AllowAny,)
@@ -887,92 +744,6 @@ class AuthView(APIView):
         return Response(data)
 
 
-class OrganizationCountsMixin(object):
-
-    def get_serializer_context(self, *args, **kwargs):
-        full_context = super(OrganizationCountsMixin, self).get_serializer_context(*args, **kwargs)
-
-        if self.request is None:
-            return full_context
-
-        db_results = {}
-        org_qs = self.model.accessible_objects(self.request.user, 'read_role')
-        org_id_list = org_qs.values('id')
-        if len(org_id_list) == 0:
-            if self.request.method == 'POST':
-                full_context['related_field_counts'] = {}
-            return full_context
-
-        inv_qs = Inventory.accessible_objects(self.request.user, 'read_role')
-        project_qs = Project.accessible_objects(self.request.user, 'read_role')
-
-        # Produce counts of Foreign Key relationships
-        db_results['inventories'] = inv_qs\
-            .values('organization').annotate(Count('organization')).order_by('organization')
-
-        db_results['teams'] = Team.accessible_objects(
-            self.request.user, 'read_role').values('organization').annotate(
-            Count('organization')).order_by('organization')
-
-        JT_project_reference = 'project__organization'
-        JT_inventory_reference = 'inventory__organization'
-        db_results['job_templates_project'] = JobTemplate.accessible_objects(
-            self.request.user, 'read_role').exclude(
-            project__organization=F(JT_inventory_reference)).values(JT_project_reference).annotate(
-            Count(JT_project_reference)).order_by(JT_project_reference)
-
-        db_results['job_templates_inventory'] = JobTemplate.accessible_objects(
-            self.request.user, 'read_role').values(JT_inventory_reference).annotate(
-            Count(JT_inventory_reference)).order_by(JT_inventory_reference)
-
-        db_results['projects'] = project_qs\
-            .values('organization').annotate(Count('organization')).order_by('organization')
-
-        # Other members and admins of organization are always viewable
-        db_results['users'] = org_qs.annotate(
-            users=Count('member_role__members', distinct=True),
-            admins=Count('admin_role__members', distinct=True)
-        ).values('id', 'users', 'admins')
-
-        count_context = {}
-        for org in org_id_list:
-            org_id = org['id']
-            count_context[org_id] = {
-                'inventories': 0, 'teams': 0, 'users': 0, 'job_templates': 0,
-                'admins': 0, 'projects': 0}
-
-        for res, count_qs in db_results.items():
-            if res == 'job_templates_project':
-                org_reference = JT_project_reference
-            elif res == 'job_templates_inventory':
-                org_reference = JT_inventory_reference
-            elif res == 'users':
-                org_reference = 'id'
-            else:
-                org_reference = 'organization'
-            for entry in count_qs:
-                org_id = entry[org_reference]
-                if org_id in count_context:
-                    if res == 'users':
-                        count_context[org_id]['admins'] = entry['admins']
-                        count_context[org_id]['users'] = entry['users']
-                        continue
-                    count_context[org_id][res] = entry['%s__count' % org_reference]
-
-        # Combine the counts for job templates by project and inventory
-        for org in org_id_list:
-            org_id = org['id']
-            count_context[org_id]['job_templates'] = 0
-            for related_path in ['job_templates_project', 'job_templates_inventory']:
-                if related_path in count_context[org_id]:
-                    count_context[org_id]['job_templates'] += count_context[org_id].pop(related_path)
-
-        full_context['related_field_counts'] = count_context
-
-        return full_context
-
-
 class OrganizationList(OrganizationCountsMixin, ListCreateAPIView):
 
     model = Organization
@@ -4960,7 +4731,7 @@ class NotificationTemplateTest(GenericAPIView):
         if not notification:
             return Response({}, status=status.HTTP_400_BAD_REQUEST)
         else:
-            send_notifications.delay([notification.id])
+            connection.on_commit(lambda: send_notifications.delay([notification.id]))
             data = OrderedDict()
             data['notification'] = notification.id
             data.update(NotificationSerializer(notification, context=self.get_serializer_context()).to_representation(notification))
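The final views.py hunk swaps a direct Celery dispatch for `connection.on_commit`. The reason: if the task is queued while the request's transaction is still open, a worker can dequeue it before COMMIT and fail to find the Notification row. A minimal sketch of the pattern (the helper function and `generate_notification` call here are illustrative, not repo code):

```python
from django.db import connection, transaction

def run_test_notification(template, send_notifications):
    with transaction.atomic():
        notification = template.generate_notification()  # row not committed yet
        # Racy: a worker may pick this up before the transaction commits,
        # then fail to find the notification in the database.
        # send_notifications.delay([notification.id])
        # Safe: Django invokes the callback only after a successful COMMIT.
        connection.on_commit(lambda: send_notifications.delay([notification.id]))
        return notification
```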
awx/api/views/mixin.py — new file, 273 lines
@@ -0,0 +1,273 @@
# Copyright (c) 2018 Red Hat, Inc.
# All Rights Reserved.

import dateutil
import logging

from django.db.models import (
    Count,
    F,
)
from django.db import transaction
from django.shortcuts import get_object_or_404
from django.utils.timezone import now
from django.utils.translation import ugettext_lazy as _

from rest_framework.exceptions import PermissionDenied
from rest_framework.response import Response
from rest_framework import status

from awx.main.constants import ACTIVE_STATES
from awx.main.utils import get_object_or_400
from awx.main.models.ha import (
    Instance,
    InstanceGroup,
)
from awx.main.models.organization import Team
from awx.main.models.projects import Project
from awx.main.models.inventory import Inventory
from awx.main.models.jobs import JobTemplate
from awx.conf.license import (
    feature_enabled,
    LicenseForbids,
)
from awx.api.exceptions import ActiveJobConflict

logger = logging.getLogger('awx.api.views.mixin')


class ActivityStreamEnforcementMixin(object):
    '''
    Mixin to check that license supports activity streams.
    '''
    def check_permissions(self, request):
        ret = super(ActivityStreamEnforcementMixin, self).check_permissions(request)
        if not feature_enabled('activity_streams'):
            raise LicenseForbids(_('Your license does not allow use of the activity stream.'))
        return ret


class SystemTrackingEnforcementMixin(object):
    '''
    Mixin to check that license supports system tracking.
    '''
    def check_permissions(self, request):
        ret = super(SystemTrackingEnforcementMixin, self).check_permissions(request)
        if not feature_enabled('system_tracking'):
            raise LicenseForbids(_('Your license does not permit use of system tracking.'))
        return ret


class WorkflowsEnforcementMixin(object):
    '''
    Mixin to check that license supports workflows.
    '''
    def check_permissions(self, request):
        ret = super(WorkflowsEnforcementMixin, self).check_permissions(request)
        if not feature_enabled('workflows') and request.method not in ('GET', 'OPTIONS', 'DELETE'):
            raise LicenseForbids(_('Your license does not allow use of workflows.'))
        return ret


class UnifiedJobDeletionMixin(object):
    '''
    Special handling when deleting a running unified job object.
    '''
    def destroy(self, request, *args, **kwargs):
        obj = self.get_object()
        if not request.user.can_access(self.model, 'delete', obj):
            raise PermissionDenied()
        try:
            if obj.unified_job_node.workflow_job.status in ACTIVE_STATES:
                raise PermissionDenied(detail=_('Cannot delete job resource when associated workflow job is running.'))
        except self.model.unified_job_node.RelatedObjectDoesNotExist:
            pass
        # Still allow deletion of new status, because these can be manually created
        if obj.status in ACTIVE_STATES and obj.status != 'new':
            raise PermissionDenied(detail=_("Cannot delete running job resource."))
        elif not obj.event_processing_finished:
            # Prohibit deletion if job events are still coming in
            if obj.finished and now() < obj.finished + dateutil.relativedelta.relativedelta(minutes=1):
                # less than 1 minute has passed since job finished and events are not in
                return Response({"error": _("Job has not finished processing events.")},
                                status=status.HTTP_400_BAD_REQUEST)
            else:
                # if it has been > 1 minute, events are probably lost
                logger.warning('Allowing deletion of {} through the API without all events '
                               'processed.'.format(obj.log_format))
        obj.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)


class InstanceGroupMembershipMixin(object):
    '''
    Manages signaling celery to reload its queue configuration on Instance Group membership changes
    '''
    def attach(self, request, *args, **kwargs):
        response = super(InstanceGroupMembershipMixin, self).attach(request, *args, **kwargs)
        sub_id, res = self.attach_validate(request)
        if status.is_success(response.status_code):
            if self.parent_model is Instance:
                ig_obj = get_object_or_400(self.model, pk=sub_id)
                inst_name = ig_obj.hostname
            else:
                inst_name = get_object_or_400(self.model, pk=sub_id).hostname
            with transaction.atomic():
                ig_qs = InstanceGroup.objects.select_for_update()
                if self.parent_model is Instance:
                    ig_obj = get_object_or_400(ig_qs, pk=sub_id)
                else:
                    # similar to get_parent_object, but selected for update
                    parent_filter = {
                        self.lookup_field: self.kwargs.get(self.lookup_field, None),
                    }
                    ig_obj = get_object_or_404(ig_qs, **parent_filter)
                if inst_name not in ig_obj.policy_instance_list:
                    ig_obj.policy_instance_list.append(inst_name)
                    ig_obj.save(update_fields=['policy_instance_list'])
        return response

    def is_valid_relation(self, parent, sub, created=False):
        if sub.is_isolated():
            return {'error': _('Isolated instances may not be added or removed from instances groups via the API.')}
        if self.parent_model is InstanceGroup:
            ig_obj = self.get_parent_object()
            if ig_obj.controller_id is not None:
                return {'error': _('Isolated instance group membership may not be managed via the API.')}
        return None

    def unattach_validate(self, request):
        (sub_id, res) = super(InstanceGroupMembershipMixin, self).unattach_validate(request)
        if res:
            return (sub_id, res)
        sub = get_object_or_400(self.model, pk=sub_id)
        attach_errors = self.is_valid_relation(None, sub)
        if attach_errors:
            return (sub_id, Response(attach_errors, status=status.HTTP_400_BAD_REQUEST))
        return (sub_id, res)

    def unattach(self, request, *args, **kwargs):
        response = super(InstanceGroupMembershipMixin, self).unattach(request, *args, **kwargs)
        if status.is_success(response.status_code):
            sub_id = request.data.get('id', None)
            if self.parent_model is Instance:
                inst_name = self.get_parent_object().hostname
            else:
                inst_name = get_object_or_400(self.model, pk=sub_id).hostname
            with transaction.atomic():
                ig_qs = InstanceGroup.objects.select_for_update()
                if self.parent_model is Instance:
                    ig_obj = get_object_or_400(ig_qs, pk=sub_id)
                else:
                    # similar to get_parent_object, but selected for update
                    parent_filter = {
                        self.lookup_field: self.kwargs.get(self.lookup_field, None),
                    }
                    ig_obj = get_object_or_404(ig_qs, **parent_filter)
                if inst_name in ig_obj.policy_instance_list:
                    ig_obj.policy_instance_list.pop(ig_obj.policy_instance_list.index(inst_name))
                    ig_obj.save(update_fields=['policy_instance_list'])
        return response


class RelatedJobsPreventDeleteMixin(object):
    def perform_destroy(self, obj):
        self.check_related_active_jobs(obj)
        return super(RelatedJobsPreventDeleteMixin, self).perform_destroy(obj)

    def check_related_active_jobs(self, obj):
        active_jobs = obj.get_active_jobs()
        if len(active_jobs) > 0:
            raise ActiveJobConflict(active_jobs)
        time_cutoff = now() - dateutil.relativedelta.relativedelta(minutes=1)
        recent_jobs = obj._get_related_jobs().filter(finished__gte = time_cutoff)
        for unified_job in recent_jobs.get_real_instances():
            if not unified_job.event_processing_finished:
                raise PermissionDenied(_(
                    'Related job {} is still processing events.'
                ).format(unified_job.log_format))


class OrganizationCountsMixin(object):

    def get_serializer_context(self, *args, **kwargs):
        full_context = super(OrganizationCountsMixin, self).get_serializer_context(*args, **kwargs)

        if self.request is None:
            return full_context

        db_results = {}
        org_qs = self.model.accessible_objects(self.request.user, 'read_role')
        org_id_list = org_qs.values('id')
        if len(org_id_list) == 0:
            if self.request.method == 'POST':
                full_context['related_field_counts'] = {}
            return full_context

        inv_qs = Inventory.accessible_objects(self.request.user, 'read_role')
        project_qs = Project.accessible_objects(self.request.user, 'read_role')

        # Produce counts of Foreign Key relationships
        db_results['inventories'] = inv_qs\
            .values('organization').annotate(Count('organization')).order_by('organization')

        db_results['teams'] = Team.accessible_objects(
            self.request.user, 'read_role').values('organization').annotate(
            Count('organization')).order_by('organization')

        JT_project_reference = 'project__organization'
        JT_inventory_reference = 'inventory__organization'
        db_results['job_templates_project'] = JobTemplate.accessible_objects(
            self.request.user, 'read_role').exclude(
            project__organization=F(JT_inventory_reference)).values(JT_project_reference).annotate(
            Count(JT_project_reference)).order_by(JT_project_reference)

        db_results['job_templates_inventory'] = JobTemplate.accessible_objects(
            self.request.user, 'read_role').values(JT_inventory_reference).annotate(
            Count(JT_inventory_reference)).order_by(JT_inventory_reference)

        db_results['projects'] = project_qs\
            .values('organization').annotate(Count('organization')).order_by('organization')

        # Other members and admins of organization are always viewable
        db_results['users'] = org_qs.annotate(
            users=Count('member_role__members', distinct=True),
            admins=Count('admin_role__members', distinct=True)
        ).values('id', 'users', 'admins')

        count_context = {}
        for org in org_id_list:
            org_id = org['id']
            count_context[org_id] = {
                'inventories': 0, 'teams': 0, 'users': 0, 'job_templates': 0,
                'admins': 0, 'projects': 0}

        for res, count_qs in db_results.items():
            if res == 'job_templates_project':
                org_reference = JT_project_reference
            elif res == 'job_templates_inventory':
                org_reference = JT_inventory_reference
            elif res == 'users':
                org_reference = 'id'
            else:
                org_reference = 'organization'
            for entry in count_qs:
                org_id = entry[org_reference]
                if org_id in count_context:
                    if res == 'users':
                        count_context[org_id]['admins'] = entry['admins']
                        count_context[org_id]['users'] = entry['users']
                        continue
                    count_context[org_id][res] = entry['%s__count' % org_reference]

        # Combine the counts for job templates by project and inventory
        for org in org_id_list:
            org_id = org['id']
            count_context[org_id]['job_templates'] = 0
            for related_path in ['job_templates_project', 'job_templates_inventory']:
                if related_path in count_context[org_id]:
                    count_context[org_id]['job_templates'] += count_context[org_id].pop(related_path)

        full_context['related_field_counts'] = count_context

        return full_context
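Two details worth noting in the moved file: `attach()` now assigns `ig_obj` before reading `ig_obj.hostname` (the views.py version referenced it before assignment), and every edit of `policy_instance_list` runs inside `transaction.atomic()` with `select_for_update()`. A condensed sketch of that locking pattern (not repo code; the model import matches the file above):

```python
from django.db import transaction

from awx.main.models.ha import InstanceGroup

def add_hostname_to_group(ig_pk, hostname):
    # SELECT ... FOR UPDATE blocks concurrent writers until this transaction
    # commits, making the read-modify-write of policy_instance_list atomic,
    # so two attach calls cannot clobber each other's saved list.
    with transaction.atomic():
        ig = InstanceGroup.objects.select_for_update().get(pk=ig_pk)
        if hostname not in ig.policy_instance_list:
            ig.policy_instance_list.append(hostname)
            ig.save(update_fields=['policy_instance_list'])
```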
@@ -15,6 +15,8 @@ class Command(BaseCommand):
     def handle(self, *args, **kwargs):
         # Sanity check: Is there already an organization in the system?
         if Organization.objects.count():
             print('An organization is already in the system, exiting.')
+            print('(changed: False)')
             return
 
         # Create a default organization as the first superuser found.
@@ -54,3 +56,4 @@ class Command(BaseCommand):
             jt.credentials.add(c)
         print('Default organization added.')
         print('Demo Credential, Inventory, and Job Template added.')
+        print('(changed: True)')
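The new `(changed: True)` / `(changed: False)` lines give whatever invokes this management command a machine-readable idempotence marker, so a caller can key off the output rather than the exit code. A trivial sketch of such a caller-side check (the function is hypothetical):

```python
def command_reported_change(stdout):
    # True only when the command actually created the preload data.
    return '(changed: True)' in stdout
```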
@@ -257,6 +257,9 @@ class DeprecatedAuthTokenMiddleware(object):
                 'be replaced with OAuth2.0 in the next version of Ansible Tower '
                 '(see /api/o/ for more details).'
             )
+        elif request.environ.get('HTTP_AUTHORIZATION', '').startswith('Token '):
+            token = request.environ['HTTP_AUTHORIZATION'].split(' ', 1)[-1].strip()
+            request.environ['HTTP_AUTHORIZATION'] = six.text_type('Bearer {}').format(token)
 
 
 class MigrationRanCheckMiddleware(object):
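The added branch keeps the deprecated `Authorization: Token <value>` header working by rewriting it in place to the OAuth2 `Bearer` scheme before authentication runs. The rewrite in isolation, as a runnable sketch:

```python
environ = {'HTTP_AUTHORIZATION': 'Token abc123'}

if environ.get('HTTP_AUTHORIZATION', '').startswith('Token '):
    token = environ['HTTP_AUTHORIZATION'].split(' ', 1)[-1].strip()
    environ['HTTP_AUTHORIZATION'] = 'Bearer {}'.format(token)

assert environ['HTTP_AUTHORIZATION'] == 'Bearer abc123'
```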
@@ -59,7 +59,7 @@ def check_system_tracking_feature_forbidden(response):
     assert 'Your license does not permit use of system tracking.' == response.data['detail']
 
 
-@mock.patch('awx.api.views.feature_enabled', new=mock_feature_disabled)
+@mock.patch('awx.api.views.mixin.feature_enabled', new=mock_feature_disabled)
 @pytest.mark.django_db
 @pytest.mark.license_feature
 def test_system_tracking_license_get(hosts, get, user):
@@ -70,7 +70,7 @@ def test_system_tracking_license_get(hosts, get, user):
     check_system_tracking_feature_forbidden(response)
 
 
-@mock.patch('awx.api.views.feature_enabled', new=mock_feature_disabled)
+@mock.patch('awx.api.views.mixin.feature_enabled', new=mock_feature_disabled)
 @pytest.mark.django_db
 @pytest.mark.license_feature
 def test_system_tracking_license_options(hosts, options, user):

@@ -41,7 +41,7 @@ def check_system_tracking_feature_forbidden(response):
     assert 'Your license does not permit use of system tracking.' == response.data['detail']
 
 
-@mock.patch('awx.api.views.feature_enabled', new=mock_feature_disabled)
+@mock.patch('awx.api.views.mixin.feature_enabled', new=mock_feature_disabled)
 @pytest.mark.django_db
 @pytest.mark.license_feature
 def test_system_tracking_license_get(hosts, get, user):
@@ -52,7 +52,7 @@ def test_system_tracking_license_get(hosts, get, user):
     check_system_tracking_feature_forbidden(response)
 
 
-@mock.patch('awx.api.views.feature_enabled', new=mock_feature_disabled)
+@mock.patch('awx.api.views.mixin.feature_enabled', new=mock_feature_disabled)
 @pytest.mark.django_db
 @pytest.mark.license_feature
 def test_system_tracking_license_options(hosts, options, user):
@@ -141,7 +141,7 @@ def test_block_unprocessed_events(delete, admin_user, mocker):
     view = MockView()
 
     time_of_request = time_of_finish + relativedelta(seconds=2)
-    with mock.patch('awx.api.views.now', lambda: time_of_request):
+    with mock.patch('awx.api.views.mixin.now', lambda: time_of_request):
         r = view.destroy(request)
         assert r.status_code == 400
 
@@ -162,7 +162,7 @@ def test_block_related_unprocessed_events(mocker, organization, project, delete,
     )
     view = RelatedJobsPreventDeleteMixin()
     time_of_request = time_of_finish + relativedelta(seconds=2)
-    with mock.patch('awx.api.views.now', lambda: time_of_request):
+    with mock.patch('awx.api.views.mixin.now', lambda: time_of_request):
         with pytest.raises(PermissionDenied):
             view.perform_destroy(organization)
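These one-line test changes all follow the same rule: `mock.patch` must target the module where a name is looked up, not where it is defined. After the refactor, `feature_enabled` and `now` are referenced from `awx.api.views.mixin`, so patches aimed at `awx.api.views` would no longer intercept anything. A self-contained toy demonstrating the rule (module names invented for illustration):

```python
import sys
import types
from unittest import mock

# Simulate "from defs import flag" — user holds its own reference.
defs = types.ModuleType('defs')
defs.flag = lambda: True
user = types.ModuleType('user')
user.flag = defs.flag
sys.modules['defs'] = defs
sys.modules['user'] = user

with mock.patch('defs.flag', new=lambda: False):
    assert user.flag() is True   # patching the definer misses user's copy

with mock.patch('user.flag', new=lambda: False):
    assert user.flag() is False  # patch where the name is looked up
```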
@@ -380,6 +380,15 @@ def test_deprecated_authtoken_support(alice, fmt):
     assert resp.data['refresh_token'] is None
     assert resp.data['scope'] == 'write'
 
+    for _type in ('Token', 'Bearer'):
+        request = getattr(APIRequestFactory(), 'get')(
+            '/api/v2/me/',
+            HTTP_AUTHORIZATION=' '.join([_type, resp.data['token']])
+        )
+        DeprecatedAuthTokenMiddleware().process_request(request)
+        view, view_args, view_kwargs = resolve(request.path)
+        assert view(request, *view_args, **view_kwargs).status_code == 200
+
 
 @pytest.mark.django_db
 def test_deprecated_authtoken_invalid_username(alice):
awx/ui/npm-shrinkwrap.json (generated) — 9977 changes; file diff suppressed because it is too large
awx/ui/package-lock.json (generated) — new file, 15036 lines; file diff suppressed because it is too large
@@ -10,8 +10,8 @@
     "django_host": "localhost"
   },
   "engines": {
-    "node": "^6.11.3",
-    "npm": "^3.10.10"
+    "node": "^8.11.2",
+    "npm": "^6.4.1"
   },
   "scripts": {
     "ui-docker-machine": "ip=$(docker-machine ip $DOCKER_MACHINE_NAME); npm set ansible-tower:django_host ${ip}; grunt dev;",
@@ -105,17 +105,21 @@
     "angular-gettext": "^2.3.5",
     "angular-md5": "^0.1.8",
     "angular-moment": "^0.10.1",
     "angular-mousewheel": "^1.0.5",
+    "angular-sanitize": "~1.6.6",
     "angular-scheduler": "git+https://git@github.com/ansible/angular-scheduler#v0.3.3",
     "angular-tz-extensions": "git+https://git@github.com/ansible/angular-tz-extensions#v0.5.2",
     "angular-xeditable": "~0.8.0",
+    "ansi-to-html": "^0.6.3",
+    "babel-polyfill": "^6.26.0",
     "bootstrap": "^3.3.7",
     "bootstrap-datepicker": "^1.7.1",
     "codemirror": "^5.17.0",
     "components-font-awesome": "^4.6.1",
-    "d3": "~3.3.13",
+    "d3": "^3.5.4",
     "hamsterjs": "^1.1.2",
+    "html-entities": "^1.2.1",
     "inherits": "^1.0.2",
     "javascript-detect-element-resize": "^0.5.3",
     "jquery": "~2.2.4",
     "jquery-ui": "^1.12.1",
@@ -123,12 +127,14 @@
     "legacy-loader": "0.0.2",
     "lodash": "~4.17.10",
     "lr-infinite-scroll": "git+https://git@github.com/lorenzofox3/lrInfiniteScroll",
     "mathjs": "^3.15.0",
     "moment": "^2.19.4",
     "ng-toast": "git+https://git@github.com/ansible/ngToast#v2.1.1",
-    "nvd3": "git+https://git@github.com/ansible/nvd3#awx",
+    "nvd3": "^1.8.6",
     "reconnectingwebsocket": "^1.0.0",
     "rrule": "git+https://git@github.com/jkbrzt/rrule#4ff63b2f8524fd6d5ba6e80db770953b5cd08a0c",
     "select2": "^4.0.2",
-    "sprintf-js": "^1.0.3"
+    "sprintf-js": "^1.0.3",
+    "titlecase": "^1.1.2"
   }
 }
@@ -7,7 +7,6 @@ localhost ansible_connection=local ansible_python_interpreter="/usr/bin/env python"
 # be selected based on: latest, 1, 1.0, 1.0.0, 1.0.0.123
 # by default the base will be used to search for ansible/awx_web and ansible/awx_task
 dockerhub_base=ansible
-dockerhub_version=latest
 
 # Openshift Install
 # Will need to set -e openshift_password=developer -e docker_registry_password=$(oc whoami -t)
@@ -1,17 +1,16 @@
 FROM centos:7
 
 RUN yum install -y epel-release
-RUN yum install -y https://centos7.iuscommunity.org/ius-release.rpm
 
 RUN yum install -y bzip2 \
     gcc-c++ \
     gettext \
-    git2u-core \
+    git \
     make \
     python \
     python-pip
 
-RUN curl --silent --location https://rpm.nodesource.com/setup_6.x | bash -
+RUN curl --silent --location https://rpm.nodesource.com/setup_8.x | bash -
 RUN yum install -y nodejs
 RUN npm set progress=false
 
@@ -1,13 +1,7 @@
 ---
-- name: Get Version from checkout if not provided
-  shell: "git describe --long --first-parent | sed 's/\\-g.*//' | sed 's/\\-/\\./'"
-  delegate_to: localhost
-  register: awx_version_command
-  when: awx_version is not defined
-
 - name: Set global version if not provided
   set_fact:
-    awx_version: "{{ awx_version_command.stdout }}"
+    awx_version: "{{ lookup('file', playbook_dir + '/../VERSION') }}"
   when: awx_version is not defined
 
 - name: Verify awx-logos directory exists for official install
@@ -29,10 +29,9 @@ WORKDIR /tmp
 RUN mkdir -p /var/lib/awx/public/static
 RUN chgrp -Rf root /var/lib/awx && chmod -Rf g+w /var/lib/awx
 RUN yum -y install epel-release && \
-    yum -y install https://centos7.iuscommunity.org/ius-release.rpm && \
     yum -y localinstall http://download.postgresql.org/pub/repos/yum/9.6/redhat/rhel-7-x86_64/pgdg-centos96-9.6-3.noarch.rpm && \
     yum -y update && \
-    yum -y install ansible git2u-core mercurial subversion curl python-psycopg2 python-pip python-setuptools libselinux-python setools-libs yum-utils sudo acl make postgresql-devel nginx python-psutil libxml2-devel libxslt-devel libstdc++.so.6 gcc cyrus-sasl-devel cyrus-sasl openldap-devel libffi-devel python-pip xmlsec1-devel swig krb5-devel xmlsec1-openssl xmlsec1 xmlsec1-openssl-devel libtool-ltdl-devel bubblewrap gcc-c++ python-devel krb5-workstation krb5-libs python-crypto libcurl-devel rsync unzip && \
+    yum -y install ansible git mercurial subversion curl python-psycopg2 python-pip python-setuptools libselinux-python setools-libs yum-utils sudo acl make postgresql-devel nginx python-psutil libxml2-devel libxslt-devel libstdc++.so.6 gcc cyrus-sasl-devel cyrus-sasl openldap-devel libffi-devel python-pip xmlsec1-devel swig krb5-devel xmlsec1-openssl xmlsec1 xmlsec1-openssl-devel libtool-ltdl-devel bubblewrap gcc-c++ python-devel krb5-workstation krb5-libs python-crypto libcurl-devel rsync unzip && \
     pip install virtualenv supervisor && \
     CFLAGS="-DXMLSEC_NO_SIZE_T" \
     VENV_BASE=/var/lib/awx/venv make requirements_ansible && \
@@ -1,4 +1,6 @@
 ---
+dockerhub_version: "{{ lookup('file', playbook_dir + '/../VERSION') }}"
+
 admin_user: 'admin'
 admin_email: 'root@localhost'
 admin_password: 'password'
@@ -1,3 +1,5 @@
 ---
+dockerhub_version: "{{ lookup('file', playbook_dir + '/../VERSION') }}"
+
 rabbitmq_version: "3.7.4"
 rabbitmq_image: "ansible/awx_rabbitmq:{{rabbitmq_version}}"
setup.py — 9 changes
@@ -22,13 +22,8 @@ docdir = "/usr/share/doc/awx"
 def get_version():
     current_dir = os.path.dirname(os.path.abspath(__file__))
     version_file = os.path.join(current_dir, 'VERSION')
-    if os.path.isfile(version_file):
-        with open(version_file, 'r') as file:
-            version = file.read().strip()
-    else:
-        version = subprocess.Popen("git describe --long | cut -d - -f 1-1", shell=True, stdout=subprocess.PIPE).stdout.read().strip()
-    return version
-
+    with open(version_file, 'r') as file:
+        return file.read().strip()
 
 if os.path.exists("/etc/debian_version"):
     sysinit = "/etc/init.d"
@@ -11,7 +11,7 @@ requirements/requirements_ansible_uninstall.txt \
 requirements/requirements_tower_uninstall.txt \
 /tmp/requirements/
 RUN yum -y update && yum -y install curl epel-release && yum -y install https://centos7.iuscommunity.org/ius-release.rpm
-RUN curl --silent --location https://rpm.nodesource.com/setup_6.x | bash -
+RUN curl --silent --location https://rpm.nodesource.com/setup_8.x | bash -
 RUN yum -y localinstall http://download.postgresql.org/pub/repos/yum/9.4/redhat/rhel-6-x86_64/pgdg-centos94-9.4-3.noarch.rpm
 RUN yum -y update && yum -y install openssh-server ansible mg vim tmux git2u-core mercurial subversion python-devel python-psycopg2 make postgresql postgresql-devel nginx nodejs python-psutil libxml2-devel libxslt-devel libstdc++.so.6 gcc cyrus-sasl-devel cyrus-sasl openldap-devel libffi-devel zeromq-devel python-pip xmlsec1-devel swig krb5-devel xmlsec1-openssl xmlsec1 xmlsec1-openssl-devel libtool-ltdl-devel rabbitmq-server bubblewrap zanata-python-client gettext gcc-c++ libcurl-devel python-pycurl bzip2 python-crypto rsync
 RUN pip install virtualenv