Mirror of https://github.com/ansible/awx.git (synced 2026-01-09 23:12:08 -03:30)
yamllint: Make all files in awx pass yamllint
This commit updates all files that weren't passing yamllint so that they now pass. A new yamllint target has been added; run `tox -e yamllint` or `yamllint -s .` locally to make sure the YAML files still pass. This check will be enabled in CI so that it runs on every new contribution and prevents non-compliant code from being merged.

Signed-off-by: Yanis Guenane <yguenane@redhat.com>
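For illustration, a CI job wiring in the new check could look like the sketch below. The actual CI configuration is not part of this diff, so the workflow name, trigger, and GitHub Actions syntax here are assumptions; only the `tox -e yamllint` / `yamllint -s .` commands come from the commit message (`-s`/`--strict` makes warnings fail the run as well as errors).

    # Hypothetical CI job (not part of this commit) showing how the yamllint
    # check could be enforced on every pull request.
    name: yamllint
    on: [pull_request]

    jobs:
      yamllint:
        runs-on: ubuntu-latest
        steps:
          - uses: actions/checkout@v2
          - uses: actions/setup-python@v2
            with:
              python-version: '3.x'
          - name: Install tox
            run: pip install tox
          - name: Run yamllint in strict mode
            run: tox -e yamllint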
This commit is contained in:
parent 8116ec8e1f
commit ca247182df

.github/BOTMETA.yml (vendored, +1)
@@ -1,3 +1,4 @@
+---
 files:
   awx/ui/:
     labels: component:ui
.yamllint (new file, +12)
@@ -0,0 +1,12 @@
+---
+ignore: |
+  .tox
+  awx/main/tests/data/inventory/plugins/**
+  # vault files
+  awx/main/tests/data/ansible_utils/playbooks/valid/vault.yml
+  awx/ui/test/e2e/tests/smoke-vars.yml
+
+extends: default
+
+rules:
+  line-length: disable
@@ -1,7 +1,7 @@
 ---
 - name: Hello World Sample
   hosts: all
   tasks:
     - name: Hello Message
       debug:
         msg: "Hello World!"
@@ -1,7 +1,7 @@
 ---
 - name: Hello World Sample
   hosts: all
   tasks:
     - name: Hello Message
       debug:
         msg: "Hello World!"
@@ -1 +1,2 @@
+---
 - hosts: all
@@ -1 +1,2 @@
+---
 - import_playbook: foo
@@ -1 +1,2 @@
+---
 - include: foo
@@ -18,8 +18,8 @@
         src: "{{src}}/artifacts/"
         dest: "{{src}}/artifacts/"
         mode: pull
-        delete: yes
-        recursive: yes
+        delete: true
+        recursive: true
       when: ansible_kubectl_config is not defined

     - name: Copy daemon log from the isolated host
@@ -34,9 +34,9 @@
         src: "{{src}}/artifacts/"
         dest: "{{src}}/artifacts/"
         mode: pull
-        delete: yes
-        recursive: yes
-        set_remote_user: no
+        delete: true
+        recursive: true
+        set_remote_user: false
         rsync_opts:
           - "--rsh=$RSH"
       environment:
@@ -49,7 +49,7 @@
         src: "{{src}}/daemon.log"
         dest: "{{src}}/daemon.log"
         mode: pull
-        set_remote_user: no
+        set_remote_user: false
         rsync_opts:
           - "--rsh=$RSH"
       environment:
@@ -12,14 +12,14 @@

     - name: cancel the job
       command: "ansible-runner stop {{private_data_dir}}"
-      ignore_errors: yes
+      ignore_errors: true

     - name: remove build artifacts
       file: path="{{item}}" state=absent
       register: result
       with_items: "{{cleanup_dirs}}"
       until: result is succeeded
-      ignore_errors: yes
+      ignore_errors: true
       retries: 3
       delay: 5
@@ -30,87 +30,87 @@
         - delete

     - block:
-      - name: update project using git
-        git:
-          dest: "{{project_path|quote}}"
-          repo: "{{scm_url}}"
-          version: "{{scm_branch|quote}}"
-          refspec: "{{scm_refspec|default(omit)}}"
-          force: "{{scm_clean}}"
-          accept_hostkey: "{{scm_accept_hostkey|default(omit)}}"
-        register: git_result
+        - name: update project using git
+          git:
+            dest: "{{project_path|quote}}"
+            repo: "{{scm_url}}"
+            version: "{{scm_branch|quote}}"
+            refspec: "{{scm_refspec|default(omit)}}"
+            force: "{{scm_clean}}"
+            accept_hostkey: "{{scm_accept_hostkey|default(omit)}}"
+          register: git_result

-      - name: Set the git repository version
-        set_fact:
-          scm_version: "{{ git_result['after'] }}"
-        when: "'after' in git_result"
+        - name: Set the git repository version
+          set_fact:
+            scm_version: "{{ git_result['after'] }}"
+          when: "'after' in git_result"
       tags:
         - update_git

     - block:
-      - name: update project using hg
-        hg:
-          dest: "{{project_path|quote}}"
-          repo: "{{scm_url|quote}}"
-          revision: "{{scm_branch|quote}}"
-          force: "{{scm_clean}}"
-        register: hg_result
+        - name: update project using hg
+          hg:
+            dest: "{{project_path|quote}}"
+            repo: "{{scm_url|quote}}"
+            revision: "{{scm_branch|quote}}"
+            force: "{{scm_clean}}"
+          register: hg_result

-      - name: Set the hg repository version
-        set_fact:
-          scm_version: "{{ hg_result['after'] }}"
-        when: "'after' in hg_result"
+        - name: Set the hg repository version
+          set_fact:
+            scm_version: "{{ hg_result['after'] }}"
+          when: "'after' in hg_result"

-      - name: parse hg version string properly
-        set_fact:
-          scm_version: "{{scm_version|regex_replace('^([A-Za-z0-9]+).*$', '\\1')}}"
+        - name: parse hg version string properly
+          set_fact:
+            scm_version: "{{scm_version|regex_replace('^([A-Za-z0-9]+).*$', '\\1')}}"
       tags:
         - update_hg

     - block:
-      - name: update project using svn
-        subversion:
-          dest: "{{project_path|quote}}"
-          repo: "{{scm_url|quote}}"
-          revision: "{{scm_branch|quote}}"
-          force: "{{scm_clean}}"
-          username: "{{scm_username|default(omit)}}"
-          password: "{{scm_password|default(omit)}}"
-        environment:
-          LC_ALL: 'en_US.UTF-8'
-        register: svn_result
+        - name: update project using svn
+          subversion:
+            dest: "{{project_path|quote}}"
+            repo: "{{scm_url|quote}}"
+            revision: "{{scm_branch|quote}}"
+            force: "{{scm_clean}}"
+            username: "{{scm_username|default(omit)}}"
+            password: "{{scm_password|default(omit)}}"
+          environment:
+            LC_ALL: 'en_US.UTF-8'
+          register: svn_result

-      - name: Set the svn repository version
-        set_fact:
-          scm_version: "{{ svn_result['after'] }}"
-        when: "'after' in svn_result"
+        - name: Set the svn repository version
+          set_fact:
+            scm_version: "{{ svn_result['after'] }}"
+          when: "'after' in svn_result"

-      - name: parse subversion version string properly
-        set_fact:
-          scm_version: "{{scm_version|regex_replace('^.*Revision: ([0-9]+).*$', '\\1')}}"
+        - name: parse subversion version string properly
+          set_fact:
+            scm_version: "{{scm_version|regex_replace('^.*Revision: ([0-9]+).*$', '\\1')}}"
       tags:
         - update_svn

     - block:
-      - name: Ensure the project directory is present
-        file:
-          dest: "{{project_path|quote}}"
-          state: directory
+        - name: Ensure the project directory is present
+          file:
+            dest: "{{project_path|quote}}"
+            state: directory

-      - name: Fetch Insights Playbook(s)
-        insights:
-          insights_url: "{{insights_url}}"
-          username: "{{scm_username}}"
-          password: "{{scm_password}}"
-          project_path: "{{project_path}}"
-          awx_license_type: "{{awx_license_type}}"
-          awx_version: "{{awx_version}}"
-        register: results
+        - name: Fetch Insights Playbook(s)
+          insights:
+            insights_url: "{{insights_url}}"
+            username: "{{scm_username}}"
+            password: "{{scm_password}}"
+            project_path: "{{project_path}}"
+            awx_license_type: "{{awx_license_type}}"
+            awx_version: "{{awx_version}}"
+          register: results

-      - name: Save Insights Version
-        set_fact:
-          scm_version: "{{results.version}}"
-        when: results is defined
+        - name: Save Insights Version
+          set_fact:
+            scm_version: "{{results.version}}"
+          when: results is defined
       tags:
         - update_insights
@@ -129,39 +129,39 @@
   tasks:

     - block:
-      - name: detect requirements.yml
-        stat: path={{project_path|quote}}/roles/requirements.yml
-        register: doesRequirementsExist
+        - name: detect requirements.yml
+          stat: path={{project_path|quote}}/roles/requirements.yml
+          register: doesRequirementsExist

-      - name: fetch galaxy roles from requirements.yml
-        command: ansible-galaxy install -r requirements.yml -p {{roles_destination|quote}}{{ ' -' + 'v' * ansible_verbosity if ansible_verbosity else '' }}
-        args:
-          chdir: "{{project_path|quote}}/roles"
-        register: galaxy_result
-        when: doesRequirementsExist.stat.exists
-        changed_when: "'was installed successfully' in galaxy_result.stdout"
-        environment:
-          ANSIBLE_FORCE_COLOR: False
+        - name: fetch galaxy roles from requirements.yml
+          command: ansible-galaxy install -r requirements.yml -p {{roles_destination|quote}}{{ ' -' + 'v' * ansible_verbosity if ansible_verbosity else '' }}
+          args:
+            chdir: "{{project_path|quote}}/roles"
+          register: galaxy_result
+          when: doesRequirementsExist.stat.exists
+          changed_when: "'was installed successfully' in galaxy_result.stdout"
+          environment:
+            ANSIBLE_FORCE_COLOR: false

       when: roles_enabled|bool
       tags:
         - install_roles

     - block:
-      - name: detect collections/requirements.yml
-        stat: path={{project_path|quote}}/collections/requirements.yml
-        register: doesCollectionRequirementsExist
+        - name: detect collections/requirements.yml
+          stat: path={{project_path|quote}}/collections/requirements.yml
+          register: doesCollectionRequirementsExist

-      - name: fetch galaxy collections from collections/requirements.yml
-        command: ansible-galaxy collection install -r requirements.yml -p {{collections_destination|quote}}{{ ' -' + 'v' * ansible_verbosity if ansible_verbosity else '' }}
-        args:
-          chdir: "{{project_path|quote}}/collections"
-        register: galaxy_collection_result
-        when: doesCollectionRequirementsExist.stat.exists
-        changed_when: "'Installing ' in galaxy_collection_result.stdout"
-        environment:
-          ANSIBLE_FORCE_COLOR: False
-          ANSIBLE_COLLECTIONS_PATHS: "{{ collections_destination }}"
+        - name: fetch galaxy collections from collections/requirements.yml
+          command: ansible-galaxy collection install -r requirements.yml -p {{collections_destination|quote}}{{ ' -' + 'v' * ansible_verbosity if ansible_verbosity else '' }}
+          args:
+            chdir: "{{project_path|quote}}/collections"
+          register: galaxy_collection_result
+          when: doesCollectionRequirementsExist.stat.exists
+          changed_when: "'Installing ' in galaxy_collection_result.stdout"
+          environment:
+            ANSIBLE_FORCE_COLOR: false
+            ANSIBLE_COLLECTIONS_PATHS: "{{ collections_destination }}"

       when:
         - "ansible_version.full is version_compare('2.8', '>=')"
@@ -13,17 +13,17 @@
   tasks:
     - name: synchronize job environment with isolated host
      synchronize:
-        copy_links: yes
+        copy_links: true
        src: "{{ src }}"
        dest: "{{ dest }}"
      when: ansible_kubectl_config is not defined

     - name: synchronize job environment with remote job container
      synchronize:
-        copy_links: yes
+        copy_links: true
        src: "{{ src }}"
        dest: "{{ dest }}"
-        set_remote_user: no
+        set_remote_user: false
        rsync_opts:
          - "--rsh=$RSH"
      environment:
@@ -51,4 +51,4 @@
        content: "{{secret}}"
        path: "{{src}}/env/ssh_key"
      when: key.stat.exists
-      no_log: True
+      no_log: true
@@ -1,3 +1,4 @@
+---
 - hosts: all
   vars:
     scan_use_checksum: false
@@ -33,4 +34,3 @@
         get_checksum: '{{ scan_use_checksum }}'
         recursive: '{{ scan_use_recursive }}'
       when: scan_file_paths is defined and ansible_os_family == "Windows"
-
@@ -1,3 +1,4 @@
+---
 clouds:
   vexxhost:
     profile: vexxhost
@@ -19,6 +20,6 @@ clouds:
       password: stack
       project_name: stack
 ansible:
-  use_hostnames: True
-  expand_hostvars: False
-  fail_on_errors: True
+  use_hostnames: true
+  expand_hostvars: false
+  fail_on_errors: true
@@ -1,3 +1,4 @@
+---
 - hosts: localhost
   gather_facts: false
   connection: local
@@ -7,28 +8,28 @@
     collection_version: 0.0.1  # not for updating, pass in extra_vars

   tasks:
-  - name: Do file content replacements for non-default namespace or package name
-    block:
-      - name: Find all module files
-        find:
-          paths: "{{ playbook_dir }}/plugins/modules"
-          patterns: "*.py"
-        register: module_files
+    - name: Do file content replacements for non-default namespace or package name
+      block:
+        - name: Find all module files
+          find:
+            paths: "{{ playbook_dir }}/plugins/modules"
+            patterns: "*.py"
+          register: module_files

-      - name: Change files to support desired namespace and package names
-        replace:
-          path: "{{ item.path }}"
-          regexp: '^extends_documentation_fragment: awx.awx.auth$'
-          replace: 'extends_documentation_fragment: {{ collection_namespace }}.{{ collection_package }}.auth'
-        with_items: "{{ module_files.files }}"
+        - name: Change files to support desired namespace and package names
+          replace:
+            path: "{{ item.path }}"
+            regexp: '^extends_documentation_fragment: awx.awx.auth$'
+            replace: 'extends_documentation_fragment: {{ collection_namespace }}.{{ collection_package }}.auth'
+          with_items: "{{ module_files.files }}"

-      - name: Change files to support desired namespace and package names
-        replace:
-          path: "{{ playbook_dir }}/plugins/inventory/tower.py"
-          regexp: "^ NAME = 'awx.awx.tower' # REPLACE$"
-          replace: " NAME = '{{ collection_namespace }}.{{ collection_package }}.tower' # REPLACE"
-        when:
-          - (collection_package != 'awx') or (collection_namespace != 'awx')
+        - name: Change files to support desired namespace and package names
+          replace:
+            path: "{{ playbook_dir }}/plugins/inventory/tower.py"
+            regexp: "^ NAME = 'awx.awx.tower' # REPLACE$"
+            replace: " NAME = '{{ collection_namespace }}.{{ collection_package }}.tower' # REPLACE"
+          when:
+            - (collection_package != 'awx') or (collection_namespace != 'awx')

-  - name: Template the galaxy.yml file
-    template: src={{ playbook_dir }}/galaxy.yml.j2 dest={{ playbook_dir }}/galaxy.yml
+    - name: Template the galaxy.yml file
+      template: src={{ playbook_dir }}/galaxy.yml.j2 dest={{ playbook_dir }}/galaxy.yml
@@ -21,12 +21,12 @@ kind: Role
 metadata:
   name: pod-manager
 rules:
-- apiGroups: [""] # "" indicates the core API group
-  resources: ["pods"]
-  verbs: ["get", "list", "watch", "create", "update", "patch", "delete"]
-- apiGroups: [""]
-  resources: ["pods/exec"]
-  verbs: ["create"]
+  - apiGroups: [""]  # "" indicates the core API group
+    resources: ["pods"]
+    verbs: ["get", "list", "watch", "create", "update", "patch", "delete"]
+  - apiGroups: [""]
+    resources: ["pods/exec"]
+    verbs: ["create"]

 ---
 kind: RoleBinding
@@ -34,8 +34,8 @@ apiVersion: rbac.authorization.k8s.io/v1beta1
 metadata:
   name: awx-pod-manager
 subjects:
-- kind: ServiceAccount
-  name: awx
+  - kind: ServiceAccount
+    name: awx
 roleRef:
   apiGroup: rbac.authorization.k8s.io
   kind: Role
@@ -3,5 +3,5 @@
   hosts: all
   gather_facts: false
   roles:
-    - { role: image_build }
-    - { role: image_push, when: "docker_registry is defined" }
+    - {role: image_build}
+    - {role: image_push, when: "docker_registry is defined"}
@@ -2,8 +2,8 @@
 - name: Build and deploy AWX
   hosts: all
   roles:
-    - { role: check_vars }
-    - { role: image_build, when: "dockerhub_base is not defined" }
-    - { role: image_push, when: "docker_registry is defined and dockerhub_base is not defined" }
-    - { role: kubernetes, when: "openshift_host is defined or kubernetes_context is defined" }
-    - { role: local_docker, when: "openshift_host is not defined and kubernetes_context is not defined" }
+    - {role: check_vars}
+    - {role: image_build, when: "dockerhub_base is not defined"}
+    - {role: image_push, when: "docker_registry is defined and dockerhub_base is not defined"}
+    - {role: kubernetes, when: "openshift_host is defined or kubernetes_context is defined"}
+    - {role: local_docker, when: "openshift_host is not defined and kubernetes_context is not defined"}
@@ -3,12 +3,12 @@
 - name: postgres_data_dir should be defined
   assert:
     that:
-    - postgres_data_dir is defined and postgres_data_dir != ''
+      - postgres_data_dir is defined and postgres_data_dir != ''
     msg: "Set the value of 'postgres_data_dir' in the inventory file."
   when: pg_hostname is not defined or pg_hostname == ''

 - name: host_port should be defined
   assert:
     that:
-    - host_port is defined and host_port != ''
+      - host_port is defined and host_port != ''
     msg: "Set the value of 'host_port' in the inventory file."
@@ -3,47 +3,47 @@
 - name: openshift_project should be defined
   assert:
     that:
-    - openshift_project is defined and openshift_project != ''
+      - openshift_project is defined and openshift_project != ''
     msg: "Set the value of 'openshift_project' in the inventory file."

 - name: openshift_user should be defined
   assert:
     that:
-    - openshift_user is defined and openshift_user != ''
+      - openshift_user is defined and openshift_user != ''
     msg: "Set the value of 'openshift_user' in the inventory file."

 - name: openshift_password or openshift_token should be defined
   assert:
     that:
-    - (openshift_password is defined and openshift_password != '') or
-      (openshift_token is defined and openshift_token != '')
+      - (openshift_password is defined and openshift_password != '') or
+        (openshift_token is defined and openshift_token != '')
     msg: "Set the value of 'openshift_password' or 'openshift_token' in the inventory file."

 - name: docker_registry should be defined if not using dockerhub
   assert:
     that:
-    - docker_registry is defined and docker_registry != ''
+      - docker_registry is defined and docker_registry != ''
     msg: "Set the value of 'docker_registry' in the inventory file."
   when: dockerhub_base is not defined

 - name: docker_registry_repository should be defined if not using dockerhub
   assert:
     that:
-    - docker_registry_repository is defined and docker_registry_repository != ''
+      - docker_registry_repository is defined and docker_registry_repository != ''
     msg: "Set the value of 'docker_registry_repository' in the inventory file."
   when: dockerhub_base is not defined

 - name: docker_registry_username should be defined if not using dockerhub
   assert:
     that:
-    - docker_registry_username is defined and docker_registry_username != ''
+      - docker_registry_username is defined and docker_registry_username != ''
     msg: "Set the value of 'docker_registry_username' in the inventory file."
   when: dockerhub_base is not defined

 - name: docker_registry_password should be defined
   assert:
     that:
-    - docker_registry_password is defined and docker_registry_password != ''
+      - docker_registry_password is defined and docker_registry_password != ''
     msg: "Set the value of 'docker_registry_password' in the inventory file."
   when: dockerhub_base is not defined
@@ -37,7 +37,7 @@
   shell: make clean
   args:
     chdir: ..
-  ignore_errors: yes
+  ignore_errors: true
   when: not sdist.stat.exists
   delegate_to: localhost
@@ -46,7 +46,7 @@
     build:
       path: "{{ role_path }}/files"
       dockerfile: Dockerfile.sdist
-      pull: no
+      pull: false
       args:
         http_proxy: "{{ http_proxy | default('') }}"
         https_proxy: "{{ https_proxy | default('') }}"
@@ -178,7 +178,7 @@
     build:
      path: "{{ docker_base_path }}"
      dockerfile: Dockerfile
-      pull: no
+      pull: false
      args:
        http_proxy: "{{ http_proxy | default('') }}"
        https_proxy: "{{ https_proxy | default('') }}"
@@ -186,7 +186,7 @@
     name: "{{ web_image }}"
     tag: "{{ awx_version }}"
     source: 'build'
-    force_source: yes
+    force_source: true
   delegate_to: localhost

 - name: Build base task image
@@ -194,7 +194,7 @@
     build:
      path: "{{ docker_base_path }}"
      dockerfile: Dockerfile.task
-      pull: no
+      pull: false
      args:
        http_proxy: "{{ http_proxy | default('') }}"
        https_proxy: "{{ https_proxy | default('') }}"
@@ -202,7 +202,7 @@
     name: "{{ task_image }}"
     tag: "{{ awx_version }}"
     source: 'build'
-    force_source: yes
+    force_source: true
   delegate_to: localhost

 - name: Tag task and web images as latest
@@ -4,7 +4,7 @@
     registry: "{{ docker_registry }}"
     username: "{{ docker_registry_username }}"
     password: "{{ docker_registry_password | quote }}"
-    reauthorize: yes
+    reauthorize: true
   when: docker_registry is defined and docker_registry_password is defined
   delegate_to: localhost
@@ -30,7 +30,7 @@
     name: "{{ web_image }}"
     repository: "{{ docker_registry }}/{{ docker_registry_repository }}/{{ web_image }}"
     tag: "{{ item }}"
-    push: yes
+    push: true
   with_items:
     - "latest"
     - "{{ awx_version }}"
@@ -40,7 +40,7 @@
     name: "{{ task_image }}"
     repository: "{{ docker_registry }}/{{ docker_registry_repository }}/{{ task_image }}"
     tag: "{{ item }}"
-    push: yes
+    push: true
   with_items:
     - "latest"
     - "{{ awx_version }}"
@@ -1,6 +1,6 @@
 ---
 dockerhub_version: "{{ lookup('file', playbook_dir + '/../VERSION') }}"
-create_preload_data: True
+create_preload_data: true

 admin_user: 'admin'
 admin_email: 'root@localhost'
@@ -36,7 +36,7 @@ kubernetes_rabbitmq_image: "ansible/awx_rabbitmq"
 kubernetes_memcached_version: "latest"
 kubernetes_memcached_image: "memcached"

-openshift_pg_emptydir: no
+openshift_pg_emptydir: false
 openshift_pg_pvc_name: postgresql

 kubernetes_deployment_name: awx
@@ -55,6 +55,6 @@ custom_venvs_path: "/opt/custom-venvs"
 custom_venvs_python: "python2"

 ca_trust_bundle: "/etc/pki/tls/certs/ca-bundle.crt"
-rabbitmq_use_ssl: False
+rabbitmq_use_ssl: false

 container_groups_image: "ansible/ansible-runner"
@@ -55,7 +55,7 @@
     --port={{ pg_port | default('5432') }} \
     --username='{{ pg_username }}' \
     --dbname='{{ pg_database }}'" > {{ playbook_dir }}/tower-openshift-backup-{{ now }}/tower.db
-  no_log: yes
+  no_log: true

 - name: Copy inventory into backup directory
   copy:
@@ -2,7 +2,7 @@
 - name: Get Namespace Detail
   shell: "kubectl get namespace {{ kubernetes_namespace }}"
   register: namespace_details
-  ignore_errors: yes
+  ignore_errors: true

 - name: Create AWX Kubernetes Project
   shell: "kubectl create namespace {{ kubernetes_namespace }}"
@@ -12,4 +12,3 @@
   set_fact:
     postgresql_service_name: "{{ kubernetes_deployment_name }}-postgresql"
   when: "pg_hostname is not defined or pg_hostname == ''"
-
@@ -32,7 +32,7 @@
     {{ kubernetes_deployment_name }} \
     -n {{ kubernetes_namespace }} -o=jsonpath='{.status.replicas}'
   register: deployment_details
-  ignore_errors: yes
+  ignore_errors: true

 - name: Set expected post-deployment Replicas value
   set_fact:
@@ -48,7 +48,7 @@
 - name: Get Postgres Service Detail
   shell: "{{ kubectl_or_oc }} describe svc {{ postgresql_service_name }} -n {{ kubernetes_namespace }}"
   register: postgres_svc_details
-  ignore_errors: yes
+  ignore_errors: true
   when: "pg_hostname is not defined or pg_hostname == ''"

 - name: Deploy PostgreSQL (OpenShift)
@@ -72,7 +72,7 @@
     -e POSTGRESQL_VERSION=10 \
     -n {{ kubernetes_namespace }}
   register: openshift_pg_activate
-  no_log: yes
+  no_log: true
   when:
     - pg_hostname is not defined or pg_hostname == ''
     - postgres_svc_details is defined and postgres_svc_details.rc != 0
@@ -83,7 +83,7 @@
 - name: Template PostgreSQL Deployment (Kubernetes)
   set_fact:
     pg_values: "{{ lookup('template', 'postgresql-values.yml.j2') }}"
-  no_log: yes
+  no_log: true

 - name: Deploy and Activate Postgres (Kubernetes)
   shell: |
@@ -95,7 +95,7 @@
     --values - \
     stable/postgresql
   register: kubernetes_pg_activate
-  no_log: yes
+  no_log: true
   when:
     - pg_hostname is not defined or pg_hostname == ''
     - postgres_svc_details is defined and postgres_svc_details.rc != 0
@@ -206,7 +206,7 @@
     - 'configmap'
     - 'deployment'
     - 'secret'
-  no_log: yes
+  no_log: true

 - name: Apply Deployment
   shell: |
@@ -215,7 +215,7 @@
     - "{{ configmap }}"
     - "{{ deployment }}"
     - "{{ secret }}"
-  no_log: yes
+  no_log: true

 - name: Delete any existing management pod
   shell: |
@@ -249,21 +249,21 @@
     {{ kubectl_or_oc }} -n {{ kubernetes_namespace }} exec ansible-tower-management -- \
     bash -c "echo 'from django.contrib.auth.models import User; nsu = User.objects.filter(is_superuser=True).count(); exit(0 if nsu > 0 else 1)' | awx-manage shell"
   register: super_check
-  ignore_errors: yes
+  ignore_errors: true
   changed_when: super_check.rc > 0

 - name: create django super user if it does not exist
   shell: |
     {{ kubectl_or_oc }} -n {{ kubernetes_namespace }} exec ansible-tower-management -- \
     bash -c "echo \"from django.contrib.auth.models import User; User.objects.create_superuser('{{ admin_user }}', '{{ admin_email }}', '{{ admin_password }}')\" | awx-manage shell"
-  no_log: yes
+  no_log: true
   when: super_check.rc > 0

 - name: update django super user password
   shell: |
     {{ kubectl_or_oc }} -n {{ kubernetes_namespace }} exec ansible-tower-management -- \
     bash -c "awx-manage update_password --username='{{ admin_user }}' --password='{{ admin_password }}'"
-  no_log: yes
+  no_log: true
   register: result
   changed_when: "'Password updated' in result.stdout"
@@ -2,7 +2,7 @@
 - name: Get Project Detail
   shell: "{{ openshift_oc_bin }} get project {{ openshift_project }}"
   register: project_details
-  ignore_errors: yes
+  ignore_errors: true

 - name: Create AWX Openshift Project
   shell: "{{ openshift_oc_bin }} new-project {{ openshift_project }}"
@@ -13,7 +13,7 @@
 - name: Check PVC status
   command: "{{ openshift_oc_bin }} get pvc {{ openshift_pg_pvc_name }} -n {{ openshift_project }} -o=jsonpath='{.status.phase}'"
   register: pg_pvc_status
-  ignore_errors: yes
+  ignore_errors: true

 - name: Ensure PostgreSQL PVC is available
   assert:
@@ -54,4 +54,3 @@
   fail:
     msg: "{{ openshift_auth_result.stderr | default('Invalid token') }}"
   when: openshift_auth_result.rc is defined and openshift_auth_result.rc != 0
-
@@ -18,4 +18,3 @@ memcached_image: "memcached"
 memcached_version: "alpine"
 memcached_hostname: "memcached"
 memcached_port: "11211"
-
@@ -17,7 +17,7 @@
   file:
     state: directory
     path: "{{ item }}"
-    recurse: yes
+    recurse: true
   when: upgrade_postgres | bool
   with_items:
     - "{{ postgres_data_dir }}/10/data"
@@ -25,7 +25,7 @@
 - name: Stop AWX before upgrading postgres
   docker_service:
     project_src: "{{ docker_compose_dir }}"
-    stopped: yes
+    stopped: true
   when: upgrade_postgres | bool

 - name: Upgrade Postgres
@@ -19,3 +19,11 @@ exclude=.tox,venv,awx/lib/site-packages,awx/plugins/inventory/ec2.py,awx/plugins
 max-line-length=160
 ignore=E201,E203,E221,E225,E231,E241,E251,E261,E265,E303,W291,W391,W293,E731,W504
 exclude=.tox,venv,awx/lib/site-packages,awx/plugins/inventory,awx/ui,awx/api/urls.py,awx/main/migrations,awx/main/tests/data,node_modules/,awx/projects/,tools/docker,awx/settings/local_*.py,installer/openshift/settings.py,build/,installer/,awxkit/test,awx_collection/
+
+[testenv:linters]
+deps =
+    flake8
+    yamllint
+commands =
+    - flake8
+    - yamllint -s .
@@ -49,12 +49,12 @@
 - name: Get Project Detail
   shell: "oc get project {{ awx_dev_project }}"
   register: project_details
-  ignore_errors: yes
+  ignore_errors: true

 - name: Get Postgres Service Detail
   shell: "oc describe svc postgresql -n {{ awx_dev_project }}"
   register: postgres_svc_details
-  ignore_errors: yes
+  ignore_errors: true

 - name: Create AWX Openshift Project
   shell: "oc new-project {{ awx_dev_project }}"
@@ -8,4 +8,4 @@
     - awx_task_cpu_request: 500
     - awx_task_mem_request: 512
   roles:
-    - { role: minishift }
+    - {role: minishift}
@@ -1,3 +1,4 @@
+---
 version: '2'
 services:
   haproxy:
@@ -1,3 +1,4 @@
+---
 version: '2'
 services:
   # Primary AWX Development Container
@@ -48,7 +49,7 @@ services:
     image: postgres:10
     container_name: tools_postgres_1
     ports:
-    - "5432:5432"
+      - "5432:5432"
   memcached:
     image: memcached:alpine
     container_name: tools_memcached_1
@@ -1,3 +1,4 @@
+---
 version: '2'
 services:
   # Primary Tower Development Container link
@@ -21,7 +22,8 @@ services:
     environment:
       DATABASE_URL: postgres://postgres@postgres/postgres
       CONJUR_DATA_KEY: 'dveUwOI/71x9BPJkIgvQRRBF3SdASc+HP4CUGL7TKvM='
-    depends_on: [ postgres ]
+    depends_on:
+      - postgres
     links:
       - postgres
     ports:
@@ -1,3 +1,4 @@
+---
 version: '2'
 services:
   # Primary Tower Development Container link
@@ -1,3 +1,4 @@
+---
 # Structure for the Elastic Stack docker configuration came from docker-elk:
 # https://github.com/deviantony/docker-elk
 # docker-elk is under the MIT License,
@@ -15,8 +16,8 @@ services:
       - "9300:9300"
     environment:
       ES_JAVA_OPTS: "-Xms1g -Xmx1g"
-    # networks: # add back in when a connection to tower_tools is possible
-    #   - docker_elk
+    # networks:  # add back in when a connection to tower_tools is possible
+    #   - docker_elk

   logstash:
     build: elastic/logstash/
@@ -1,3 +1,4 @@
+---
 version: '2'
 services:
   # Tower Development Cluster
@@ -9,4 +10,4 @@ services:
       - logstash
   tower_3:
     links:
-    - logstash
+      - logstash
@@ -1,3 +1,4 @@
+---
 version: '2'
 services:
   # Primary Tower Development Container
@@ -1,3 +1,4 @@
+---
 # Kibana is served by a back end server. This setting specifies the port to use.
 server.port: 5601
@@ -89,4 +90,4 @@ elasticsearch.url: "http://elasticsearch:9200"

 # Set the interval in milliseconds to sample system and process performance
 # metrics. Minimum is 100ms. Defaults to 10000.
-# ops.interval: 10000
+# ops.interval: 10000
@@ -1,21 +1,22 @@
+---
 # prometheus.yml
 # my global config
 global:
-  scrape_interval: 15s # Set the scrape interval to every 15 seconds. Default is every 1 minute.
-  evaluation_interval: 15s # Evaluate rules every 15 seconds. The default is every 1 minute.
-  # scrape_timeout is set to the global default (10s).
+  scrape_interval: 15s  # Set the scrape interval to every 15 seconds. Default is every 1 minute.
+  evaluation_interval: 15s  # Evaluate rules every 15 seconds. The default is every 1 minute.
+  # scrape_timeout is set to the global default (10s).

 # Alertmanager configuration
 alerting:
   alertmanagers:
-  - static_configs:
-    - targets:
+    - static_configs:
+        - targets:
           # - alertmanager:9093

 # Load rules once and periodically evaluate them according to the global 'evaluation_interval'.
 rule_files:
-  # - "first_rules.yml"
-  # - "second_rules.yml"
+  # - "first_rules.yml"
+  # - "second_rules.yml"


 # A scrape configuration containing exactly one endpoint to scrape:
@@ -23,14 +24,14 @@ rule_files:
 scrape_configs:
   # The job name is added as a label `job=<job_name>` to any timeseries scraped from this config.
   - job_name: 'prometheus'
-    # metrics_path defaults to '/metrics'
-    # scheme defaults to 'http'.
+    # metrics_path defaults to '/metrics'
+    # scheme defaults to 'http'.
     static_configs:
-    - targets: ['127.0.0.1:9090']
+      - targets: ['127.0.0.1:9090']

   - job_name: 'awx'
     tls_config:
-      insecure_skip_verify: True
+      insecure_skip_verify: true
     metrics_path: /api/v2/metrics
     scrape_interval: 5s
     scheme: http
@@ -42,4 +43,4 @@ scrape_configs:
   # bearer_token: oauth-token
   static_configs:
     - targets:
-      - awxweb:8013
+        - awxweb:8013