Label k8s apps, adjust collect/upload info steps

- Drop debugs from collect-info playbook
- Drop sudo from collect-info step and add target dir var (required for travis jobs)
- Label all k8s apps, including static manifests
- Add logs for K8s apps to be collected as well
- Fix upload to GCS as a public-read tarball

Signed-off-by: Bogdan Dobrelya <bdobrelia@mirantis.com>
This commit is contained in:
Bogdan Dobrelya
2016-11-09 14:15:27 +01:00
parent 57e467c03c
commit cf7c60029b
10 changed files with 106 additions and 39 deletions

View File

@@ -0,0 +1,11 @@
[Credentials]
gs_access_key_id = {{ gs_key }}
gs_secret_access_key = {{ gs_skey }}
[Boto]
https_validate_certificates = True
[GoogleCompute]
[GSUtil]
default_project_id = {{ gce_project_id }}
content_language = en
default_api_version = 2
[OAuth2]

View File

@@ -3,7 +3,7 @@
[
{
"action": {"type": "Delete"},
"condition": {"age": 2}
"condition": {"age": {{expire_days}}}
}
]
}

View File

@@ -3,65 +3,73 @@
become: false
gather_facts: no
vars:
expire_days: 2
tasks:
    - name: Generate unique bucket name prefix
shell: date +%s | sha256sum | base64 | head -c 32
shell: date +%Y%m%d
register: out
- name: replace_test_id
set_fact:
test_name: "kargo-{{ commit }}-{{ pr }}-{{ out.stdout|lower }}-{{ test_id | regex_replace('\\.', '-') }}"
test_name: "kargo-ci-{{ out.stdout }}"
- set_fact:
file_name: "{{ostype}}-{{kube_network_plugin}}-{{commit}}-logs.tar.gz"
- name: Create a bucket
gc_storage:
bucket: "{{ test_name }}"
mode: create
permission: private
permission: public-read
gs_access_key: "{{ gs_key }}"
gs_secret_key: "{{ gs_skey }}"
no_log: True
- name: Create a lifecycle template for the bucket
template:
src: gcs_life.json.j2
dest: "{{dir}}/gcs_life.json"
- name: Create a boto config to access GCS
template:
src: boto.j2
dest: "{{dir}}/.boto"
no_log: True
- name: Download gsutil cp installer
get_url:
url: https://dl.google.com/dl/cloudsdk/channels/rapid/install_google_cloud_sdk.bash
dest: /tmp/gcp-installer.sh
dest: "{{dir}}/gcp-installer.sh"
- name: Get gsutil tool
script: /tmp/gcp-installer.sh
script: "{{dir}}/gcp-installer.sh"
environment:
CLOUDSDK_CORE_DISABLE_PROMPTS: 1
CLOUDSDK_INSTALL_DIR: "{{dir}}"
no_log: True
- name: Create a lifecycle template for the bucket
file: src=gcs_life.json path=/tmp/gcs_life.json
- name: Hack the boto config for GCS access keys
lineinfile:
dest: .boto
line: "gs_access_key_id = {{ gs_key }}"
regexp: "^#gs_access_key_id = .*$"
no_log: True
- name: Hack the boto config for GCS secret access keys
lineinfile:
dest: .boto
line: "gs_secret_access_key = {{ gs_skey }}"
regexp: "^#gs_secret_access_key = .*$"
no_log: True
ignore_errors: true
- name: Apply the lifecycle rules
shell: bash google-cloud-sdk/bin/gsutil lifecycle set /tmp/gcs_life.json gs://{{ test_name }}
command: "{{dir}}/google-cloud-sdk/bin/gsutil lifecycle set {{dir}}/gcs_life.json gs://{{test_name}}"
environment:
BOTO_CONFIG: .boto
BOTO_CONFIG: "{{dir}}/.boto"
no_log: True
- name: Upload collected diagnostic info
gc_storage:
bucket: "{{ test_name }}"
mode: put
permission: private
object: "build-{{ ostype }}-{{ kube_network_plugin }}-logs.tar.gz"
src: logs.tar.gz
permission: public-read
object: "{{ file_name }}"
src: "{{dir}}/logs.tar.gz"
headers: '{"Content-Encoding": "x-gzip"}'
gs_access_key: "{{ gs_key }}"
gs_secret_key: "{{ gs_skey }}"
        expiration: "{{ (expire_days * 86400)|int }}"
ignore_errors: true
no_log: True
- debug:
msg: "A public url https://storage.googleapis.com/{{test_name}}/{{file_name}}"