Add plugin to cleanly manage possibly versioned project archives

Signed-off-by: Philip Douglass <philip.douglass@amadeus.com>
Authored by Philip DOUGLASS on 2020-08-16 14:47:23 -04:00, committed by Philip Douglass
parent 6720cd9bda
commit f72b777b07
3 changed files with 157 additions and 50 deletions


@@ -0,0 +1,82 @@
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import zipfile
import tarfile
import os
from ansible.plugins.action import ActionBase
from ansible.utils.display import Display
display = Display()
class ActionModule(ActionBase):
    def run(self, tmp=None, task_vars=None):
        self._supports_check_mode = False
        result = super(ActionModule, self).run(tmp, task_vars)

        src = self._task.args.get("src")
        proj_path = self._task.args.get("project_path")
        force = self._task.args.get("force", False)

        # Try the archive as a zip first, then fall back to tar; bind uniform
        # accessors so the extraction loop below works for either format.
        try:
            archive = zipfile.ZipFile(src)
            get_filenames = archive.namelist
            get_members = archive.infolist
        except zipfile.BadZipFile:
            try:
                archive = tarfile.open(src)
                get_filenames = archive.getnames
                get_members = archive.getmembers
            except tarfile.ReadError:
                result["failed"] = True
                result["msg"] = "{0} is not a valid archive".format(src)
                return result

        # Most well formed archives contain a single root directory, typically named
        # project-name-1.0.0. The project contents should be inside that directory.
        start_index = 0
        root_contents = set(
            [filename.split(os.path.sep)[0] for filename in get_filenames()]
        )
        if len(root_contents) == 1:
            start_index = len(list(root_contents)[0]) + 1

        for member in get_members():
            # ZipInfo exposes .filename, TarInfo exposes .name
            try:
                filename = member.filename
            except AttributeError:
                filename = member.name

            # Skip the archive base directory
            if not filename[start_index:]:
                continue
            dest = os.path.join(proj_path, filename[start_index:])

            if not force and os.path.exists(dest):
                continue

            # ZipInfo has .is_dir(), TarInfo has .isdir()
            try:
                is_dir = member.is_dir()
            except AttributeError:
                is_dir = member.isdir()

            if is_dir:
                os.makedirs(dest, exist_ok=True)
            else:
                # ZipFile.open() returns a file object for a member; on a
                # TarFile that call raises TypeError, so fall back to reading
                # the tar member through an ExFileObject.
                try:
                    member_f = archive.open(member)
                except TypeError:
                    member_f = tarfile.ExFileObject(archive, member)
                with open(dest, "wb") as f:
                    f.write(member_f.read())
                member_f.close()

        archive.close()
        result["changed"] = True
        return result
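The prefix-stripping above is what distinguishes this plugin from a plain unarchive: when every member of the archive shares one root directory (as GitHub Release and Artifactory artifacts typically do), that directory is dropped so the project contents land directly in project_path. The following standalone sketch shows the same idea outside Ansible; it is not part of the commit, and the archive name, temporary layout, and file contents are made up for illustration.

# Illustrative only: a quick local check of the "strip the single root
# directory" behavior, using a made-up archive layout.
import os
import tarfile
import tempfile

with tempfile.TemporaryDirectory() as tmp:
    # Build project-name-1.0.0/README.md inside a tarball, mimicking a
    # GitHub Release / Artifactory artifact.
    src_root = os.path.join(tmp, "project-name-1.0.0")
    os.makedirs(src_root)
    with open(os.path.join(src_root, "README.md"), "w") as f:
        f.write("hello\n")
    archive_path = os.path.join(tmp, "project.tar.gz")
    with tarfile.open(archive_path, "w:gz") as tar:
        tar.add(src_root, arcname="project-name-1.0.0")

    # Same prefix-stripping logic as the action plugin: if every member shares
    # one root directory, drop that prefix when computing destinations.
    with tarfile.open(archive_path) as tar:
        names = tar.getnames()
        roots = set(name.split(os.path.sep)[0] for name in names)
        start_index = len(list(roots)[0]) + 1 if len(roots) == 1 else 0
        stripped = [name[start_index:] for name in names if name[start_index:]]

    print(stripped)  # expected: ['README.md']

Running it prints ['README.md'], confirming that the versioned base directory itself never appears under project_path.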


@@ -0,0 +1,40 @@
ANSIBLE_METADATA = {
    "metadata_version": "1.0",
    "status": ["stableinterface"],
    "supported_by": "community",
}

DOCUMENTATION = """
---
module: project_archive
short_description: unpack a project archive
description:
    - Unpacks an archive that contains a project, in order to support handling versioned
      artifacts from (for example) GitHub Releases or Artifactory builds.
    - Handles projects in the archive root, or in a single base directory of the archive.
version_added: "2.9"
options:
    src:
        description:
            - The source archive of the project artifact.
        required: true
    project_path:
        description:
            - Directory to write the project archive contents.
        required: true
    force:
        description:
            - Files in the project_path will be overwritten by matching files in the archive.
        default: false
author:
    - Philip Douglass (@philipsd6)
"""

EXAMPLES = """
- project_archive:
    src: "{{ project_path }}/.archive/project.tar.gz"
    project_path: "{{ project_path }}"
    force: "{{ scm_clean }}"
"""


@@ -102,61 +102,46 @@
         - update_insights
     - block:
         - name: Ensure the project directory is present
           file:
             dest: "{{ project_path|quote }}"
             state: directory
+        - name: Ensure the project archive directory is present
+          file:
+            dest: "{{ project_path|quote }}/.archive"
+            state: directory
-        - name: Get archive from url
-          get_url:
-            url: "{{ scm_url|quote }}"
-            dest: "{{ project_path|quote }}"
-            url_username: "{{ scm_username|default(omit) }}"
-            url_password: "{{ scm_password|default(omit) }}"
-            force_basic_auth: yes
-            force: "{{ scm_clean }}"
-          register: get_url_result
+        - name: Get archive from url
+          get_url:
+            url: "{{ scm_url|quote }}"
+            dest: "{{ project_path|quote }}/.archive/"
+            url_username: "{{ scm_username|default(omit) }}"
+            url_password: "{{ scm_password|default(omit) }}"
+            force_basic_auth: true
+          register: get_archive
-        - name: Unpack archive
-          unarchive:
-            src: "{{ get_url_result.dest }}"
-            dest: "{{ project_path|quote }}"
-            keep_newer: "{{ not scm_clean }}"
-            list_files: yes
-          register: unarchived
+        - name: Unpack archive
+          project_archive:
+            src: "{{ get_archive.dest }}"
+            project_path: "{{ project_path|quote }}"
+            force: "{{ scm_clean }}"
+          when: get_archive.changed or scm_clean
+          register: unarchived
-        - set_fact:
-            archive_root_dirs: "{{ archive_root_dirs |default([]) |union(item.split('/')[0:1]) }}"
-          loop: "{{ unarchived.files }}"
+        - name: Find previous archives
+          find:
+            paths: "{{ project_path|quote }}/.archive/"
+            excludes:
+              - "{{ get_archive.dest|basename }}"
+          when: unarchived.changed
+          register: previous_archive
-        - block:
-            - name: Delete unarchived single root directory
-              file:
-                dest: "{{ project_path|quote }}/{{ archive_root_dirs[0] }}"
-                state: absent
-            - name: Link single root directory to project directory
-              file:
-                src: "{{ project_path|quote }}"
-                dest: "{{ project_path|quote }}/{{ archive_root_dirs[0] }}"
-                state: link
-            - name: Unpack archive
-              unarchive:
-                src: "{{ get_url_result.dest }}"
-                dest: "{{ project_path|quote }}"
-                keep_newer: "{{ not scm_clean }}"
-            - name: Delete link
-              file:
-                dest: "{{ project_path|quote }}/{{ archive_root_dirs[0] }}"
-                state: absent
-          when: archive_root_dirs |length == 1
-        - name: Set scm_version to archive sha1 checksum
-          set_fact:
-            scm_version: "{{ get_url_result.checksum_src }}"
+        - name: Remove previous archives
+          file:
+            path: "{{ item.path }}"
+            state: absent
+          loop: "{{ previous_archive.files }}"
+          when: previous_archive.files|default([])
+        - name: Set scm_version to archive sha1 checksum
+          set_fact:
+            scm_version: "{{ get_archive.checksum_src }}"
       tags:
         - update_archive
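With these tasks, scm_version is no longer a git revision but the SHA-1 that get_url reports for the downloaded artifact (checksum_src), so re-running the update against an unchanged archive keeps the same version string while a new artifact produces a new one. The digest can be reproduced outside Ansible for troubleshooting; a small sketch, with a made-up archive path:

# Illustrative: reproduce the SHA-1 that get_url reports as checksum_src
# (and that the play stores in scm_version) for a downloaded archive.
# The path passed at the bottom is made up for the example.
import hashlib


def archive_sha1(path, chunk_size=65536):
    digest = hashlib.sha1()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest()


print(archive_sha1("/var/lib/projects/demo/.archive/project.tar.gz"))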