mirror of https://github.com/kubernetes-sigs/kubespray.git
synced 2026-01-14 19:30:42 -03:30
Merge pull request #11891 from VannTen/download_graphql
Overhaul of the python hashes updater
This commit is contained in:
commit a5142e7dfd
35  scripts/component_hash_update/pyproject.toml  Normal file
@@ -0,0 +1,35 @@
[build-system]
requires = ["setuptools >= 61.0",
  "setuptools_scm >= 8.0",
]
build-backend = "setuptools.build_meta"

[project]
name = "kubespray_component_hash_update"
version = "1.0.0"
dependencies = [
  "more_itertools",
  "ruamel.yaml",
  "requests",
  "packaging",
]

requires-python = ">= 3.10"

authors = [
  { name = "Craig Rodrigues", email = "rodrigc@crodrigues.org" },
  { name = "Simon Wessel" },
  { name = "Max Gautier", email = "mg@max.gautier.name" },
]
maintainers = [
  { name = "The Kubespray maintainers" },
]

description = "Download or compute hashes for new versions of components deployed by Kubespray"

classifiers = [
  "License :: OSI Approved :: Apache-2.0",
]

[project.scripts]
update-hashes = "component_hash_update.download:main"
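The [project.scripts] table above is what exposes the updater as a console command. A minimal sketch of what the generated update-hashes wrapper boils down to (illustrative only; the real wrapper is produced by the installer, assuming the package is installed in the current environment):

# Hypothetical equivalent of the installer-generated "update-hashes" script.
import sys

from component_hash_update.download import main

if __name__ == "__main__":
    sys.exit(main())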
@@ -0,0 +1,94 @@
"""
Static download metadata for components updated by the update-hashes command.
"""

infos = {
    "calicoctl_binary": {
        "url": "https://github.com/projectcalico/calico/releases/download/v{version}/SHA256SUMS",
        "graphql_id": "R_kgDOA87D0g",
    },
    "ciliumcli_binary": {
        "url": "https://github.com/cilium/cilium-cli/releases/download/v{version}/cilium-{os}-{arch}.tar.gz.sha256sum",
        "graphql_id": "R_kgDOE0nmLg",
    },
    "cni_binary": {
        "url": "https://github.com/containernetworking/plugins/releases/download/v{version}/cni-plugins-{os}-{arch}-v{version}.tgz.sha256",
        "graphql_id": "R_kgDOBQqEpg",
    },
    "containerd_archive": {
        "url": "https://github.com/containerd/containerd/releases/download/v{version}/containerd-{version}-{os}-{arch}.tar.gz.sha256sum",
        "graphql_id": "R_kgDOAr9FWA",
    },
    "cri_dockerd_archive": {
        "binary": True,
        "url": "https://github.com/Mirantis/cri-dockerd/releases/download/v{version}/cri-dockerd-{version}.{arch}.tgz",
        "graphql_id": "R_kgDOEvvLcQ",
    },
    "crictl": {
        "url": "https://github.com/kubernetes-sigs/cri-tools/releases/download/v{version}/crictl-v{version}-{os}-{arch}.tar.gz.sha256",
        "graphql_id": "R_kgDOBMdURA",
    },
    "crio_archive": {
        "url": "https://storage.googleapis.com/cri-o/artifacts/cri-o.{arch}.v{version}.tar.gz.sha256sum",
        "graphql_id": "R_kgDOBAr5pg",
    },
    "crun": {
        "url": "https://github.com/containers/crun/releases/download/{version}/crun-{version}-linux-{arch}",
        "binary": True,
        "graphql_id": "R_kgDOBip3vA",
    },
    "etcd_binary": {
        "url": "https://github.com/etcd-io/etcd/releases/download/v{version}/SHA256SUMS",
        "graphql_id": "R_kgDOAKtHtg",
    },
    "gvisor_containerd_shim_binary": {
        "url": "https://storage.googleapis.com/gvisor/releases/release/{version}/{alt_arch}/containerd-shim-runsc-v1.sha512",
        "hashtype": "sha512",
        "tags": True,
        "graphql_id": "R_kgDOB9IlXg",
    },
    "gvisor_runsc_binary": {
        "url": "https://storage.googleapis.com/gvisor/releases/release/{version}/{alt_arch}/runsc.sha512",
        "hashtype": "sha512",
        "tags": True,
        "graphql_id": "R_kgDOB9IlXg",
    },
    "kata_containers_binary": {
        "url": "https://github.com/kata-containers/kata-containers/releases/download/{version}/kata-static-{version}-{arch}.tar.xz",
        "binary": True,
        "graphql_id": "R_kgDOBsJsHQ",
    },
    "kubeadm": {
        "url": "https://dl.k8s.io/release/v{version}/bin/linux/{arch}/kubeadm.sha256",
        "graphql_id": "R_kgDOAToIkg",
    },
    "kubectl": {
        "url": "https://dl.k8s.io/release/v{version}/bin/linux/{arch}/kubectl.sha256",
        "graphql_id": "R_kgDOAToIkg",
    },
    "kubelet": {
        "url": "https://dl.k8s.io/release/v{version}/bin/linux/{arch}/kubelet.sha256",
        "graphql_id": "R_kgDOAToIkg",
    },
    "nerdctl_archive": {
        "url": "https://github.com/containerd/nerdctl/releases/download/v{version}/SHA256SUMS",
        "graphql_id": "R_kgDOEvuRnQ",
    },
    "runc": {
        "url": "https://github.com/opencontainers/runc/releases/download/v{version}/runc.sha256sum",
        "graphql_id": "R_kgDOAjP4QQ",
    },
    "skopeo_binary": {
        "url": "https://github.com/lework/skopeo-binary/releases/download/v{version}/skopeo-{os}-{arch}.sha256",
        "graphql_id": "R_kgDOHQ6J9w",
    },
    "youki": {
        "url": "https://github.com/youki-dev/youki/releases/download/v{version}/youki-{version}-{alt_arch}-gnu.tar.gz",
        "binary": True,
        "graphql_id": "R_kgDOFPvgPg",
    },
    "yq": {
        "url": "https://github.com/mikefarah/yq/releases/download/v{version}/checksums-bsd",  # see https://github.com/mikefarah/yq/pull/1691 for why we use this url
        "graphql_id": "R_kgDOApOQGQ",
    },
}
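Each entry's url is a str.format template; the updater fills in version, os, arch, and alt_arch, and treats "binary": True entries as raw artifacts whose checksum must be computed locally. A rough sketch of both paths, with made-up version numbers (illustrative, not part of the commit):

# Illustrative sketch only; the version numbers below are hypothetical.
import hashlib

import requests

from component_hash_update.components import infos

# Checksum-file path: the URL points at a published .sha256 file.
url = infos["kubectl"]["url"].format(version="1.29.0", os="linux", arch="amd64")
print(url)  # -> https://dl.k8s.io/release/v1.29.0/bin/linux/amd64/kubectl.sha256

# "binary": True path: download the artifact itself and hash it locally.
crun_url = infos["crun"]["url"].format(version="1.15", os="linux", arch="amd64")
digest = hashlib.new(
    infos["crun"].get("hashtype", "sha256"), requests.get(crun_url).content
).hexdigest()
print(digest)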
@@ -0,0 +1,335 @@
#!/usr/bin/env python3

# After a new version of Kubernetes has been released,
# run this script to update roles/kubespray-defaults/defaults/main/checksums.yml
# with new hashes.

import sys
import os
import logging
import subprocess

from itertools import groupby, chain
from more_itertools import partition
from functools import cache
import argparse
import requests
import hashlib
from datetime import datetime
from ruamel.yaml import YAML
from packaging.version import Version, InvalidVersion
from importlib.resources import files
from pathlib import Path

from typing import Optional, Any

from . import components

CHECKSUMS_YML = Path("roles/kubespray-defaults/defaults/main/checksums.yml")

logger = logging.getLogger(__name__)


def open_yaml(file: Path):
    yaml = YAML()
    yaml.explicit_start = True
    yaml.preserve_quotes = True
    yaml.width = 4096

    with open(file, "r") as checksums_yml:
        data = yaml.load(checksums_yml)

    return data, yaml


arch_alt_name = {
    "amd64": "x86_64",
    "arm64": "aarch64",
    "ppc64le": None,
    "arm": None,
}

# TODO: downloads not supported
# gvisor: sha512 checksums
# helm_archive: PGP signatures
# krew_archive: different yaml structure (in our download)
# calico_crds_archive: different yaml structure (in our download)

# TODO:
# noarch support -> k8s manifests, helm charts
# different checksum format (needs download role changes)
# different verification methods (gpg, cosign) (needs download role changes) (or verify the sig in this script and only use the checksum in the playbook)
# perf improvements (async)


def download_hash(downloads: {str: {str: Any}}) -> None:
    # Handle files which contain multiple hashes, in various formats.
    # Each lambda is expected to produce a dictionary of hashes indexed by arch name.
    download_hash_extract = {
        "calicoctl_binary": lambda hashes: {
            line.split("-")[-1]: line.split()[0]
            for line in hashes.strip().split("\n")
            if line.count("-") == 2 and line.split("-")[-2] == "linux"
        },
        "etcd_binary": lambda hashes: {
            line.split("-")[-1].removesuffix(".tar.gz"): line.split()[0]
            for line in hashes.strip().split("\n")
            if line.split("-")[-2] == "linux"
        },
        "nerdctl_archive": lambda hashes: {
            line.split()[1].removesuffix(".tar.gz").split("-")[3]: line.split()[0]
            for line in hashes.strip().split("\n")
            if [x for x in line.split(" ") if x][1].split("-")[2] == "linux"
        },
        "runc": lambda hashes: {
            parts[1].split(".")[1]: parts[0]
            for parts in (line.split() for line in hashes.split("\n")[3:9])
        },
        "yq": lambda rhashes_bsd: {
            pair[0].split("_")[-1]: pair[1]
            # pair = (yq_<os>_<arch>, <hash>)
            for pair in (
                (line.split()[1][1:-1], line.split()[3])
                for line in rhashes_bsd.splitlines()
                if line.startswith("SHA256")
            )
            if pair[0].startswith("yq")
            and pair[0].split("_")[1] == "linux"
            and not pair[0].endswith(".tar.gz")
        },
    }

    checksums_file = (
        Path(
            subprocess.Popen(
                ["git", "rev-parse", "--show-toplevel"], stdout=subprocess.PIPE
            )
            .communicate()[0]
            .rstrip()
            .decode("utf-8")
        )
        / CHECKSUMS_YML
    )
    logger.info("Opening checksums file %s...", checksums_file)
    data, yaml = open_yaml(checksums_file)
    s = requests.Session()

    @cache
    def _get_hash_by_arch(download: str, version: str) -> {str: str}:

        hash_file = s.get(
            downloads[download]["url"].format(
                version=version,
                os="linux",
            ),
            allow_redirects=True,
        )
        hash_file.raise_for_status()
        return download_hash_extract[download](hash_file.content.decode())

    releases, tags = map(
        dict, partition(lambda r: r[1].get("tags", False), downloads.items())
    )
    repos = {
        "with_releases": [r["graphql_id"] for r in releases.values()],
        "with_tags": [t["graphql_id"] for t in tags.values()],
    }
    response = s.post(
        "https://api.github.com/graphql",
        json={
            "query": files(__package__).joinpath("list_releases.graphql").read_text(),
            "variables": repos,
        },
        headers={
            "Authorization": f"Bearer {os.environ['API_KEY']}",
        },
    )
    if "x-ratelimit-used" in response.headers._store:
        logger.info(
            "GitHub GraphQL API ratelimit status: used %s of %s. Next reset at %s",
            response.headers["X-RateLimit-Used"],
            response.headers["X-RateLimit-Limit"],
            datetime.fromtimestamp(int(response.headers["X-RateLimit-Reset"])),
        )
    response.raise_for_status()

    def valid_version(possible_version: str) -> Optional[Version]:
        try:
            return Version(possible_version)
        except InvalidVersion:
            return None

    repos = response.json()["data"]
    github_versions = dict(
        zip(
            chain(releases.keys(), tags.keys()),
            [
                {
                    v
                    for r in repo["releases"]["nodes"]
                    if not r["isPrerelease"]
                    and (v := valid_version(r["tagName"])) is not None
                }
                for repo in repos["with_releases"]
            ]
            + [
                {
                    v
                    for t in repo["refs"]["nodes"]
                    if (v := valid_version(t["name"].removeprefix("release-")))
                    is not None
                }
                for repo in repos["with_tags"]
            ],
            strict=True,
        )
    )

    components_supported_arch = {
        component.removesuffix("_checksums"): [a for a in archs.keys()]
        for component, archs in data.items()
    }
    new_versions = {
        c: {
            v
            for v in github_versions[c]
            if any(
                v > version
                and (
                    (v.major, v.minor) == (version.major, version.minor)
                    or c.startswith("gvisor")
                )
                for version in [
                    max(minors)
                    for _, minors in groupby(cur_v, lambda v: (v.minor, v.major))
                ]
            )
            # only get:
            # - patch versions (no minor or major bump) (exception for gvisor, which does not have a major.minor.patch scheme)
            # - newer ones (don't get old patch versions)
        }
        - set(cur_v)
        for component, archs in data.items()
        if (c := component.removesuffix("_checksums")) in downloads.keys()
        # this is only to bind cur_v in the scope
        and (
            cur_v := sorted(
                Version(str(k)) for k in next(archs.values().__iter__()).keys()
            )
        )
    }

    hash_set_to_0 = {
        c: {
            Version(str(v))
            for v, h in chain.from_iterable(a.items() for a in archs.values())
            if h == 0
        }
        for component, archs in data.items()
        if (c := component.removesuffix("_checksums")) in downloads.keys()
    }

    def get_hash(component: str, version: Version, arch: str):
        if component in download_hash_extract:
            hashes = _get_hash_by_arch(component, version)
            return hashes[arch]
        else:
            hash_file = s.get(
                downloads[component]["url"].format(
                    version=version,
                    os="linux",
                    arch=arch,
                    alt_arch=arch_alt_name[arch],
                ),
                allow_redirects=True,
            )
            hash_file.raise_for_status()
            if downloads[component].get("binary", False):
                return hashlib.new(
                    downloads[component].get("hashtype", "sha256"), hash_file.content
                ).hexdigest()
            return hash_file.content.decode().split()[0]

    for component, versions in chain(new_versions.items(), hash_set_to_0.items()):
        c = component + "_checksums"
        for arch in components_supported_arch[component]:
            for version in versions:
                data[c][arch][
                    str(version)
                ] = f"{downloads[component].get('hashtype', 'sha256')}:{get_hash(component, version, arch)}"

        data[c] = {
            arch: {
                v: versions[v]
                for v in sorted(
                    versions.keys(), key=lambda v: Version(str(v)), reverse=True
                )
            }
            for arch, versions in data[c].items()
        }

    with open(checksums_file, "w") as checksums_yml:
        yaml.dump(data, checksums_yml)
    logger.info("Updated %s", checksums_file)


def main():

    logging.basicConfig(stream=sys.stdout, level=logging.INFO)
    parser = argparse.ArgumentParser(
        description=f"Add new patch versions hashes in {CHECKSUMS_YML}",
        formatter_class=argparse.RawTextHelpFormatter,
        epilog=f"""
This script only looks up new patch versions relative to those already existing
in the data in {CHECKSUMS_YML},
which means it won't add new major or minor versions.
In order to add one of these, edit {CHECKSUMS_YML}
by hand, adding the new versions with a patch number of 0 (or the lowest relevant patch version)
and a hash value of 0;
then run this script.

Note that the script will try to add the versions on all
architecture keys already present for a given download target.

EXAMPLES:

crictl_checksums:
  ...
  amd64:
+   1.30.0: 0
    1.29.0: d16a1ffb3938f5a19d5c8f45d363bd091ef89c0bc4d44ad16b933eede32fdcbb
    1.28.0: 8dc78774f7cbeaf787994d386eec663f0a3cf24de1ea4893598096cb39ef2508""",
    )

    # Workaround for https://github.com/python/cpython/issues/53834#issuecomment-2060825835
    # Fixed in python 3.14
    class Choices(tuple):

        def __init__(self, _iterable=None, default=None):
            self.default = default or []

        def __contains__(self, item):
            return super().__contains__(item) or item == self.default

    choices = Choices(components.infos.keys(), default=list(components.infos.keys()))

    parser.add_argument(
        "only",
        nargs="*",
        choices=choices,
        help="if provided, only obtain hashes for these components",
        default=choices.default,
    )
    parser.add_argument(
        "-e",
        "--exclude",
        action="append",
        choices=components.infos.keys(),
        help="do not obtain hashes for this component",
        default=[],
    )

    args = parser.parse_args()
    download_hash(
        {k: components.infos[k] for k in (set(args.only) - set(args.exclude))}
    )
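The trickiest part of download_hash above is the new_versions comprehension: for each component it keeps only GitHub versions that are strictly newer patches of a minor version already tracked in checksums.yml (gvisor excepted, since it has no major.minor.patch scheme). A stripped-down sketch of that selection rule, with made-up version numbers rather than real checksums.yml data:

# Standalone sketch of the patch-selection logic; the version lists are
# hypothetical.
from itertools import groupby

from packaging.version import Version

current = sorted(Version(v) for v in ["1.28.0", "1.28.1", "1.29.0"])
upstream = {Version(v) for v in ["1.28.2", "1.29.1", "1.30.0"]}

# Newest known patch of each (minor, major) pair already tracked.
newest_per_minor = [
    max(group) for _, group in groupby(current, lambda v: (v.minor, v.major))
]
new_versions = {
    v
    for v in upstream
    if any(
        v > version and (v.major, v.minor) == (version.major, version.minor)
        for version in newest_per_minor
    )
} - set(current)
print(sorted(new_versions))  # [<Version('1.28.2')>, <Version('1.29.1')>]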
@@ -0,0 +1,24 @@
query($with_releases: [ID!]!, $with_tags: [ID!]!) {
  with_releases: nodes(ids: $with_releases) {

    ... on Repository {
      releases(first: 100) {
        nodes {
          tagName
          isPrerelease
        }
      }
    }
  }

  with_tags: nodes(ids: $with_tags) {

    ... on Repository {
      refs(refPrefix: "refs/tags/", last: 25) {
        nodes {
          name
        }
      }
    }
  }
}
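download.py ships this query alongside the package and posts it to the GitHub GraphQL API, with the graphql_id lists from components.py as variables. A minimal sketch of an equivalent standalone call (assuming a token in the API_KEY environment variable and the query file in the working directory; the node IDs are taken from components.py):

# Minimal sketch; mirrors the request made in download.py.
import os

import requests

query = open("list_releases.graphql").read()
variables = {
    "with_releases": ["R_kgDOAToIkg"],  # kubernetes/kubernetes
    "with_tags": ["R_kgDOB9IlXg"],  # google/gvisor
}
response = requests.post(
    "https://api.github.com/graphql",
    json={"query": query, "variables": variables},
    headers={"Authorization": f"Bearer {os.environ['API_KEY']}"},
)
response.raise_for_status()
for repo in response.json()["data"]["with_releases"]:
    for release in repo["releases"]["nodes"]:
        print(release["tagName"], release["isPrerelease"])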
@@ -1,205 +0,0 @@
#!/usr/bin/env python3

# After a new version of Kubernetes has been released,
# run this script to update roles/kubespray-defaults/defaults/main/checksums.yml
# with new hashes.

import sys

from itertools import count, groupby
from collections import defaultdict
from functools import cache
import argparse
import requests
from ruamel.yaml import YAML
from packaging.version import Version

CHECKSUMS_YML = "../roles/kubespray-defaults/defaults/main/checksums.yml"

def open_checksums_yaml():
    yaml = YAML()
    yaml.explicit_start = True
    yaml.preserve_quotes = True
    yaml.width = 4096

    with open(CHECKSUMS_YML, "r") as checksums_yml:
        data = yaml.load(checksums_yml)

    return data, yaml

def version_compare(version):
    return Version(version.removeprefix("v"))

downloads = {
    "calicoctl_binary": "https://github.com/projectcalico/calico/releases/download/{version}/SHA256SUMS",
    "ciliumcli_binary": "https://github.com/cilium/cilium-cli/releases/download/{version}/cilium-{os}-{arch}.tar.gz.sha256sum",
    "cni_binary": "https://github.com/containernetworking/plugins/releases/download/{version}/cni-plugins-{os}-{arch}-{version}.tgz.sha256",
    "containerd_archive": "https://github.com/containerd/containerd/releases/download/v{version}/containerd-{version}-{os}-{arch}.tar.gz.sha256sum",
    "crictl": "https://github.com/kubernetes-sigs/cri-tools/releases/download/{version}/crictl-{version}-{os}-{arch}.tar.gz.sha256",
    "crio_archive": "https://storage.googleapis.com/cri-o/artifacts/cri-o.{arch}.{version}.tar.gz.sha256sum",
    "etcd_binary": "https://github.com/etcd-io/etcd/releases/download/{version}/SHA256SUMS",
    "kubeadm": "https://dl.k8s.io/release/{version}/bin/linux/{arch}/kubeadm.sha256",
    "kubectl": "https://dl.k8s.io/release/{version}/bin/linux/{arch}/kubectl.sha256",
    "kubelet": "https://dl.k8s.io/release/{version}/bin/linux/{arch}/kubelet.sha256",
    "nerdctl_archive": "https://github.com/containerd/nerdctl/releases/download/v{version}/SHA256SUMS",
    "runc": "https://github.com/opencontainers/runc/releases/download/{version}/runc.sha256sum",
    "skopeo_binary": "https://github.com/lework/skopeo-binary/releases/download/{version}/skopeo-{os}-{arch}.sha256",
    "yq": "https://github.com/mikefarah/yq/releases/download/{version}/checksums-bsd",  # see https://github.com/mikefarah/yq/pull/1691 for why we use this url
}
# TODO: downloads not supported
# youki: no checksums in releases
# kata: no checksums in releases
# gvisor: sha512 checksums
# crun: PGP signatures
# cri_dockerd: no checksums or signatures
# helm_archive: PGP signatures
# krew_archive: different yaml structure
# calico_crds_archive: different yaml structure

# TODO:
# noarch support -> k8s manifests, helm charts
# different checksum format (needs download role changes)
# different verification methods (gpg, cosign) (needs download role changes) (or verify the sig in this script and only use the checksum in the playbook)
# perf improvements (async)

def download_hash(only_downloads: [str]) -> None:
    # Handle files with multiple hashes, in various formats.
    # Each lambda is expected to produce a dictionary of hashes indexed by arch name.
    download_hash_extract = {
        "calicoctl_binary": lambda hashes : {
            line.split('-')[-1] : line.split()[0]
            for line in hashes.strip().split('\n')
            if line.count('-') == 2 and line.split('-')[-2] == "linux"
        },
        "etcd_binary": lambda hashes : {
            line.split('-')[-1].removesuffix('.tar.gz') : line.split()[0]
            for line in hashes.strip().split('\n')
            if line.split('-')[-2] == "linux"
        },
        "nerdctl_archive": lambda hashes : {
            line.split()[1].removesuffix('.tar.gz').split('-')[3] : line.split()[0]
            for line in hashes.strip().split('\n')
            if [x for x in line.split(' ') if x][1].split('-')[2] == "linux"
        },
        "runc": lambda hashes : {
            parts[1].split('.')[1] : parts[0]
            for parts in (line.split()
                          for line in hashes.split('\n')[3:9])
        },
        "yq": lambda rhashes_bsd : {
            pair[0].split('_')[-1] : pair[1]
            # pair = (yq_<os>_<arch>, <hash>)
            for pair in ((line.split()[1][1:-1], line.split()[3])
                         for line in rhashes_bsd.splitlines()
                         if line.startswith("SHA256"))
            if pair[0].startswith("yq")
            and pair[0].split('_')[1] == "linux"
            and not pair[0].endswith(".tar.gz")
        },
    }

    data, yaml = open_checksums_yaml()
    s = requests.Session()

    @cache
    def _get_hash_by_arch(download: str, version: str) -> {str: str}:

        hash_file = s.get(downloads[download].format(
            version = version,
            os = "linux",
        ),
                          allow_redirects=True)
        if hash_file.status_code == 404:
            print(f"Unable to find {download} hash file for version {version} at {hash_file.url}")
            return None
        hash_file.raise_for_status()
        return download_hash_extract[download](hash_file.content.decode())

    for download, url in (downloads if only_downloads == []
                          else {k:downloads[k] for k in downloads.keys() & only_downloads}).items():
        checksum_name = f"{download}_checksums"
        # Propagate new patch versions to all architectures
        for arch in data[checksum_name].values():
            for arch2 in data[checksum_name].values():
                arch.update({
                    v:("NONE" if arch2[v] == "NONE" else 0)
                    for v in (set(arch2.keys()) - set(arch.keys()))
                    if v.split('.')[2] == '0'})
                # this is necessary to make the script idempotent,
                # by only adding a vX.X.0 version (=minor release) in each arch
                # and letting the rest of the script populate the potential
                # patch versions

        for arch, versions in data[checksum_name].items():
            for minor, patches in groupby(versions.copy().keys(), lambda v : '.'.join(v.split('.')[:-1])):
                for version in (f"{minor}.{patch}" for patch in
                                count(start=int(max(patches, key=version_compare).split('.')[-1]),
                                      step=1)):
                    # Those barbaric generators do the following:
                    # Group all patch versions by minor number, take the newest and start from that
                    # to find new versions
                    if version in versions and versions[version] != 0:
                        continue
                    if download in download_hash_extract:
                        hashes = _get_hash_by_arch(download, version)
                        if hashes == None:
                            break
                        sha256sum = hashes.get(arch)
                        if sha256sum == None:
                            break
                    else:
                        hash_file = s.get(downloads[download].format(
                            version = version,
                            os = "linux",
                            arch = arch
                        ),
                                          allow_redirects=True)
                        if hash_file.status_code == 404:
                            print(f"Unable to find {download} hash file for version {version} (arch: {arch}) at {hash_file.url}")
                            break
                        hash_file.raise_for_status()
                        sha256sum = hash_file.content.decode().split()[0]

                    if len(sha256sum) != 64:
                        raise Exception(f"Checksum has an unexpected length: {len(sha256sum)} (binary: {download}, arch: {arch}, release: {version}, checksum: '{sha256sum}')")
                    data[checksum_name][arch][version] = sha256sum
        data[checksum_name] = {arch : {r : releases[r] for r in sorted(releases.keys(),
                                                                       key=version_compare,
                                                                       reverse=True)}
                               for arch, releases in data[checksum_name].items()}

    with open(CHECKSUMS_YML, "w") as checksums_yml:
        yaml.dump(data, checksums_yml)
    print(f"\n\nUpdated {CHECKSUMS_YML}\n")

parser = argparse.ArgumentParser(description=f"Add new patch versions hashes in {CHECKSUMS_YML}",
                                 formatter_class=argparse.RawTextHelpFormatter,
                                 epilog=f"""
This script only looks up new patch versions relative to those already existing
in the data in {CHECKSUMS_YML},
which means it won't add new major or minor versions.
In order to add one of these, edit {CHECKSUMS_YML}
by hand, adding the new versions with a patch number of 0 (or the lowest relevant patch version);
then run this script.

Note that the script will try to add the versions on all
architecture keys already present for a given download target.

The '0' value for a version hash is treated as a missing hash, so the script will try to download it again.
To notify a non-existing version (yanked, or upstream does not have monotonically increasing version numbers),
use the special value 'NONE'.

EXAMPLES:

crictl_checksums:
  ...
  amd64:
+   v1.30.0: 0
    v1.29.0: d16a1ffb3938f5a19d5c8f45d363bd091ef89c0bc4d44ad16b933eede32fdcbb
    v1.28.0: 8dc78774f7cbeaf787994d386eec663f0a3cf24de1ea4893598096cb39ef2508"""
)
parser.add_argument('binaries', nargs='*', choices=downloads.keys())

args = parser.parse_args()
download_hash(args.binaries)
51  scripts/get_node_ids.sh  Executable file
@@ -0,0 +1,51 @@
#!/bin/sh
gh api graphql -H "X-Github-Next-Global-ID: 1" -f query='{
  calicoctl_binary: repository(owner: "projectcalico", name: "calico") {
    id
  }
  ciliumcli_binary: repository(owner: "cilium", name: "cilium-cli") {
    id
  }
  crictl: repository(owner: "kubernetes-sigs", name: "cri-tools") {
    id
  }
  crio_archive: repository(owner: "cri-o", name: "cri-o") {
    id
  }
  etcd_binary: repository(owner: "etcd-io", name: "etcd") {
    id
  }
  kubectl: repository(owner: "kubernetes", name: "kubernetes") {
    id
  }
  nerdctl_archive: repository(owner: "containerd", name: "nerdctl") {
    id
  }
  runc: repository(owner: "opencontainers", name: "runc") {
    id
  }
  skopeo_binary: repository(owner: "lework", name: "skopeo-binary") {
    id
  }
  yq: repository(owner: "mikefarah", name: "yq") {
    id
  }
  youki: repository(owner: "youki-dev", name: "youki") {
    id
  }
  kubernetes: repository(owner: "kubernetes", name: "kubernetes") {
    id
  }
  cri_dockerd: repository(owner: "Mirantis", name: "cri-dockerd") {
    id
  }
  kata: repository(owner: "kata-containers", name: "kata-containers") {
    id
  }
  crun: repository(owner: "containers", name: "crun") {
    id
  }
  gvisor: repository(owner: "google", name: "gvisor") {
    id
  }
}'
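The aliases in this query correspond to the keys in components.py, so the id each one returns is the value to paste into that entry's graphql_id field. A small sketch of reading the script's output (assuming an authenticated gh CLI and that gh prints the raw JSON response):

# Sketch: run get_node_ids.sh and print each alias with its node ID.
import json
import subprocess

out = subprocess.run(
    ["sh", "scripts/get_node_ids.sh"], capture_output=True, text=True, check=True
).stdout
for alias, repo in json.loads(out)["data"].items():
    print(alias, repo["id"])  # candidate "graphql_id" for that components.py entry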