|
|
@ -7,7 +7,7 @@ |
|
|
|
import sys |
|
|
|
import os |
|
|
|
|
|
|
|
from itertools import count, groupby |
|
|
|
from itertools import groupby |
|
|
|
from collections import defaultdict |
|
|
|
from functools import cache |
|
|
|
import argparse |
|
|
@ -30,9 +30,6 @@ def open_checksums_yaml(): |
|
|
|
|
|
|
|
return data, yaml |
|
|
|
|
|
|
|
def version_compare(version):
    """Turn a version string into a comparable Version object.

    A leading 'v' (as used in GitHub release tags, e.g. "v3.26.1") is
    stripped before parsing so tagged and bare versions compare equal.
    """
    bare = version.removeprefix("v")
    return Version(bare)
|
|
|
|
|
|
|
downloads = { |
|
|
|
"calicoctl_binary": { |
|
|
|
'url': "https://github.com/projectcalico/calico/releases/download/v{version}/SHA256SUMS", |
|
|
@ -219,58 +216,6 @@ def download_hash(only_downloads: [str]) -> None: |
|
|
|
return (hash_file.content.decode().split()[0]) |
|
|
|
|
|
|
|
|
|
|
|
# Iterate over every known download, or only the subset requested on the
# command line (an empty only_downloads means "all of them").
for download, url in (downloads if only_downloads == []
                      else {k: downloads[k] for k in downloads.keys() & only_downloads}).items():
    checksum_name = f"{download}_checksums"

    # Propagate new patch versions to all architectures.
    # This is necessary to make the script idempotent, by only adding a
    # vX.X.0 version (= minor release) in each arch and letting the rest
    # of the script populate the potential patch versions.
    for arch_sums in data[checksum_name].values():
        for other_arch_sums in data[checksum_name].values():
            arch_sums.update({
                v: ("NONE" if other_arch_sums[v] == "NONE" else 0)
                for v in (set(other_arch_sums.keys()) - set(arch_sums.keys()))
                if v.split('.')[2] == '0'})

    for arch, versions in data[checksum_name].items():
        # Those generators do the following:
        # group all patch versions by minor number, take the newest patch in
        # each group and count upward from it, probing for newly released
        # patch versions until a hash file can no longer be found.
        for minor, patches in groupby(versions.copy().keys(),
                                      lambda v: '.'.join(v.split('.')[:-1])):
            for version in (f"{minor}.{patch}" for patch in
                            count(start=int(max(patches, key=version_compare).split('.')[-1]),
                                  step=1)):
                # Skip versions whose checksum is already known (non-zero).
                if version in versions and versions[version] != 0:
                    continue
                if download in download_hash_extract:
                    # All architectures share one upstream hash file;
                    # extract the per-arch checksum from it.
                    hashes = _get_hash_by_arch(download, version)
                    if hashes is None:  # was `== None`: identity check per PEP 8
                        break
                    sha256sum = hashes.get(arch)
                    if sha256sum is None:
                        break
                else:
                    # One hash file per (version, os, arch) tuple.
                    hash_file = s.get(downloads[download].format(
                        version = version,
                        os = "linux",
                        arch = arch
                        ),
                        allow_redirects=True)
                    if hash_file.status_code == 404:
                        # 404 means this patch version does not exist (yet):
                        # stop probing this minor series.
                        print(f"Unable to find {download} hash file for version {version} (arch: {arch}) at {hash_file.url}")
                        break
                    hash_file.raise_for_status()
                    sha256sum = hash_file.content.decode().split()[0]

                # A SHA-256 digest is always 64 hex characters; anything else
                # means we parsed the wrong thing.
                if len(sha256sum) != 64:
                    raise Exception(f"Checksum has an unexpected length: {len(sha256sum)} (binary: {download}, arch: {arch}, release: {version}, checksum: '{sha256sum}')")
                data[checksum_name][arch][version] = sha256sum

    # Keep each architecture's versions sorted newest-first in the YAML.
    data[checksum_name] = {arch : {r : releases[r] for r in sorted(releases.keys(),
                                                                   key=version_compare,
                                                                   reverse=True)}
                           for arch, releases in data[checksum_name].items()}

with open(CHECKSUMS_YML, "w") as checksums_yml:
    yaml.dump(data, checksums_yml)
|
|
|