From 7a56ca059406e79a536d3434b4117cc4a0720696 Mon Sep 17 00:00:00 2001
From: Chris Hofstaedtler
Date: Fri, 22 Nov 2024 21:25:38 +0100
Subject: [PATCH 1/2] grml-live: use xorriso instead of mount -o loop in -e mode

mount -o loop is disabled in our new build environment, so avoid that and
use xorriso (actually, osirrox) to extract the squashfs file from the ISO.
---
 grml-live | 23 ++++++++++-------------
 1 file changed, 10 insertions(+), 13 deletions(-)

diff --git a/grml-live b/grml-live
index 07a0c043..389a1590 100755
--- a/grml-live
+++ b/grml-live
@@ -637,23 +637,21 @@ extract_iso() {
   if [ -n "$EXTRACT_ISO_NAME" ]; then
     log "Unpacking ISO from ${EXTRACT_ISO_NAME}"
     einfo "Unpacking ISO from ${EXTRACT_ISO_NAME}"

-    local mountpoint=$(mktemp -d)
+    local tempdir=$(mktemp -d)
     local rc=0
-    mount -o loop "${EXTRACT_ISO_NAME}" "$mountpoint" ; rc=$?
+    mkdir -p "${tempdir}/live/"
+    osirrox -indev "${EXTRACT_ISO_NAME}" -extract live "${tempdir}/live/" ; rc=$?
     if [ "$rc" != 0 ]; then
-      rmdir "$mountpoint"
-      log "mount failed"
-      eerror "mount failed"
+      rm -rf "$tempdir"
+      log "osirrox failed"
+      eerror "osirrox failed"
       eend 1
       bailout 1
     fi
-    if ls "${mountpoint}"/live/*/*.squashfs 2>/dev/null | grep -q . ; then # ISOs >=2011.12
-      log "Using ${mountpoint}/live/*/*.squashfs for unsquashfs"
-      unsquashfs -d "${CHROOT_OUTPUT}" "${mountpoint}"/live/*/*.squashfs ; rc=$?
-    elif ls "${mountpoint}"/live/*.squashfs 2>/dev/null | grep -q . ; then # ISOs before 2011.12
-      log "Using ${mountpoint}/live/*.squashfs for unsquashfs"
-      unsquashfs -d "${CHROOT_OUTPUT}" "${mountpoint}"/live/*.squashfs ; rc=$?
+    if ls "${tempdir}"/live/*/*.squashfs 2>/dev/null | grep -q . ; then
+      log "Using ${tempdir}/live/*/*.squashfs for unsquashfs"
+      unsquashfs -d "${CHROOT_OUTPUT}" "${tempdir}"/live/*/*.squashfs ; rc=$?
     else
       log "Error: Could not find any *.squashfs files on the ISO"
       eerror "Error: Could not find any *.squashfs files on the ISO"
@@ -661,8 +659,7 @@ if [ -n "$EXTRACT_ISO_NAME" ]; then
       bailout 1
     fi

-    umount "$mountpoint"
-    rmdir "$mountpoint"
+    rm -rf "$tempdir"
     if [ "$rc" != 0 ]; then
       log "unsquashfs failed"
       eerror "unsquashfs failed"
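
For reference, the new extraction flow can be reproduced by hand; a sketch with
illustrative paths: "osirrox -indev grml64-small_2024.02.iso -extract live
/tmp/live" pulls the live/ tree out of the ISO, after which unsquashfs unpacks
the squashfs found beneath it. osirrox ships as part of the xorriso package.
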
else log "Error: Could not find any *.squashfs files on the ISO" eerror "Error: Could not find any *.squashfs files on the ISO" @@ -661,8 +659,7 @@ if [ -n "$EXTRACT_ISO_NAME" ]; then bailout 1 fi - umount "$mountpoint" - rmdir "$mountpoint" + rm -rf "$tempdir" if [ "$rc" != 0 ]; then log "unsquashfs failed" eerror "unsquashfs failed" From 2822a28f5454ab7c642872f676f77790bb950eb6 Mon Sep 17 00:00:00 2001 From: Chris Hofstaedtler Date: Fri, 22 Nov 2024 19:24:06 +0100 Subject: [PATCH 2/2] Import CI build drivers --- build-driver/build | 14 + build-driver/build.py | 506 ++++++++++++++++++++++++++ build-driver/generate-changes-list.py | 180 +++++++++ build-driver/pyproject.toml | 9 + build-driver/upload-daily.py | 114 ++++++ 5 files changed, 823 insertions(+) create mode 100755 build-driver/build create mode 100755 build-driver/build.py create mode 100755 build-driver/generate-changes-list.py create mode 100644 build-driver/pyproject.toml create mode 100755 build-driver/upload-daily.py diff --git a/build-driver/build b/build-driver/build new file mode 100755 index 00000000..c206b956 --- /dev/null +++ b/build-driver/build @@ -0,0 +1,14 @@ +#!/bin/bash +# +# Entrypoint from CI jobs +# Only valid assumptions: +# - apt sources.list are valid +# - first param is path to grml-live checkout +# - remaining params are job config, so to say +set -x +GRML_LIVE_PATH=$1 +PYTHONPATH="$GRML_LIVE_PATH"/build-driver +echo -e "\e[0Ksection_start:$(date +%s):startupdeps[collapsed=true]\r\e[0KInstall dependencies for build.py" +apt satisfy -q -y --no-install-recommends 'python3-minimal, python3-yaml' +echo -e "\e[0Ksection_end:$(date +%s):startupdeps\r\e[0K" +exec "$PYTHONPATH"/build.py "$@" diff --git a/build-driver/build.py b/build-driver/build.py new file mode 100755 index 00000000..3aa9775e --- /dev/null +++ b/build-driver/build.py @@ -0,0 +1,506 @@ +#!/usr/bin/env python3 +# +# Main entry point for CI builds. +# We are started by ./build, which in turn is started by the CI config. +# Dependencies can be available, if ./build installs them first. +# +from pathlib import Path +import datetime +import contextlib +import subprocess +import os +import shutil +import time +import sys +import tempfile +from dataclasses import dataclass + +import yaml + +TOOL_DIR = Path(__file__).parent + + +@dataclass(frozen=True) +class JobProperties: + job_timestamp: datetime.datetime + job_name: str + arch: str + classes: list + debian_suite: str + version: str + release_name: str + grml_name: str + isoname: str + + +def usage(program_name): + message = f""" +Usage: {program_name} grml_live_path build_mode config_file flavor arch ... + +Examples: + {program_name} /build/job/grml-live release ./config/release-pre2024.XX-rc0 small amd64 + {program_name} /build/job/grml-live daily ./config/daily small amd64 testing + """ + print(message.strip(), file=sys.stderr) + + +def run_x(args, check: bool = True, **kwargs): + # str-ify Paths, not necessary, but for readability in logs. 
+
+
+@contextlib.contextmanager
+def ci_section(title: str, *, collapsed: bool = True):
+    section_key = f"sec{time.time()}"
+    collapsed_str = "[collapsed=true]" if collapsed else ""
+    print(f"\x1b[0Ksection_start:{int(time.time())}:{section_key}{collapsed_str}\r\x1b[0K{title}", flush=True)
+    yield
+    print(f"\x1b[0Ksection_end:{int(time.time())}:{section_key}\r\x1b[0K", flush=True)
+
+
+def is_docker():
+    return (
+        Path("/.dockerenv").exists()
+        or Path("/run/.containerenv").exists()
+        or (Path("/proc/1/cgroup").exists() and b"devices:/docker" in Path("/proc/1/cgroup").read_bytes())
+    )
+
+
+def is_ci():
+    return os.getenv("CI", "false") == "true"
+
+
+def apt_satisfy(deps: str):
+    run_x(
+        ["apt-get", "satisfy", "-q", "-y", "--no-install-recommends", deps.strip()],
+        env=dict(os.environ) | {"DEBIAN_FRONTEND": "noninteractive"},
+    )
+
+
+def get_grml_live(branch: str):
+    checkout_path = Path(os.getcwd()) / "grml-live"
+    run_x(["git", "clone", "-q", "--depth", "1", "-b", branch, "https://github.com/grml/grml-live", checkout_path])
+    result = run_x(["git", "describe", "--always"], cwd=checkout_path, capture_output=True)
+    version = result.stdout.strip().decode()
+    print(f"I: grml-live version: {version} from branch {branch}")
+    return checkout_path
+
+
+def print_grml_live_version(grml_live_path: Path):
+    result = run_x(["git", "describe", "--always"], cwd=grml_live_path, capture_output=True)
+    version = result.stdout.strip().decode()
+    print(f"I: grml-live version: {version}")
+
+
+def run_grml_live(
+    grml_live_path: Path,
+    output_dir: Path,
+    arch: str,
+    classes: list,
+    debian_suite: str,
+    version: str,
+    release_name: str,
+    grml_name: str,
+    isoname: str,
+    old_iso_path: Path | None,
+):
+    env = dict(os.environ)
+    grml_fai_config = grml_live_path / "etc" / "grml" / "fai"
+    env.update(
+        {
+            "GRML_FAI_CONFIG": str(grml_fai_config),
+            "SCRIPTS_DIRECTORY": str(grml_live_path / "scripts"),
+            "LIVE_CONF": str(grml_live_path / "etc" / "grml" / "grml-live.conf"),
+            "TEMPLATE_DIRECTORY": str(grml_live_path / "templates"),
+        }
+    )
+
+    if not old_iso_path:
+        with ci_section("Creating basefile using mmdebstrap"):
+            basefiles_path = grml_fai_config / "config" / "basefiles"
+            basefiles_path.mkdir()
+            basefile = basefiles_path / f"{arch.upper()}.tar.gz"
+            run_x(["mmdebstrap", "--format=tar", debian_suite, basefile])
+
+    grml_live_cmd = [
+        grml_live_path / "grml-live",
+        "-F",  # do not prompt
+        "-V",  # verbose
+        "-A",  # clean up afterwards
+        "-a",
+        arch,
+        "-c",
+        ",".join(classes),
+        "-s",
+        debian_suite,
+        "-v",
+        version,
+        "-r",
+        release_name,
+        "-g",
+        grml_name,
+        "-i",
+        isoname,
+        "-o",
+        output_dir,
+    ]
+    if old_iso_path:
+        grml_live_cmd += ["-b", "-e", old_iso_path]
+    with ci_section("Building with grml-live", collapsed=False):
+        fixup_fai()
+        run_x(grml_live_cmd, env=env)
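+
+# For a daily small/amd64 build, the assembled command line comes out roughly
+# as follows (values are illustrative):
+#
+#   grml-live -F -V -A -a amd64 \
+#     -c DEBORPHAN,GRMLBASE,GRML_SMALL,RELEASE,AMD64,IGNORE \
+#     -s unstable -v d20241122b1 -r daily20241122build1unstable \
+#     -g grml-small-amd64 -i grml-small-daily20241122build1unstable-amd64.iso \
+#     -o <output-dir>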
+
+
+def fixup_fai():
+    # Workaround for fai, necessary to build in docker where /dev/pts is unavailable.
+    # apt prints: E: Can not write log (Is /dev/pts mounted?) - posix_openpt (19: No such device)
+    fai_subroutines = Path("/usr/lib/fai/subroutines")
+    old_code = fai_subroutines.read_text().splitlines()
+    filtered_code = "\n".join([line for line in old_code if "task_error 472" not in line])
+    fai_subroutines.write_text(filtered_code)
+
+
+def upload_daily(job_name: str, build_dir: Path, job_timestamp: datetime.datetime):
+    ssh_key = os.getenv("DAILY_UPLOAD_SSH_KEY")
+    remote = os.getenv("DAILY_UPLOAD_REMOTE")
+    stamped_dirname = job_timestamp.strftime("%Y-%m-%d_%H_%M_%S")
+    with ci_section("Uploading to daily.grml.org"):
+        run_x(
+            [
+                TOOL_DIR / "upload-daily.py",
+                ssh_key,
+                f"{remote}{job_name}",
+                build_dir,
+                job_name,
+                stamped_dirname,
+            ]
+        )
+
+
+def get_dpkg_list_path_for_build(build_dir: Path) -> Path:
+    return build_dir / "grml_logs" / "fai" / "dpkg.list"
+
+
+def generate_changes_list(
+    build_dir: Path,
+    output_filename: str,
+    old_dpkg_list: Path,
+    build_job_name: str,
+    build_version: str,
+):
+    package_prefix = "grml"
+    git_url_base = "https://github.com/grml"
+    git_workspace = Path("/tmp") / "changes-git-workspace"
+    output_file = build_dir / "grml_logs" / output_filename
+    new_dpkg_list = get_dpkg_list_path_for_build(build_dir)
+
+    with ci_section(f"Generating changes list {output_file!s}"):
+        run_x(
+            [
+                TOOL_DIR / "generate-changes-list.py",
+                output_file,
+                new_dpkg_list,
+                old_dpkg_list,
+                package_prefix,
+                git_url_base,
+                git_workspace,
+                build_job_name,
+                build_version,
+            ]
+        )
+
+
+@contextlib.contextmanager
+def results_mover(build_dir: Path, output_dir: Path):
+    try:
+        yield
+    finally:
+        print(f"I: moving build results from {build_dir} to {output_dir}")
+        if output_dir.exists():
+            raise RuntimeError(f"output_dir {output_dir} exists, but shutil.move requires it not to")
+        shutil.move(build_dir, output_dir)
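+
+# Note that the move also runs when the build raises: the finally block fires
+# either way, so partial logs still land in output_dir for CI artifact
+# collection, e.g.:
+#
+#   with results_mover(build_dir, output_dir):
+#       build(...)  # an exception here still triggers the move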
Raises on failure.""" + run_x(["curl", "-#fSL", "--output", local_path, url]) + + +def should_skip_sources(build_config: dict, env: dict) -> bool: + if env.get("SKIP_SOURCES", "") == "1": + return True + if build_config.get("skip_sources", False) is True: + return True + return False + + +def get_grml_live_classes(arch: str, flavor: str, classes_for_mode: list[str], skip_sources: bool) -> list[str]: + base_classes = [ + "DEBORPHAN", + "GRMLBASE", + f"GRML_{flavor.upper()}", + "RELEASE", + arch.upper(), + "IGNORE", + ] + return base_classes + classes_for_mode + + +def build( + build_dir: Path, + old_dpkg_list_daily: Path | None, + old_dpkg_list_last_release: Path | None, + job_properties: JobProperties, + grml_live_path: Path, + upload_to_daily: bool, + old_iso_path: Path | None, +): + run_grml_live( + grml_live_path, + build_dir, + job_properties.arch, + job_properties.classes, + job_properties.debian_suite, + job_properties.version, + job_properties.release_name, + job_properties.grml_name, + job_properties.isoname, + old_iso_path, + ) + + if old_dpkg_list_daily: + generate_changes_list( + build_dir, + "changes-last-daily.txt", + old_dpkg_list_daily, + job_properties.job_name, + job_properties.version, + ) + + if old_dpkg_list_last_release: + generate_changes_list( + build_dir, + "changes-last-release.txt", + old_dpkg_list_last_release, + job_properties.job_name, + job_properties.version, + ) + + if upload_to_daily: + upload_daily(job_properties.job_name, build_dir, job_properties.job_timestamp) + + +def load_config(build_config_file: str) -> dict: + with Path(build_config_file).open() as stream: + return yaml.safe_load(stream) + + +def bail(message: str) -> int: + print(f"E: {message}", file=sys.stderr) + return 2 + + +def install_debian_dependencies(): + # TODO: read (some!) deps from grml-live/debian/control + with ci_section("Installing dependencies from Debian"): + apt_satisfy( + """ + ca-certificates , + git , + bc , + bzip2 , + curl , + debootstrap , + dosfstools , + fai-client (>= 3.4.0) , + isolinux (>= 3:6.03+dfsg-5+deb8u1~) , + jo , + mksh , + mmdebstrap , + moreutils , + mtools , + pciutils , + python3-paramiko , + rsync , + squashfs-tools (>= 1:4.2-0~bpo60) , + xorriso , + imagemagick , + """ + ) + + +def download_old_dpkg_list_last_release(tmp_dir: Path, last_release_version: str | None, flavor: str) -> Path | None: + if last_release_version is None: + return None + + # TODO: + # 1) deal with (OLD!) release names + # 2) interpolate arch, etc + # 3) error handling? 
+
+
+def build(
+    build_dir: Path,
+    old_dpkg_list_daily: Path | None,
+    old_dpkg_list_last_release: Path | None,
+    job_properties: JobProperties,
+    grml_live_path: Path,
+    upload_to_daily: bool,
+    old_iso_path: Path | None,
+):
+    run_grml_live(
+        grml_live_path,
+        build_dir,
+        job_properties.arch,
+        job_properties.classes,
+        job_properties.debian_suite,
+        job_properties.version,
+        job_properties.release_name,
+        job_properties.grml_name,
+        job_properties.isoname,
+        old_iso_path,
+    )
+
+    if old_dpkg_list_daily:
+        generate_changes_list(
+            build_dir,
+            "changes-last-daily.txt",
+            old_dpkg_list_daily,
+            job_properties.job_name,
+            job_properties.version,
+        )
+
+    if old_dpkg_list_last_release:
+        generate_changes_list(
+            build_dir,
+            "changes-last-release.txt",
+            old_dpkg_list_last_release,
+            job_properties.job_name,
+            job_properties.version,
+        )
+
+    if upload_to_daily:
+        upload_daily(job_properties.job_name, build_dir, job_properties.job_timestamp)
+
+
+def load_config(build_config_file: str) -> dict:
+    with Path(build_config_file).open() as stream:
+        return yaml.safe_load(stream)
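+
+# The build config is a small YAML document. Keys read by this script, with
+# illustrative values (this is a sketch, not a shipped config):
+#
+#   last_release: "2024.02"            # for the changes-since-last-release list
+#   skip_sources: true                 # optional, see should_skip_sources()
+#   # release mode additionally reads:
+#   release_version: "pre2024.11-rc0"
+#   release_name: "Glumpad Grumbirn"
+#   debian_suite: "bookworm"
+#   base_iso:
+#     small:
+#       amd64: "https://..."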
"Glumpad Grumbirn" + release_name=build_config["release_name"], + grml_name=build_grml_name, + isoname=f"grml-{flavor}-{build_version}-{arch}.iso", + ) + + elif build_mode == "daily": + old_iso_url = None + date_stamp = datetime.datetime.now().strftime("%Y%m%d") + CI_PIPELINE_IID = os.getenv("CI_PIPELINE_IID", "0") + build_version = f"d{date_stamp}b{CI_PIPELINE_IID}" + build_release_name = f"daily{date_stamp}build{CI_PIPELINE_IID}{debian_suite}" + + job_properties = JobProperties( + job_timestamp=datetime.datetime.now(), + job_name=f"{build_grml_name}-{debian_suite}", + arch=arch, + classes=classes, + debian_suite=debian_suite, + version=build_version, + release_name=build_release_name, + grml_name=build_grml_name, + isoname=f"grml-{flavor}-{build_release_name}-{arch}.iso", + ) + + else: + raise ValueError(f"unexpected {build_mode=}") + + print(f"I: {job_properties=}") + print(f"I: {last_release_version=}") + + print_grml_live_version(grml_live_path) + + source_dir = Path(os.getcwd()) + cache_dir = source_dir / "cached" + output_dir = source_dir / "results" + print(f"I: {source_dir=}") + print(f"I: {cache_dir=}") + print(f"I: {output_dir=}") + + # avoid building on mounted volume + tmp_root = Path("/tmp") if True else Path(".").absolute() + tmp_dir = Path(tempfile.mkdtemp(dir=tmp_root)) + build_dir = Path(tempfile.mkdtemp(dir=tmp_root)) + + if build_mode == "daily": + old_dpkg_list_daily = source_dir / "cache" / "dpkg.list" + else: + old_dpkg_list_daily = None + + # Do it now, as the next block needs curl installed. + install_debian_dependencies() + + old_dpkg_list_last_release = download_old_dpkg_list_last_release(tmp_dir, last_release_version, flavor) + old_iso_path = download_old_iso(tmp_dir, old_iso_url) + + with results_mover(build_dir, output_dir): + build( + build_dir, + old_dpkg_list_daily, + old_dpkg_list_last_release, + job_properties, + grml_live_path, + upload_to_daily, + old_iso_path, + ) + + # Copy dpkg.list into cache for next iteration. 
diff --git a/build-driver/generate-changes-list.py b/build-driver/generate-changes-list.py
new file mode 100755
index 00000000..8ad4d0ab
--- /dev/null
+++ b/build-driver/generate-changes-list.py
@@ -0,0 +1,180 @@
+#!/usr/bin/env python3
+import os
+import re
+import subprocess
+import sys
+from pathlib import Path
+
+
+class Listener:
+    def __init__(self):
+        self.failed = False
+
+    def error(self, message: str):
+        raise NotImplementedError
+
+    def info(self, message: str):
+        raise NotImplementedError
+
+    def warn(self, message: str):
+        raise NotImplementedError
+
+
+class CliListener(Listener):
+    def error(self, message: str):
+        self.failed = True
+        sys.stderr.write(f"E: {message}\n")
+
+    def info(self, message: str):
+        sys.stdout.write(f"I: {message}\n")
+
+    def warn(self, message: str):
+        sys.stdout.write(f"W: {message}\n")
+
+
+def parse_package_list(s: str) -> dict:
+    package_dict = {}
+    for line in s.split("\n"):
+        match = re.match(r"^ii\s+(\S+)\s+(\S+)\s", line)
+        if match:
+            package_dict[match[1]] = match[2]
+    return package_dict
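+
+# Example: for dpkg -l output containing a line such as (columns abbreviated,
+# version illustrative)
+#
+#   ii  grml-scripts  2.17.3  all  ...
+#
+# parse_package_list() returns {"grml-scripts": "2.17.3"}. Lines whose state
+# is not "ii" (installed) are skipped.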
+
+
+def build_changes(
+    output_filename: Path,
+    dpkg_list_new: Path,
+    dpkg_list_old: Path,
+    package_prefix: str,
+    git_url_base: str,
+    git_repo_workspace: Path,
+    job_name: str,
+    build_id: str,
+    listener: Listener,
+):
+    git_repo_workspace.mkdir(parents=True, exist_ok=True)
+
+    changelog = f"""------------------------------------------------------------------------
+Generated by CI for job {job_name} {build_id}
+------------------------------------------------------------------------
+"""
+
+    if not dpkg_list_new.exists():
+        listener.error(f"Could not read package list {dpkg_list_new}")
+        return
+
+    packages = parse_package_list(dpkg_list_new.read_text())
+    packages_old = {}
+    try:
+        packages_old = parse_package_list(dpkg_list_old.read_text())
+    except Exception as e:
+        listener.info(f"While parsing old package list: {e}")
+
+    debian_changes = {"removed": [], "added": [], "changed": []}
+
+    for package in set(packages_old) - set(packages):
+        if re.match(f"^{package_prefix}", package):
+            changelog += f"""Package {package}: Removed.
+------------------------------------------------------------------------
+"""
+        else:
+            debian_changes["removed"].append(package)
+
+    for package, version in packages.items():
+        old_version = packages_old.get(package)
+        if re.match(f"^{package_prefix}", package):
+            try:
+                listener.info(f"Generating changes list for package {package}...")
+                if old_version:
+                    listener.info(f"Version {old_version} -> {version}")
+                    if old_version == version:
+                        continue
+
+                # clone repo
+                git_url = f"{git_url_base}/{package}"
+                gitpath = git_repo_workspace / f"{package}.git"
+                if not gitpath.exists():
+                    env = dict(os.environ) | {"GIT_TERMINAL_PROMPT": "0"}
+                    subprocess.run(
+                        ["git", "clone", "--bare", "--single-branch", git_url, gitpath],
+                        cwd=git_repo_workspace,
+                        env=env,
+                    )
+                    if not gitpath.exists():
+                        raise Exception("Repository not found")
+
+                # update repo
+                subprocess.run(["git", "remote", "set-url", "origin", git_url], cwd=gitpath)
+                subprocess.run(["git", "remote", "update", "--prune"], cwd=gitpath).check_returncode()
+
+                if old_version:
+                    commit_range = f"v{old_version}..v{version}"
+                else:
+                    commit_range = f"v{version}"
+
+                result = subprocess.run(["git", "log", "--oneline", commit_range], cwd=gitpath, capture_output=True)
+                if result.returncode != 0:
+                    git_changes = "(failed)"
+                else:
+                    git_changes = "\n  ".join(result.stdout.decode().splitlines())
+                changelog += f"""Package {package}: {commit_range} {'(new)' if not old_version else ''}
+  {git_changes}
+------------------------------------------------------------------------
+"""
+            except Exception as e:
+                listener.warn(f"Generating change report for package {package} failed: {e}")
+        else:
+            if old_version:
+                if old_version == version:
+                    continue
+                debian_changes["changed"].append(f"{package} {old_version} -> {version}")
+            else:
+                debian_changes["added"].append(package)
+
+    changelog += """Changes to Debian package list:
+  Added:
+    {}
+  Changed:
+    {}
+  Removed:
+    {}
+------------------------------------------------------------------------
+""".format(
+        "\n    ".join(debian_changes["added"]).strip(),
+        "\n    ".join(debian_changes["changed"]).strip(),
+        "\n    ".join(debian_changes["removed"]).strip(),
+    )
+
+    output_filename.write_text(changelog)
+
+
+def main() -> int:
+    if len(sys.argv) != 9:
+        print(
+            f"Usage: {sys.argv[0]} output_filename dpkg_list_new dpkg_list_old package_prefix git_url_base git_repo_workspace job_name build_id"
+        )
+        return 2
+
+    listener = CliListener()
+    try:
+        build_changes(
+            Path(sys.argv[1]),
+            Path(sys.argv[2]),
+            Path(sys.argv[3]),
+            sys.argv[4],
+            sys.argv[5],
+            Path(sys.argv[6]),
+            sys.argv[7],
+            sys.argv[8],
+            listener,
+        )
+    except Exception as except_inst:
+        listener.error(f"Uncaught exception: {except_inst}")
+
+    if listener.failed:
+        return 1
+    return 0
+
+
+if __name__ == "__main__":
+    sys.exit(main())
diff --git a/build-driver/pyproject.toml b/build-driver/pyproject.toml
new file mode 100644
index 00000000..ff80772f
--- /dev/null
+++ b/build-driver/pyproject.toml
@@ -0,0 +1,9 @@
+[tool.pycodestyle]
+max-line-length = 120
+
+[tool.black]
+line-length = 120
+target-version = ['py311']
+
+[tool.pyupgrade]
+version-option = '--py311-plus'
diff --git a/build-driver/upload-daily.py b/build-driver/upload-daily.py
new file mode 100755
index 00000000..c54dd016
--- /dev/null
+++ b/build-driver/upload-daily.py
@@ -0,0 +1,114 @@
+#!/usr/bin/env python3
+import pathlib
+import sys
+from stat import S_ISDIR
+
+import paramiko
+
+
+def sftp_isdir(sftp, path: str):
+    try:
+        return S_ISDIR(sftp.stat(path).st_mode)
+    except IOError:
+        return False
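+
+# Note: sftp.stat() raises IOError for missing paths, so sftp_isdir() reports
+# nonexistent paths as "not a directory" instead of raising.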
+
+
+def sftp_rm_r(sftp, remote_dir: str):
+    files = sftp.listdir(remote_dir)
+    remote_path = pathlib.Path(remote_dir)
+
+    for f in files:
+        filepath = (remote_path / f).as_posix()
+        if sftp_isdir(sftp, filepath):
+            sftp_rm_r(sftp, filepath)
+        else:
+            sftp.remove(filepath)
+
+    sftp.rmdir(remote_dir)
+
+
+def upload_dir(sftp, local_dir: pathlib.Path, remote_dir: str):
+    remote_root = pathlib.Path(remote_dir)
+    seen = set()
+    for local_path in local_dir.rglob("*"):
+        if not local_path.is_file():
+            continue
+
+        relative_path = local_path.relative_to(local_dir)
+        remote_path = remote_root / relative_path
+
+        # Create missing remote parent directories, closest to the root first.
+        for parent in reversed(remote_path.parents):
+            parent = parent.as_posix()
+            if parent in seen:
+                continue
+            try:
+                sftp.stat(parent)
+            except FileNotFoundError:
+                sftp.mkdir(parent)
+            seen.add(parent)
+
+        print("Uploading", local_path, "to", remote_path)
+        sftp.put(local_path.as_posix(), remote_path.as_posix())
+
+
+def main():
+    keyfile = sys.argv[1]
+    # user@remote.host:/grml64-small_sid
+    remote_site_and_path = sys.argv[2]
+    # /tmp/builddir
+    local_dir = pathlib.Path(sys.argv[3])
+    # grml64-small_sid
+    job_name = sys.argv[4]
+    # 2024-11-09_01_31_01
+    stamped_dirname = sys.argv[5]
+
+    remote_site = remote_site_and_path.split(":")[0]
+    remote_path = remote_site_and_path.split(":")[1]
+    remote_host = remote_site.split("@")[1]
+    remote_user = remote_site.split("@")[0]
+
+    pkey = paramiko.Ed25519Key.from_private_key_file(keyfile)
+
+    transport = paramiko.Transport((remote_host, 22))
+    transport.connect(username=remote_user, pkey=pkey)
+
+    sftp = paramiko.SFTPClient.from_transport(transport)
+    assert sftp is not None
+
+    # listdir() returns bare names, so compare against "latest" directly.
+    versions = [path for path in sorted(sftp.listdir(remote_path)) if path != "latest"]
+    for version in versions[:-14]:
+        print("Removing old version", version)
+        sftp_rm_r(sftp, f"{remote_path}/{version}")
+
+    remote_stamped = f"{remote_path}/{stamped_dirname}"
+    upload_dir(sftp, local_dir, remote_stamped)
+
+    remote_latest = f"{remote_path}/latest"
+    try:
+        sftp.mkdir(remote_latest)
+    except IOError:
+        pass
+
+    real_iso_name = next(local_dir.glob("grml_isos/*iso")).name
+    real_checksum_name = next(local_dir.glob("grml_isos/*iso.sha256")).name
+    latest_iso_name = f"{job_name}_latest.iso"
+    latest_checksum_name = f"{job_name}_latest.iso.sha256"
+
+    for symlink, real in [
+        (latest_iso_name, real_iso_name),
+        (latest_checksum_name, real_checksum_name),
+    ]:
+        remote_symlink = f"{remote_latest}/{symlink}"
+        remote_real = f"../{stamped_dirname}/grml_isos/{real}"
+        print("Updating symlink", remote_symlink, "to", remote_real)
+        try:
+            sftp.unlink(remote_symlink)
+        except FileNotFoundError:
+            pass
+        sftp.symlink(remote_real, remote_symlink)
+
+    sftp.close()
+    transport.close()
+
+
+if __name__ == "__main__":
+    main()