From af9d17639515271a3cf208569038afec0994477a Mon Sep 17 00:00:00 2001
From: Chris Hofstaedtler
Date: Fri, 22 Nov 2024 19:24:06 +0100
Subject: [PATCH] Import CI build drivers

---
 build-driver/build                    |  12 +
 build-driver/build.py                 | 478 ++++++++++++++++++++++++++
 build-driver/generate-changes-list.py | 180 ++++++++++
 build-driver/pyproject.toml           |   9 +
 build-driver/upload-daily.py          | 114 ++++++
 5 files changed, 793 insertions(+)
 create mode 100755 build-driver/build
 create mode 100755 build-driver/build.py
 create mode 100755 build-driver/generate-changes-list.py
 create mode 100644 build-driver/pyproject.toml
 create mode 100755 build-driver/upload-daily.py

diff --git a/build-driver/build b/build-driver/build
new file mode 100755
index 00000000..58080536
--- /dev/null
+++ b/build-driver/build
@@ -0,0 +1,12 @@
#!/bin/bash
#
# Entrypoint from CI jobs.
# Only valid assumptions:
# - apt sources.list is valid
# - the first parameter is the path to a grml-live checkout
# - the remaining parameters are the job configuration
set -ex
GRML_LIVE_PATH=$1
BUILD_DRIVER_DIR="$GRML_LIVE_PATH"/build-driver
apt-get satisfy -q -y --no-install-recommends 'python3-minimal, python3-yaml'
exec "$BUILD_DRIVER_DIR"/build.py "$@"
diff --git a/build-driver/build.py b/build-driver/build.py
new file mode 100755
index 00000000..ab6afad2
--- /dev/null
+++ b/build-driver/build.py
@@ -0,0 +1,478 @@
#!/usr/bin/env python3
#
# Main entry point for CI builds.
# We are started by ./build, which in turn is started by the CI config.
# Dependencies are only available if ./build has installed them first.
#
from pathlib import Path
import datetime
import contextlib
import subprocess
import os
import shutil
import time
import sys
import tempfile
from dataclasses import dataclass

import yaml

TOOL_DIR = Path(__file__).parent


@dataclass(frozen=True)
class JobProperties:
    job_timestamp: datetime.datetime
    job_name: str
    arch: str
    classes: list
    debian_suite: str
    version: str
    release_name: str
    grml_name: str
    isoname: str
    update_only: bool


def usage(program_name):
    message = f"""
Usage: {program_name} grml_live_path build_mode config_file flavor arch ...

Examples:
    {program_name} /build/job/grml-live release ./config/release-pre2024.XX-rc0 small amd64
    {program_name} /build/job/grml-live daily ./config/daily small amd64 testing
    """
    print(message.strip(), file=sys.stderr)


def run_x(args, check: bool = True, **kwargs):
    print(f"I: Running {args!r}")
    return subprocess.run(args, check=check, **kwargs)


@contextlib.contextmanager
def ci_section(title: str, *, collapsed: bool = False):
    ts = int(time.time())
    section_key = f"sec{ts}"
    collapsed_str = "[collapsed=true]" if collapsed else ""
    print(f"\x1b[0Ksection_start:{ts}:{section_key}{collapsed_str}\r\x1b[0K{title}")
    yield
    ts = int(time.time())
    print(f"\x1b[0Ksection_end:{ts}:{section_key}\r\x1b[0K")


def is_docker():
    return (
        Path("/.dockerenv").exists()
        or Path("/run/.containerenv").exists()
        or (Path("/proc/1/cgroup").exists() and b"devices:/docker" in Path("/proc/1/cgroup").read_bytes())
    )


def is_ci():
    return os.getenv("CI", "false") == "true"


def apt_satisfy(deps: str):
    run_x(
        ["apt-get", "satisfy", "-q", "-y", "--no-install-recommends", deps.strip()],
        env=dict(os.environ) | {"DEBIAN_FRONTEND": "noninteractive"},
    )


def get_grml_live(branch: str):
    checkout_path = Path(os.getcwd()) / "grml-live"
    run_x(["git", "clone", "-q", "--depth", "1", "-b", branch, "https://github.com/grml/grml-live", checkout_path])
    result = run_x(["git", "describe", "--always"], cwd=checkout_path, capture_output=True, text=True)
    version = result.stdout.strip()
    print(f"I: grml-live version: {version} from branch {branch}")
    return checkout_path


def print_grml_live_version(grml_live_path: Path):
    result = run_x(["git", "describe", "--always"], cwd=grml_live_path, capture_output=True, text=True)
    version = result.stdout.strip()
    print(f"I: grml-live version: {version}")


class GrmlLiveBuilder:
    def __init__(self, grml_live_path: Path):
        self.grml_live_path = grml_live_path

    def build(
        self,
        output_dir: Path,
        arch: str,
        classes: list,
        debian_suite: str,
        version: str,
        release_name: str,
        grml_name: str,
        isoname: str,
        update_only: bool,
    ):
        env = dict(os.environ)
        env.update(
            {
                "GRML_FAI_CONFIG": str(self.grml_live_path / "etc" / "grml" / "fai"),
                "SCRIPTS_DIRECTORY": str(self.grml_live_path / "scripts"),
                "LIVE_CONF": str(self.grml_live_path / "etc" / "grml" / "grml-live.conf"),
                "TEMPLATE_DIRECTORY": str(self.grml_live_path / "templates"),
            }
        )

        grml_live_cmd = [
            (self.grml_live_path / "grml-live"),
            "-F",  # do not prompt
            "-V",  # verbose
            "-A",  # cleanup afterwards
            "-a",
            arch,
            "-c",
            ",".join(classes),
            "-s",
            debian_suite,
            "-v",
            version,
            "-r",
            release_name,
            "-g",
            grml_name,
            "-i",
            isoname,
            "-o",
            output_dir,
        ]
        if update_only:
            grml_live_cmd.append("-b")
        with ci_section("Building with grml-live"):
            run_x(grml_live_cmd, env=env)


def fixup_fai():
    # Workaround for fai, necessary to build in docker where /dev/pts is unavailable.
    # apt prints: E: Can not write log (Is /dev/pts mounted?) - posix_openpt (19: No such device)
    fai_subroutines = Path("/usr/lib/fai/subroutines")
    old_code = fai_subroutines.read_text().splitlines()
    filtered_code = "\n".join([line for line in old_code if "task_error 472" not in line])
    fai_subroutines.write_text(filtered_code)


def upload_daily(job_name: str, build_dir: Path, job_timestamp: datetime.datetime):
    ssh_key = os.getenv("DAILY_UPLOAD_SSH_KEY")
    remote = os.getenv("DAILY_UPLOAD_REMOTE")
    stamped_dirname = job_timestamp.strftime("%Y-%m-%d_%H_%M_%S")
    with ci_section("Uploading to daily.grml.org", collapsed=True):
        run_x(
            [
                TOOL_DIR / "upload-daily.py",
                ssh_key,
                f"{remote}{job_name}",
                build_dir,
                job_name,
                stamped_dirname,
            ]
        )


def get_dpkg_list_path_for_build(build_dir: Path) -> Path:
    return build_dir / "grml_logs" / "fai" / "dpkg.list"


def generate_changes_list(
    build_dir: Path,
    output_filename: str,
    old_dpkg_list: Path,
    build_job_name: str,
    build_version: str,
):
    package_prefix = "grml"
    git_url_base = "https://github.com/grml"
    git_workspace = Path("/tmp") / "changes-git-workspace"
    output_file = build_dir / "grml_logs" / output_filename
    new_dpkg_list = get_dpkg_list_path_for_build(build_dir)

    args = [
        TOOL_DIR / "generate-changes-list.py",
        output_file,
        new_dpkg_list,
        old_dpkg_list,
        package_prefix,
        git_url_base,
        git_workspace,
        build_job_name,
        build_version,
    ]
    run_x(args)


@contextlib.contextmanager
def results_mover(output_dir: Path, build_dir: Path):
    # Assumed intent: whatever the build leaves behind in build_dir (ISOs, logs)
    # must end up in output_dir for CI artifact collection, even if the build failed.
    try:
        yield
    finally:
        for entry in build_dir.iterdir():
            shutil.move(str(entry), str(output_dir / entry.name))


def download_file(url: str, local_path: Path):
    """Download URL url into local_path, using curl. Raises on failure."""
    run_x(["curl", "-vSL", "--fail", url, "--output", local_path])


def should_skip_sources(build_config: dict, env: dict) -> bool:
    if env.get("SKIP_SOURCES", "") == "1":
        return True
    if build_config.get("skip_sources", False) is True:
        return True
    return False


def get_grml_live_classes(arch: str, flavor: str, classes_for_mode: list[str], skip_sources: bool) -> list[str]:
    base_classes = [
        "DEBORPHAN",
        "GRMLBASE",
        f"GRML_{flavor.upper()}",
        "RELEASE",
        arch.upper(),
        "IGNORE",
    ]
    if not skip_sources:
        # Assumption: the SOURCES class makes grml-live include Debian source packages.
        base_classes.append("SOURCES")
    return base_classes + classes_for_mode


def build(
    build_dir: Path,
    old_dpkg_list_daily: Path | None,
    old_dpkg_list_last_release: Path | None,
    job_properties: JobProperties,
    grml_live_builder: GrmlLiveBuilder,
    upload_to_daily: bool,
):
    # grml-live writes the ISO and logs below build_dir; the changes lists, the
    # daily upload and the dpkg.list cache copy all read from there. The caller's
    # results_mover moves everything into the results directory afterwards.
    grml_live_builder.build(
        build_dir,
        job_properties.arch,
        job_properties.classes,
        job_properties.debian_suite,
        job_properties.version,
        job_properties.release_name,
        job_properties.grml_name,
        job_properties.isoname,
        job_properties.update_only,
    )

    if old_dpkg_list_daily:
        generate_changes_list(
            build_dir,
            "changes-last-daily.txt",
            old_dpkg_list_daily,
            job_properties.job_name,
            job_properties.version,
        )

    if old_dpkg_list_last_release:
        generate_changes_list(
            build_dir,
            "changes-last-release.txt",
            old_dpkg_list_last_release,
            job_properties.job_name,
            job_properties.version,
        )

    if upload_to_daily:
        upload_daily(job_properties.job_name, build_dir, job_properties.job_timestamp)


def load_config(build_config_file: str) -> dict:
    with Path(build_config_file).open() as stream:
        return yaml.safe_load(stream)


def bail(message: str) -> int:
    print(f"E: {message}", file=sys.stderr)
    return 2


def install_debian_dependencies():
    # TODO: read (some!) deps from grml-live/debian/control
    with ci_section("Installing dependencies from Debian", collapsed=True):
        apt_satisfy(
            """
            ca-certificates ,
            git ,
            bc ,
            bzip2 ,
            curl ,
            debootstrap ,
            dosfstools ,
            fai-client (>= 3.4.0) ,
            isolinux (>= 3:6.03+dfsg-5+deb8u1~) ,
            jo ,
            mksh ,
            mmdebstrap ,
            moreutils ,
            mtools ,
            pciutils ,
            python3-paramiko ,
            rsync ,
            squashfs-tools (>= 1:4.2-0~bpo60) ,
            xorriso ,
            imagemagick
            """
        )


def main(program_name: str, argv: list[str]) -> int:
    print(f"I: {program_name} started with {argv=}")
    try:
        grml_live_path = Path(argv.pop(0))
        build_mode = argv.pop(0)
        build_config_file = argv.pop(0)
        if build_mode == "release":
            flavor = argv.pop(0)
            arch = argv.pop(0)
            debian_suite = ""  # filled from config
            update_only = True
            classes_for_mode = ["SNAPSHOT", "NO_ONLINE"]
            upload_to_daily = False

        elif build_mode == "daily":
            flavor = argv.pop(0)
            arch = argv.pop(0)
            debian_suite = argv.pop(0)
            update_only = False
            classes_for_mode = []
            upload_to_daily = os.getenv("DO_DAILY_UPLOAD", "") == "1"

        else:
            return bail(f"build_mode {build_mode} not understood, valid options are: release, daily")

    except IndexError:
        usage(program_name)
        return 2

    if arch not in ("amd64", "i386", "arm64"):
        return bail(f"unknown build_arch: {arch}")

    if not is_ci():
        print("I: No CI variable found, assuming local test build")
        if not is_docker():
            return bail("Not running inside docker, exiting to avoid data damage")

    # ./build has installed python3-yaml for us, so we can parse the config directly.
    build_config = load_config(build_config_file)

    if skip_sources := should_skip_sources(build_config, dict(os.environ)):
        print("I: skipping source download (requested via config or SKIP_SOURCES=1)")

    classes = get_grml_live_classes(arch, flavor, classes_for_mode, skip_sources)

    build_grml_name = f"grml-{flavor}-{arch}"
    last_release_version = build_config["last_release"]

    # build_grml_live_branch = os.getenv("USE_GRML_LIVE_BRANCH", "master")

    # We construct an ISO name like this:
    # daily: grml-full-daily20230201build20unstable-amd64.iso
    # release: grml-full-2024.12-arm64.iso
    # Note that release builds do not carry the Debian suite in their name.

    if build_mode == "release":
        build_version = build_config["release_version"]

        job_properties = JobProperties(
            job_timestamp=datetime.datetime.now(),
            job_name=f"{build_grml_name}-release",
            arch=arch,
            classes=list(classes),
            # XXX: should load this from the ISO or a metadata file
            debian_suite=build_config["debian_suite"],
            # e.g. "pre2024.11-rc0"
            version=build_version,
            # e.g. "Glumpad Grumbirn"
"Glumpad Grumbirn" + release_name=build_config["release_name"], + grml_name=build_grml_name, + isoname=f"grml-{flavor}-{build_version}-{arch}.iso", + update_only=update_only, + ) + + elif build_mode == "daily": + date_stamp = datetime.datetime.now().strftime("%Y%m%d") + CI_PIPELINE_IID = os.getenv("CI_PIPELINE_IID", "0") + build_version = f"d{date_stamp}b{CI_PIPELINE_IID}" + build_release_name = f"daily{date_stamp}build{CI_PIPELINE_IID}{debian_suite}" + + job_properties = JobProperties( + job_timestamp=datetime.datetime.now(), + job_name=f"{build_grml_name}-{debian_suite}", + arch=arch, + classes=list(classes), + debian_suite=debian_suite, + version=build_version, + release_name=build_release_name, + grml_name=build_grml_name, + isoname=f"grml-{flavor}-{build_release_name}-{arch}.iso", + update_only=update_only, + ) + + else: + raise ValueError(f"unexpected {build_mode=}") + + print(f"I: {job_properties=}") + print(f"I: {last_release_version=}") + + print_grml_live_version(grml_live_path) + grml_live_builder = GrmlLiveBuilder(grml_live_path) + + source_dir = Path(os.getcwd()) + cache_dir = source_dir / "cache" + output_dir = source_dir / "results" + output_dir.mkdir() + print("I: {source_dir=}") + print("I: {cache_dir=}") + print("I: {output_dir=}") + + # avoid building on mounted volume + tmp_root = Path("/tmp") if True else Path(".").absolute() + tmp_dir = Path(tempfile.mkdtemp(dir=tmp_root)) + build_dir = Path(tempfile.mkdtemp(dir=tmp_root)) + + if build_mode == "daily": + old_dpkg_list_daily = source_dir / "cache" / "dpkg.list" + else: + old_dpkg_list_daily = None + + if last_release_version: + # TODO: + # 1) deal with (OLD!) release names + # 2) interpolate arch, etc + # 3) error handling? + old_dpkg_list_last_release = tmp_dir / "dpkg.list.previous_release" + url = f"https://grml.org/files/grml64-{flavor}_{last_release_version}/dpkg.list" + try: + download_file(url, old_dpkg_list_last_release) + except Exception as except_inst: + print(f"E: ignoring error while downloading {url}: {except_inst}") + else: + old_dpkg_list_last_release = None + + install_debian_dependencies() + fixup_fai() + + with results_mover(output_dir, build_dir): + build( + output_dir, + build_dir, + old_dpkg_list_daily, + old_dpkg_list_last_release, + job_properties, + grml_live_builder, + upload_to_daily=upload_to_daily, + ) + + # Copy dpkg.list into cache for next iteration. 
        shutil.copyfile(new_dpkg_list, cache_dir / "dpkg.list")

    return 0


if __name__ == "__main__":
    sys.exit(main(sys.argv.pop(0), sys.argv))
diff --git a/build-driver/generate-changes-list.py b/build-driver/generate-changes-list.py
new file mode 100755
index 00000000..8ad4d0ab
--- /dev/null
+++ b/build-driver/generate-changes-list.py
@@ -0,0 +1,180 @@
#!/usr/bin/env python3
import os
import subprocess
import sys
import re
from pathlib import Path


class Listener:
    def __init__(self):
        self.failed = False

    def error(self, message: str):
        raise NotImplementedError

    def info(self, message: str):
        raise NotImplementedError

    def warn(self, message: str):
        raise NotImplementedError


class CliListener(Listener):
    def error(self, message: str):
        self.failed = True
        sys.stderr.write(f"E: {message}\n")

    def info(self, message: str):
        sys.stdout.write(f"I: {message}\n")

    def warn(self, message: str):
        sys.stdout.write(f"W: {message}\n")


def parse_package_list(s: str) -> dict:
    package_dict = {}
    for line in s.split("\n"):
        match = re.match(r"^ii\s+(\S+)\s+(\S+)\s", line)
        if match:
            package_dict[match[1]] = match[2]
    return package_dict


def build_changes(
    output_filename: Path,
    dpkg_list_new: Path,
    dpkg_list_old: Path,
    package_prefix: str,
    git_url_base: str,
    git_repo_workspace: Path,
    job_name: str,
    build_id: str,
    listener: Listener,
):
    git_repo_workspace.mkdir(parents=True, exist_ok=True)

    changelog = f"""------------------------------------------------------------------------
Generated by CI for job {job_name} {build_id}
------------------------------------------------------------------------
"""

    if not dpkg_list_new.exists():
        listener.error(f"Could not read package list {dpkg_list_new}")
        return

    packages = parse_package_list(dpkg_list_new.read_text())
    packages_old = {}
    try:
        packages_old = parse_package_list(dpkg_list_old.read_text())
    except Exception as e:
        listener.info(f"While parsing old package list: {e}")

    debian_changes = {"removed": [], "added": [], "changed": []}

    for package in set(packages_old) - set(packages):
        if re.match(f"^{package_prefix}", package):
            changelog += f"""Package {package}: Removed.
+------------------------------------------------------------------------ +""" + else: + debian_changes["removed"].append(package) + + for package, version in packages.items(): + old_version = packages_old.get(package) + if re.match(f"^{package_prefix}", package): + try: + listener.info(f"Generating changes list for package {package}...") + if old_version: + listener.info(f"Version {old_version} -> {version}") + if old_version == version: + continue + + # clone repo + git_url = f"{git_url_base}/{package}" + gitpath = git_repo_workspace / f"{package}.git" + if not gitpath.exists(): + env = dict(os.environ) | {"GIT_TERMINAL_PROMPT": "0"} + subprocess.run( + ["git", "clone", "--bare", "--single-branch", git_url, gitpath], + cwd=git_repo_workspace, + env=env, + ) + if not gitpath.exists(): + raise Exception("Repository not found") + + # update repo + subprocess.run(["git", "remote", "set-url", "origin", git_url], cwd=gitpath) + subprocess.run(["git", "remote", "update", "--prune"], cwd=gitpath).check_returncode() + + if old_version: + range = f"v{old_version}..v{version}" + else: + range = f"v{version}" + + result = subprocess.run(["git", "log", "--oneline", range], cwd=gitpath, capture_output=True) + if result.returncode != 0: + git_changes = "(failed)" + else: + git_changes = "\n ".join(result.stdout.decode().splitlines()) + changelog += f"""Package {package}: {range} {'(new)' if not old_version else ''} + {git_changes} +------------------------------------------------------------------------ +""" + except Exception as e: + listener.warn(f"Generating change report for package {package} failed: {e}") + else: + if old_version: + if old_version == version: + continue + debian_changes["changed"].append(f"{package} {old_version} -> {version}") + else: + debian_changes["added"].append(package) + + changelog += """Changes to Debian package list: + Added: + {} + Changed: + {} + Removed: + {} +------------------------------------------------------------------------ +""".format( + "\n ".join(debian_changes["added"]).strip(), + "\n ".join(debian_changes["changed"]).strip(), + "\n ".join(debian_changes["removed"]).strip(), + ) + + output_filename.write_text(changelog) + + +def main() -> int: + if len(sys.argv) != 9: + print( + f"Usage: {sys.argv[0]} output_filename dpkg_list_new dpkg_list_old package_prefix git_url_base git_repo_workspace job_name build_id" + ) + return 2 + + listener = CliListener() + try: + build_changes( + Path(sys.argv[1]), + Path(sys.argv[2]), + Path(sys.argv[3]), + sys.argv[4], + sys.argv[5], + Path(sys.argv[6]), + sys.argv[7], + sys.argv[8], + listener, + ) + except Exception as except_inst: + listener.error(f"Uncaught exception: {except_inst}") + + if listener.failed: + return 1 + return 0 + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/build-driver/pyproject.toml b/build-driver/pyproject.toml new file mode 100644 index 00000000..ff80772f --- /dev/null +++ b/build-driver/pyproject.toml @@ -0,0 +1,9 @@ +[tool.pycodestyle] +max-line-length = 120 + +[tool.black] +line-length = 120 +target-version = ['py311'] + +[tool.pyupgrade] +version-option = '--py311-plus' diff --git a/build-driver/upload-daily.py b/build-driver/upload-daily.py new file mode 100755 index 00000000..c54dd016 --- /dev/null +++ b/build-driver/upload-daily.py @@ -0,0 +1,114 @@ +#!/usr/bin/env python3 +import paramiko +import pathlib +import sys +from stat import S_ISDIR + + +def sftp_isdir(sftp, path: str): + try: + return S_ISDIR(sftp.stat(path).st_mode) + except IOError: + return False + + +def 
    files = sftp.listdir(remote_dir)
    remote_path = pathlib.Path(remote_dir)

    for f in files:
        filepath = (remote_path / f).as_posix()
        if sftp_isdir(sftp, filepath):
            sftp_rm_r(sftp, filepath)
        else:
            sftp.remove(filepath)

    sftp.rmdir(remote_dir)


def upload_dir(sftp, local_dir: pathlib.Path, remote_dir: str):
    remote_root = pathlib.Path(remote_dir)
    seen = set()
    for local_path in local_dir.rglob("*"):
        if not local_path.is_file():
            continue

        relative_path = local_path.relative_to(local_dir)
        remote_path = remote_root / relative_path

        # Create any missing remote parent directories, top-down.
        for parent in reversed(remote_path.parents):
            parent = parent.as_posix()
            if parent in seen:
                continue
            try:
                sftp.stat(parent)
            except FileNotFoundError:
                sftp.mkdir(parent)
            seen.add(parent)

        print("Uploading", local_path, "to", remote_path)
        sftp.put(local_path.as_posix(), remote_path.as_posix())


def main():
    keyfile = sys.argv[1]
    # user@remote.host:/grml64-small_sid
    remote_site_and_path = sys.argv[2]
    # /tmp/builddir
    local_dir = pathlib.Path(sys.argv[3])
    # grml64-small_sid
    job_name = sys.argv[4]
    # 2024-11-09_01_31_01
    stamped_dirname = sys.argv[5]

    remote_site = remote_site_and_path.split(":")[0]
    remote_path = remote_site_and_path.split(":")[1]
    remote_host = remote_site.split("@")[1]
    remote_user = remote_site.split("@")[0]

    pkey = paramiko.Ed25519Key.from_private_key_file(keyfile)

    transport = paramiko.Transport((remote_host, 22))
    transport.connect(username=remote_user, pkey=pkey)

    sftp = paramiko.SFTPClient.from_transport(transport)
    assert sftp is not None

    # Prune old dailies: keep the newest 14 stamped directories, never "latest".
    versions = [path for path in sorted(sftp.listdir(remote_path)) if path != "latest"]
    for version in versions[:-14]:
        print("Removing old version", version)
        sftp_rm_r(sftp, f"{remote_path}/{version}")

    remote_stamped = f"{remote_path}/{stamped_dirname}"
    upload_dir(sftp, local_dir, remote_stamped)

    remote_latest = f"{remote_path}/latest"
    try:
        sftp.mkdir(remote_latest)
    except IOError:
        pass

    real_iso_name = next(local_dir.glob("grml_isos/*iso")).name
    real_checksum_name = next(local_dir.glob("grml_isos/*iso.sha256")).name
    latest_iso_name = f"{job_name}_latest.iso"
    latest_checksum_name = f"{job_name}_latest.iso.sha256"

    for symlink, real in [
        (latest_iso_name, real_iso_name),
        (latest_checksum_name, real_checksum_name),
    ]:
        remote_symlink = f"{remote_latest}/{symlink}"
        remote_real = f"../{stamped_dirname}/grml_isos/{real}"
        print("Updating symlink", remote_symlink, "to", remote_real)
        try:
            sftp.unlink(remote_symlink)
        except FileNotFoundError:
            pass
        sftp.symlink(remote_real, remote_symlink)

    sftp.close()
    transport.close()


if __name__ == "__main__":
    main()