Feat: improve local build script
This commit is contained in:
949
scripts/build.py
Executable file
949
scripts/build.py
Executable file
@@ -0,0 +1,949 @@
|
||||
#!/usr/bin/env python3
|
||||
# /// script
|
||||
# requires-python = ">=3.11"
|
||||
# dependencies = ["rich>=13.0", "questionary>=2.0"]
|
||||
# ///
|
||||
"""Cagire release builder — replaces build-all.sh, make-dmg.sh, make-appimage.sh."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import hashlib
|
||||
import os
|
||||
import platform
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
import tempfile
|
||||
import threading
|
||||
import time
|
||||
import tomllib
|
||||
from concurrent.futures import ThreadPoolExecutor
|
||||
from dataclasses import dataclass, field
|
||||
from pathlib import Path
|
||||
|
||||
from rich.console import Console
|
||||
from rich.layout import Layout
|
||||
from rich.live import Live
|
||||
from rich.panel import Panel
|
||||
from rich.progress_bar import ProgressBar
|
||||
from rich.table import Table
|
||||
from rich.text import Text
|
||||
|
||||
import questionary
|
||||
|
||||
# Single shared Rich console used for all terminal output in this script.
console = Console()
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Build progress tracking (shared between threads)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
# Guards _build_progress and _build_logs, which are written by worker build
# threads and read by the live-display renderer.
_progress_lock = threading.Lock()
_build_progress: dict[str, tuple[str, int]] = {}  # alias -> (phase, step)
_build_logs: list[tuple[str, str]] = []  # (alias, line)
|
||||
|
||||
|
||||
def _update_phase(alias: str, phase: str, step: int) -> None:
    """Record the current build phase and step index for *alias* (thread-safe)."""
    with _progress_lock:
        _build_progress[alias] = (phase, step)
|
||||
|
||||
|
||||
class BuildLog(list):
    """A list that also feeds lines into the shared log buffer.

    Worker threads append build output here; the live display reads the
    module-level ``_build_logs`` buffer to render a combined tail across
    all concurrent builds.
    """

    def __init__(self, alias: str):
        super().__init__()
        # Platform alias used to tag this build's lines in the shared buffer.
        self._alias = alias

    def append(self, line: str) -> None:
        """Store *line* locally and mirror it into the shared buffer."""
        super().append(line)
        with _progress_lock:
            _build_logs.append((self._alias, line))

    def extend(self, lines) -> None:
        """Route bulk additions through append() so none bypass the shared buffer.

        Fix: previously only append() was overridden, so extend()/+= would
        silently skip the live-display mirror.
        """
        for line in lines:
            self.append(line)
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Data structures
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
# Cargo package name and cdylib name of the audio-plugin crate, plus the
# repo-relative directory where finished release artifacts are collected.
PLUGIN_NAME = "cagire-plugins"
LIB_NAME = "cagire_plugins"
OUT = "releases"
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class Platform:
    """A build target derived from a Rust target triple (see _parse_triple)."""

    triple: str  # full Rust target triple, e.g. "aarch64-apple-darwin"
    label: str   # human-readable label shown in the UI
    alias: str   # short CLI alias, e.g. "macos-arm64"
    os: str      # "macos" | "linux" | "windows"
    arch: str    # architecture component of the triple, e.g. "x86_64"
    cross: bool  # built with the `cross` tool instead of plain `cargo`
    native: bool # triple matches the host OS and architecture
|
||||
|
||||
|
||||
def _parse_triple(triple: str) -> Platform:
    """Derive a full Platform from a Rust target triple."""
    arch = triple.split("-")[0]

    # Map the vendor/OS part of the triple to an OS name and to whether the
    # build goes through `cross` rather than plain cargo.
    if "apple" in triple:
        os_name = "macos"
        cross = False
    elif "linux" in triple:
        os_name = "linux"
        cross = True
    elif "windows" in triple:
        os_name = "windows"
        cross = True
    else:
        raise ValueError(f"Unknown OS in triple: {triple}")

    # Normalise the host arch so macOS's "arm64" compares against the
    # triple-style "aarch64".
    host = platform.machine()
    if host == "arm64":
        host = "aarch64"
    system = platform.system()
    native = arch == host and (
        (os_name == "macos" and system == "Darwin")
        or (os_name == "linux" and system == "Linux")
    )

    mode = "cross" if (cross and not native) else "native"
    alias_arch = "arm64" if os_name == "macos" and arch == "aarch64" else arch
    pretty_os = "macOS" if os_name == "macos" else os_name.capitalize()
    return Platform(
        triple,
        f"{pretty_os} {arch} ({mode})",
        f"{os_name}-{alias_arch}",
        os_name,
        arch,
        cross,
        native,
    )
|
||||
|
||||
|
||||
def load_platforms(root: Path) -> list[Platform]:
    """Load platform definitions from scripts/platforms.toml."""
    manifest = root / "scripts" / "platforms.toml"
    with manifest.open("rb") as fh:
        triples = tomllib.load(fh)["triples"]
    return [_parse_triple(t) for t in triples]
|
||||
|
||||
|
||||
@dataclass
class BuildConfig:
    """Which build targets are enabled; all default to on."""

    cli: bool = True      # build the `cagire` CLI binary
    desktop: bool = True  # build the `cagire-desktop` binary / .app
    plugins: bool = True  # build the CLAP/VST3 plugin bundles
|
||||
|
||||
|
||||
@dataclass
class PlatformResult:
    """Outcome of one platform's full build pass."""

    platform: Platform
    success: bool
    elapsed: float  # seconds spent building this platform
    artifacts: list[str] = field(default_factory=list)  # paths of produced artifacts
    log_lines: list[str] = field(default_factory=list)  # captured build output
    error: str | None = None  # stringified exception on failure
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Helpers
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def get_version(root: Path) -> str:
|
||||
with open(root / "Cargo.toml", "rb") as f:
|
||||
cargo = tomllib.load(f)
|
||||
return cargo["workspace"]["package"]["version"]
|
||||
|
||||
|
||||
def builder_for(p: Platform) -> str:
|
||||
return "cross" if p.cross else "cargo"
|
||||
|
||||
|
||||
def release_dir(root: Path, p: Platform) -> Path:
|
||||
if p.native:
|
||||
return root / "target" / "release"
|
||||
return root / "target" / p.triple / "release"
|
||||
|
||||
|
||||
def target_flags(p: Platform) -> list[str]:
|
||||
if p.native:
|
||||
return []
|
||||
return ["--target", p.triple]
|
||||
|
||||
|
||||
def suffix_for(p: Platform) -> str:
|
||||
return ".exe" if p.os == "windows" else ""
|
||||
|
||||
|
||||
def run_cmd(
    cmd: list[str], log: list[str], env: dict[str, str] | None = None,
    input: str | None = None, cwd: Path | None = None,
) -> None:
    """Run *cmd*, streaming its combined stdout/stderr into *log*.

    *env* entries overlay the inherited environment; *input*, if given, is
    written to the child's stdin. Raises subprocess.CalledProcessError on a
    non-zero exit status.
    """
    # Echo the command itself so the log reads like a shell session.
    log.append(f" $ {' '.join(cmd)}")
    # Overlay caller-supplied variables on the inherited environment.
    merged_env = {**os.environ, **(env or {})}
    proc = subprocess.Popen(
        cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
        text=True, env=merged_env,
        stdin=subprocess.PIPE if input else subprocess.DEVNULL,
        cwd=cwd,
    )
    if input:
        # NOTE(review): stdin is written in full before stdout is drained; a
        # child that fills its stdout pipe while still reading stdin could
        # deadlock. Fine for the small payloads used here (a short Dockerfile)
        # — confirm before passing large inputs.
        proc.stdin.write(input)
        proc.stdin.close()
    for line in proc.stdout:
        log.append(f" {line.rstrip()}")
    rc = proc.wait()
    if rc != 0:
        raise subprocess.CalledProcessError(rc, cmd)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Build functions
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def _macos_env(p: Platform) -> dict[str, str] | None:
|
||||
if p.os == "macos":
|
||||
return {"MACOSX_DEPLOYMENT_TARGET": "12.0"}
|
||||
return None
|
||||
|
||||
|
||||
def build_binary(root: Path, p: Platform, log: list[str], extra_args: list[str] | None = None) -> None:
    """Compile a release binary for *p* using cargo or cross as appropriate."""
    args = list(extra_args or [])
    log.append(f" Building: {' '.join(args or ['default'])}")
    full_cmd = [builder_for(p), "build", "--release", *target_flags(p), *args]
    run_cmd(full_cmd, log, env=_macos_env(p))
|
||||
|
||||
|
||||
def bundle_plugins(root: Path, p: Platform, log: list[str]) -> None:
    """Produce CLAP/VST3 plugin bundles, dispatching on cross vs native."""
    impl = _bundle_plugins_cross if p.cross else _bundle_plugins_native
    impl(root, p, log)
|
||||
|
||||
|
||||
def _bundle_plugins_native(root: Path, p: Platform, log: list[str]) -> None:
    """Bundle plugins with the project's `cargo xtask bundle` on the host toolchain."""
    log.append(" Bundling plugins (native xtask)")
    xtask_cmd = [
        "cargo", "xtask", "bundle", PLUGIN_NAME, "--release",
        *target_flags(p),
    ]
    run_cmd(xtask_cmd, log, env=_macos_env(p))
|
||||
|
||||
|
||||
def _bundle_plugins_cross(root: Path, p: Platform, log: list[str]) -> None:
    """Build the plugin cdylib with `cross` and hand-assemble CLAP/VST3 bundles.

    Cross builds bypass `cargo xtask bundle` (see bundle_plugins), so the
    bundle layouts are replicated here from the raw shared library.
    """
    log.append(" Bundling plugins (cross)")
    build_binary(root, p, log, extra_args=["-p", PLUGIN_NAME])

    rd = release_dir(root, p)
    # The cdylib file name follows each platform's library naming convention.
    if p.os == "linux":
        src_lib = rd / f"lib{LIB_NAME}.so"
    elif p.os == "windows":
        src_lib = rd / f"{LIB_NAME}.dll"
    else:
        # macOS is never cross-built (see _parse_triple), so it cannot reach here.
        raise RuntimeError(f"Unexpected cross OS: {p.os}")

    if not src_lib.exists():
        raise FileNotFoundError(f"Expected library not found: {src_lib}")

    out = root / OUT

    # CLAP — flat file
    clap_dst = out / f"{PLUGIN_NAME}-{p.os}-{p.arch}.clap"
    shutil.copy2(src_lib, clap_dst)
    log.append(f" CLAP -> {clap_dst}")

    # VST3 — directory tree
    vst3_dir = out / f"{PLUGIN_NAME}-{p.os}-{p.arch}.vst3"
    if p.os == "linux":
        contents = vst3_dir / "Contents" / f"{p.arch}-linux"
        contents.mkdir(parents=True, exist_ok=True)
        shutil.copy2(src_lib, contents / f"{PLUGIN_NAME}.so")
    elif p.os == "windows":
        contents = vst3_dir / "Contents" / f"{p.arch}-win"
        contents.mkdir(parents=True, exist_ok=True)
        shutil.copy2(src_lib, contents / f"{PLUGIN_NAME}.vst3")
    log.append(f" VST3 -> {vst3_dir}/")
|
||||
|
||||
|
||||
def bundle_desktop_app(root: Path, p: Platform, log: list[str]) -> None:
    """Run `cargo bundle` for the desktop app; cross targets are skipped."""
    if p.cross:
        return
    log.append(" Bundling desktop .app")
    bundle_cmd = [
        "cargo", "bundle", "--release",
        "--features", "desktop",
        "--bin", "cagire-desktop",
        *target_flags(p),
    ]
    run_cmd(bundle_cmd, log, env=_macos_env(p))
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Packaging: DMG
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def make_dmg(root: Path, app_path: Path, arch: str, output_dir: Path, log: list[str]) -> str | None:
    """Package *app_path* into a compressed DMG; return its path, or None on lipo failure.

    The DMG's arch label is derived from the binary itself via `lipo`, so a
    fat binary is labelled "universal" regardless of the requested *arch*.
    NOTE(review): the *arch* parameter is unused here — the label comes from
    lipo instead.
    """
    log.append(f" Building DMG for {app_path.name}")

    binary = app_path / "Contents" / "MacOS" / "cagire-desktop"
    result = subprocess.run(["lipo", "-info", str(binary)], capture_output=True, text=True)
    if result.returncode != 0:
        log.append(f" ERROR: lipo failed on {binary}")
        return None

    lipo_out = result.stdout.strip()
    if "Architectures in the fat file" in lipo_out:
        dmg_arch = "universal"
    else:
        # Thin binary: lipo prints the single architecture last; normalise
        # Apple's "arm64" spelling to the Rust-style "aarch64".
        raw_arch = lipo_out.split()[-1]
        dmg_arch = "aarch64" if raw_arch == "arm64" else raw_arch

    # Stage a throwaway folder holding the .app, an /Applications symlink for
    # drag-install, and an optional README.
    staging = Path(tempfile.mkdtemp())
    try:
        shutil.copytree(app_path, staging / "Cagire.app")
        (staging / "Applications").symlink_to("/Applications")
        readme = root / "assets" / "DMG-README.txt"
        if readme.exists():
            shutil.copy2(readme, staging / "README.txt")

        dmg_name = f"Cagire-{dmg_arch}.dmg"
        output_dir.mkdir(parents=True, exist_ok=True)
        dmg_path = output_dir / dmg_name

        run_cmd([
            "hdiutil", "create",
            "-volname", "Cagire",
            "-srcfolder", str(staging),
            "-ov", "-format", "UDZO",
            str(dmg_path),
        ], log)
        log.append(f" DMG -> {dmg_path}")
        return str(dmg_path)
    finally:
        # Always remove the staging tree, even if hdiutil fails.
        shutil.rmtree(staging, ignore_errors=True)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Packaging: AppImage
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
APPRUN_SCRIPT = """\
|
||||
#!/bin/sh
|
||||
SELF="$(readlink -f "$0")"
|
||||
HERE="$(dirname "$SELF")"
|
||||
exec "$HERE/usr/bin/cagire" "$@"
|
||||
"""
|
||||
|
||||
|
||||
def _build_appdir(root: Path, binary: Path, appdir: Path) -> None:
    """Assemble an AppImage-style AppDir tree around *binary*."""
    bin_dir = appdir / "usr" / "bin"
    bin_dir.mkdir(parents=True)
    installed = bin_dir / "cagire"
    shutil.copy2(binary, installed)
    installed.chmod(0o755)

    icon_dir = appdir / "usr" / "share" / "icons" / "hicolor" / "512x512" / "apps"
    icon_dir.mkdir(parents=True)
    shutil.copy2(root / "assets" / "Cagire.png", icon_dir / "cagire.png")
    shutil.copy2(root / "assets" / "cagire.desktop", appdir / "cagire.desktop")

    launcher = appdir / "AppRun"
    launcher.write_text(APPRUN_SCRIPT)
    launcher.chmod(0o755)

    # Top-level icon expected by AppImage tooling, as a relative symlink into
    # the hicolor tree.
    (appdir / "cagire.png").symlink_to("usr/share/icons/hicolor/512x512/apps/cagire.png")
|
||||
|
||||
|
||||
def _download_if_missing(url: str, dest: Path, log: list[str]) -> None:
    """Fetch *url* to *dest* with curl, skipping if already cached.

    On failure the partially-written file is removed, so a later run does
    not mistake it for a complete download (the exists() check above would
    otherwise treat a truncated file as a valid cache hit).
    """
    if dest.exists():
        return
    dest.parent.mkdir(parents=True, exist_ok=True)
    log.append(f" Downloading {url}")
    try:
        run_cmd(["curl", "-fSL", url, "-o", str(dest)], log)
    except Exception:
        # Fix: a failed/interrupted curl could leave a truncated file that
        # poisoned the cache for every subsequent run.
        dest.unlink(missing_ok=True)
        raise
|
||||
|
||||
|
||||
def _make_appimage_native(root: Path, binary: Path, arch: str, output_dir: Path, log: list[str]) -> str:
    """Build an AppImage on a matching Linux host using linuxdeploy.

    Downloads (and caches) the type2 runtime and linuxdeploy, stages an
    AppDir, then lets linuxdeploy emit the AppImage into the repo root
    before moving it to *output_dir*. Returns the final AppImage path.
    Raises FileNotFoundError if linuxdeploy produced no output.
    """
    cache = root / ".cache"
    runtime = cache / f"runtime-{arch}"
    _download_if_missing(
        f"https://github.com/AppImage/type2-runtime/releases/download/continuous/runtime-{arch}",
        runtime, log,
    )

    linuxdeploy = cache / f"linuxdeploy-{arch}.AppImage"
    _download_if_missing(
        f"https://github.com/linuxdeploy/linuxdeploy/releases/download/continuous/linuxdeploy-{arch}.AppImage",
        linuxdeploy, log,
    )
    linuxdeploy.chmod(0o755)

    staging = Path(tempfile.mkdtemp())
    try:
        appdir = staging / "AppDir"
        _build_appdir(root, binary, appdir)

        app_name = binary.name
        env = {"ARCH": arch, "LDAI_RUNTIME_FILE": str(runtime)}
        run_cmd([
            str(linuxdeploy),
            "--appimage-extract-and-run",
            "--appdir", str(appdir),
            "--desktop-file", str(appdir / "cagire.desktop"),
            "--icon-file", str(appdir / "usr" / "share" / "icons" / "hicolor" / "512x512" / "apps" / "cagire.png"),
            "--output", "appimage",
        ], log, env=env, cwd=root)
    finally:
        # Fix: the mkdtemp staging tree was previously leaked in the system
        # temp dir; clean it up like make_dmg does.
        shutil.rmtree(staging, ignore_errors=True)

    # linuxdeploy drops the AppImage in cwd; take the newest one.
    candidates = sorted(root.glob("*.AppImage"), key=lambda p: p.stat().st_mtime, reverse=True)
    if not candidates:
        raise FileNotFoundError("No AppImage produced by linuxdeploy")

    output_dir.mkdir(parents=True, exist_ok=True)
    final = output_dir / f"{app_name}-linux-{arch}.AppImage"
    shutil.move(str(candidates[0]), final)
    log.append(f" AppImage -> {final}")
    return str(final)
|
||||
|
||||
|
||||
def _make_appimage_docker(root: Path, binary: Path, arch: str, output_dir: Path, log: list[str]) -> str:
    """Build an AppImage for *arch* via Docker + mksquashfs, host-independent.

    The AppImage is assembled by concatenating the type2 runtime with a
    squashfs image of the AppDir, then marking the result executable.
    Returns the final AppImage path.
    """
    cache = root / ".cache"
    runtime = cache / f"runtime-{arch}"
    _download_if_missing(
        f"https://github.com/AppImage/type2-runtime/releases/download/continuous/runtime-{arch}",
        runtime, log,
    )

    staging = Path(tempfile.mkdtemp())
    try:
        appdir = staging / "AppDir"
        _build_appdir(root, binary, appdir)

        docker_platform = "linux/amd64" if arch == "x86_64" else "linux/arm64"
        image_tag = f"cagire-appimage-{arch}"

        # Tiny throwaway image providing mksquashfs; Dockerfile is piped on stdin.
        log.append(f" Building Docker image {image_tag} ({docker_platform})")
        dockerfile = "FROM ubuntu:22.04\nRUN apt-get update && apt-get install -y --no-install-recommends squashfs-tools && rm -rf /var/lib/apt/lists/*\n"
        run_cmd([
            "docker", "build", "--platform", docker_platform, "-q", "-t", image_tag, "-",
        ], log, input=dockerfile)

        squashfs = cache / f"appimage-{arch}.squashfs"
        run_cmd([
            "docker", "run", "--rm", "--platform", docker_platform,
            "-v", f"{appdir}:/appdir:ro",
            "-v", f"{cache}:/cache",
            image_tag,
            "mksquashfs", "/appdir", f"/cache/appimage-{arch}.squashfs",
            "-root-owned", "-noappend", "-comp", "gzip", "-no-progress",
        ], log)
    finally:
        # Fix: the mkdtemp staging tree was previously leaked in the system
        # temp dir; clean it up like make_dmg does.
        shutil.rmtree(staging, ignore_errors=True)

    output_dir.mkdir(parents=True, exist_ok=True)
    app_name = binary.name
    final = output_dir / f"{app_name}-linux-{arch}.AppImage"
    # Runtime header followed by the squashfs payload forms a type-2 AppImage.
    with open(final, "wb") as out_f:
        out_f.write(runtime.read_bytes())
        out_f.write(squashfs.read_bytes())
    final.chmod(0o755)
    squashfs.unlink(missing_ok=True)
    log.append(f" AppImage -> {final}")
    return str(final)
|
||||
|
||||
|
||||
def make_appimage(root: Path, binary: Path, arch: str, output_dir: Path, log: list[str]) -> str:
    """Dispatch AppImage creation to the native or Docker-based builder."""
    log.append(f" Building AppImage for {binary.name} ({arch})")
    on_matching_linux_host = platform.system() == "Linux" and platform.machine() == arch
    if on_matching_linux_host:
        return _make_appimage_native(root, binary, arch, output_dir, log)
    return _make_appimage_docker(root, binary, arch, output_dir, log)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Packaging: NSIS
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def make_nsis(root: Path, rd: Path, version: str, output_dir: Path, log: list[str]) -> str | None:
    """Build the Windows NSIS installer; return its path, or None if makensis is absent.

    *rd* is the cross-compiled release directory holding cagire.exe and
    cagire-desktop.exe; the .nsi script receives all paths as -D defines.
    """
    if not shutil.which("makensis"):
        log.append(" makensis not found, skipping NSIS installer")
        return None

    log.append(" Building NSIS installer")
    abs_root = str(root.resolve())
    run_cmd([
        "makensis",
        f"-DVERSION={version}",
        f"-DCLI_EXE={abs_root}/{rd.relative_to(root)}/cagire.exe",
        f"-DDESKTOP_EXE={abs_root}/{rd.relative_to(root)}/cagire-desktop.exe",
        f"-DICON={abs_root}/assets/Cagire.ico",
        f"-DOUTDIR={abs_root}/{OUT}",
        str(root / "nsis" / "cagire.nsi"),
    ], log)

    # NOTE(review): this name must match OutFile in nsis/cagire.nsi — the
    # path is assumed here, not verified against what makensis wrote.
    installer = f"cagire-{version}-windows-x86_64-setup.exe"
    log.append(f" Installer -> {output_dir / installer}")
    return str(output_dir / installer)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Artifact copying & packaging dispatch
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def copy_artifacts(root: Path, p: Platform, config: BuildConfig, log: list[str]) -> list[str]:
    """Copy built binaries into releases/ and run the per-OS packaging steps.

    Returns the list of artifact paths produced: raw binaries, then the
    macOS .app + DMG, Windows NSIS installer, Linux AppImages, and finally
    the native plugin bundles.
    """
    rd = release_dir(root, p)
    out = root / OUT
    sx = suffix_for(p)
    version = get_version(root)
    artifacts: list[str] = []

    if config.cli:
        src = rd / f"cagire{sx}"
        dst = out / f"cagire-{p.os}-{p.arch}{sx}"
        shutil.copy2(src, dst)
        log.append(f" cagire -> {dst}")
        artifacts.append(str(dst))

    if config.desktop:
        src = rd / f"cagire-desktop{sx}"
        dst = out / f"cagire-desktop-{p.os}-{p.arch}{sx}"
        shutil.copy2(src, dst)
        log.append(f" cagire-desktop -> {dst}")
        artifacts.append(str(dst))

    if p.os == "macos":
        # NOTE(review): assumes cargo-bundle ran (desktop target enabled);
        # the is_dir() check below raises if the .app is missing — confirm
        # whether macos-with-cli-only is a supported combination.
        app_src = rd / "bundle" / "osx" / "Cagire.app"
        if not app_src.is_dir():
            raise FileNotFoundError(f".app bundle not found at {app_src}")
        app_dst = out / f"Cagire-{p.arch}.app"
        if app_dst.exists():
            shutil.rmtree(app_dst)
        shutil.copytree(app_src, app_dst)
        log.append(f" Cagire.app -> {app_dst}")
        artifacts.append(str(app_dst))

        dmg = make_dmg(root, app_dst, p.arch, out, log)
        if dmg:
            artifacts.append(dmg)

    if p.os == "windows":
        nsis = make_nsis(root, rd, version, out, log)
        if nsis:
            artifacts.append(nsis)

    if p.os == "linux":
        if config.cli:
            ai = make_appimage(root, rd / "cagire", p.arch, out, log)
            artifacts.append(ai)
        if config.desktop:
            ai = make_appimage(root, rd / "cagire-desktop", p.arch, out, log)
            artifacts.append(ai)

    # Native plugin bundles come from `cargo xtask bundle` under target/bundled;
    # cross builds already wrote theirs in _bundle_plugins_cross.
    if config.plugins and not p.cross:
        bundle_dir = root / "target" / "bundled"
        clap_src = bundle_dir / f"{PLUGIN_NAME}.clap"
        if clap_src.exists():
            clap_dst = out / f"{PLUGIN_NAME}-{p.os}-{p.arch}.clap"
            # The .clap may be a flat file or a directory depending on platform.
            if clap_src.is_dir():
                if clap_dst.exists():
                    shutil.rmtree(clap_dst)
                shutil.copytree(clap_src, clap_dst)
            else:
                shutil.copy2(clap_src, clap_dst)
            log.append(f" CLAP -> {clap_dst}")
            artifacts.append(str(clap_dst))

        vst3_src = bundle_dir / f"{PLUGIN_NAME}.vst3"
        if vst3_src.is_dir():
            vst3_dst = out / f"{PLUGIN_NAME}-{p.os}-{p.arch}.vst3"
            if vst3_dst.exists():
                shutil.rmtree(vst3_dst)
            shutil.copytree(vst3_src, vst3_dst)
            log.append(f" VST3 -> {vst3_dst}/")
            artifacts.append(str(vst3_dst))

    return artifacts
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Per-platform orchestration
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def _count_phases(p: Platform, config: BuildConfig) -> int:
|
||||
n = 0
|
||||
if config.cli:
|
||||
n += 1
|
||||
if config.desktop:
|
||||
n += 1
|
||||
if not p.cross:
|
||||
n += 1 # bundle .app
|
||||
if config.plugins:
|
||||
n += 1
|
||||
n += 1 # copy artifacts / packaging
|
||||
return n
|
||||
|
||||
|
||||
def build_platform(root: Path, p: Platform, config: BuildConfig) -> PlatformResult:
    """Run every enabled build phase for one platform; never raises.

    Each phase reports progress via _update_phase so the live display can
    render it; any failure is captured into the returned PlatformResult.
    """
    log = BuildLog(p.alias)
    t0 = time.monotonic()
    step = 0
    try:
        if config.cli:
            _update_phase(p.alias, "compiling cli", step)
            build_binary(root, p, log)
            step += 1

        if config.desktop:
            _update_phase(p.alias, "compiling desktop", step)
            build_binary(root, p, log, extra_args=["--features", "desktop", "--bin", "cagire-desktop"])
            step += 1
            if not p.cross:
                # Only native builds can produce the .app bundle.
                _update_phase(p.alias, "bundling .app", step)
                bundle_desktop_app(root, p, log)
                step += 1

        if config.plugins:
            _update_phase(p.alias, "bundling plugins", step)
            bundle_plugins(root, p, log)
            step += 1

        _update_phase(p.alias, "packaging", step)
        log.append(" Copying artifacts...")
        artifacts = copy_artifacts(root, p, config, log)

        elapsed = time.monotonic() - t0
        return PlatformResult(p, True, elapsed, artifacts, log)

    except Exception as e:
        # Broad catch is deliberate: one platform's failure must not abort
        # the other concurrent builds; the error is surfaced in the result.
        elapsed = time.monotonic() - t0
        log.append(f" ERROR: {e}")
        return PlatformResult(p, False, elapsed, [], log, str(e))
|
||||
|
||||
|
||||
def _build_display(
    platforms: list[Platform],
    config: BuildConfig,
    completed: dict[str, PlatformResult],
    start_times: dict[str, float],
    log_max_lines: int,
) -> Layout:
    """Render the live two-pane layout: status table on top, log tail below."""
    layout = Layout()
    # Extra rows account for the panel border and the table header.
    status_height = len(platforms) + 4
    layout.split_column(
        Layout(name="status", size=status_height),
        Layout(name="logs"),
    )

    table = Table(padding=(0, 1), expand=True)
    table.add_column("Platform", style="cyan", min_width=28, no_wrap=True)
    table.add_column("Phase", min_width=20, no_wrap=True)
    table.add_column("Progress", min_width=22, no_wrap=True)
    table.add_column("Time", justify="right", min_width=6, no_wrap=True)

    # Snapshot shared state under the lock so rendering sees a consistent view.
    with _progress_lock:
        progress_snapshot = dict(_build_progress)

    for p in platforms:
        alias = p.alias
        total = _count_phases(p, config)

        if alias in completed:
            # Finished: full bar, coloured by outcome.
            r = completed[alias]
            if r.success:
                n = len(r.artifacts)
                phase = Text(f"OK ({n} artifacts)", style="green")
                bar = ProgressBar(total=total, completed=total, width=20, complete_style="green")
            else:
                phase = Text(f"FAIL", style="red")
                bar = ProgressBar(total=total, completed=total, width=20, complete_style="red")
            elapsed = f"{r.elapsed:.0f}s"
        elif alias in progress_snapshot:
            # In flight: show the reported phase and step count.
            ph, step = progress_snapshot[alias]
            phase = Text(ph, style="yellow")
            bar = ProgressBar(total=total, completed=step, width=20)
            elapsed = f"{time.monotonic() - start_times.get(alias, time.monotonic()):.0f}s"
        else:
            phase = Text("waiting", style="dim")
            bar = ProgressBar(total=total, completed=0, width=20)
            elapsed = ""

        table.add_row(p.label, phase, bar, elapsed)

    layout["status"].update(Panel(table, title="[bold blue]Build Progress[/]", border_style="blue"))

    with _progress_lock:
        recent = _build_logs[-log_max_lines:]

    if recent:
        lines: list[str] = []
        for alias, line in recent:
            # Three-letter prefix ("mac"/"lin"/"win") tags each line's origin.
            short = alias.split("-")[0][:3]
            lines.append(f"[dim]{short}[/] {line.rstrip()}")
        log_text = "\n".join(lines)
    else:
        log_text = "[dim]waiting for output...[/]"

    layout["logs"].update(Panel(log_text, title="[bold]Build Output[/]", border_style="dim"))

    return layout
|
||||
|
||||
|
||||
def run_builds(
    root: Path, platforms: list[Platform], config: BuildConfig, version: str, verbose: bool = False,
) -> list[PlatformResult]:
    """Build all platforms (in parallel when more than one) under a live display.

    Returns one PlatformResult per platform. *version* is not used in this
    function's visible body; artifact naming derives the version internally.
    """
    (root / OUT).mkdir(parents=True, exist_ok=True)

    # Reset shared progress state from any previous invocation.
    _build_progress.clear()
    _build_logs.clear()
    for p in platforms:
        _update_phase(p.alias, "waiting", 0)

    results: list[PlatformResult] = []
    completed: dict[str, PlatformResult] = {}
    start_times: dict[str, float] = {p.alias: time.monotonic() for p in platforms}

    # Size the log pane to whatever vertical space the status table leaves.
    term_height = console.size.height
    log_max_lines = max(term_height - len(platforms) - 10, 5)

    def make_display() -> Layout:
        return _build_display(platforms, config, completed, start_times, log_max_lines)

    with Live(make_display(), console=console, refresh_per_second=4) as live:
        if len(platforms) == 1:
            # Single platform: build inline, no thread pool needed.
            p = platforms[0]
            r = build_platform(root, p, config)
            completed[p.alias] = r
            results.append(r)
            live.update(make_display())
        else:
            with ThreadPoolExecutor(max_workers=len(platforms)) as pool:
                futures = {pool.submit(build_platform, root, p, config): p for p in platforms}
                pending = set(futures.keys())
                # Poll instead of blocking on wait() so the display keeps
                # refreshing while builds run.
                while pending:
                    done = {f for f in pending if f.done()}
                    for f in done:
                        r = f.result()
                        completed[r.platform.alias] = r
                        results.append(r)
                        pending.discard(f)
                    live.update(make_display())
                    if pending:
                        time.sleep(0.25)

    # After the live display closes, dump per-platform logs (failures only
    # unless verbose).
    for r in results:
        _print_platform_log(r, verbose)

    return results
|
||||
|
||||
|
||||
def _print_platform_log(r: PlatformResult, verbose: bool = False) -> None:
    """Print a platform's captured log; successes are shown only in verbose mode."""
    if r.success and not verbose:
        return
    body = "\n".join(r.log_lines) if r.log_lines else "[dim]no output[/]"
    outcome = "OK" if r.success else "FAIL"
    console.print(Panel(
        body,
        title=f"{r.platform.label} [{outcome}] {r.elapsed:.1f}s",
        border_style="green" if r.success else "red",
    ))
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# CLI & interactive mode
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def prompt_platforms(platforms: list[Platform], alias_map: dict[str, Platform]) -> list[Platform]:
    """Interactively pick platforms; "all" or an empty selection means everything."""
    options = [questionary.Choice("All platforms", value="all", checked=True)]
    options.extend(questionary.Choice(p.label, value=p.alias) for p in platforms)
    picked = questionary.checkbox("Select platforms:", choices=options).ask()
    if picked is None:
        # Prompt was cancelled (Ctrl-C / Esc).
        sys.exit(0)
    if not picked or "all" in picked:
        return list(platforms)
    return [alias_map[a] for a in picked]
|
||||
|
||||
|
||||
def prompt_targets() -> BuildConfig:
    """Interactively pick build targets; an empty selection enables everything."""
    options = [
        questionary.Choice("cagire (CLI)", value="cli", checked=True),
        questionary.Choice("cagire-desktop", value="desktop", checked=True),
        questionary.Choice("cagire-plugins (CLAP/VST3)", value="plugins", checked=True),
    ]
    picked = questionary.checkbox("Select targets:", choices=options).ask()
    if picked is None:
        # Prompt was cancelled (Ctrl-C / Esc).
        sys.exit(0)
    if not picked:
        # Nothing ticked: fall back to the all-on default config.
        return BuildConfig()
    return BuildConfig(
        cli="cli" in picked,
        desktop="desktop" in picked,
        plugins="plugins" in picked,
    )
|
||||
|
||||
|
||||
def confirm_summary(platforms: list[Platform], config: BuildConfig) -> None:
    """Show the chosen platforms/targets and exit unless the user confirms."""
    console.print("[bold]Platforms:[/]")
    for p in platforms:
        console.print(f" [cyan]{p.label}[/]")
    console.print("[bold]Targets:[/]")
    for enabled, name in (
        (config.cli, "cagire"),
        (config.desktop, "cagire-desktop"),
        (config.plugins, "cagire-plugins"),
    ):
        if enabled:
            console.print(f" {name}")
    console.print()

    if not questionary.confirm("Proceed?", default=True).ask():
        console.print("[dim]Aborted.[/]")
        sys.exit(0)
|
||||
|
||||
|
||||
def print_results(results: list[PlatformResult], wall_time: float) -> None:
    """Render the final per-platform summary table and the overall tally."""
    table = Table(title="Results", title_style="bold")
    table.add_column("Platform", style="cyan", min_width=26)
    table.add_column("Status", justify="center")
    table.add_column("Time", justify="right")
    table.add_column("Artifacts")

    succeeded = 0
    for r in results:
        if r.success:
            succeeded += 1
            status = "[green]OK[/]"
            detail = ", ".join(Path(a).name for a in r.artifacts) or "no artifacts"
        else:
            status = "[red]FAIL[/]"
            detail = f"[red]{r.error or 'unknown error'}[/]"
        table.add_row(r.platform.label, status, f"{r.elapsed:.1f}s", detail)

    console.print(table)

    total = len(results)
    color = "green" if succeeded == total else "red"
    console.print(f"\n[{color}]{succeeded}/{total}[/] succeeded in [bold]{wall_time:.1f}s[/] (wall clock)")
|
||||
|
||||
|
||||
def resolve_cli_platforms(raw: str, alias_map: dict[str, Platform]) -> list[Platform]:
    """Map a comma-separated alias list to Platforms, exiting on unknown names."""
    chosen: list[Platform] = []
    for token in raw.split(","):
        alias = token.strip()
        try:
            chosen.append(alias_map[alias])
        except KeyError:
            console.print(f"[red]Unknown platform:[/] {alias}")
            console.print(f"Valid: {', '.join(alias_map.keys())}")
            sys.exit(1)
    return chosen
|
||||
|
||||
|
||||
def resolve_cli_targets(raw: str) -> BuildConfig:
|
||||
cfg = BuildConfig(cli=False, desktop=False, plugins=False)
|
||||
for t in raw.split(","):
|
||||
t = t.strip()
|
||||
if t == "cli":
|
||||
cfg.cli = True
|
||||
elif t == "desktop":
|
||||
cfg.desktop = True
|
||||
elif t == "plugins":
|
||||
cfg.plugins = True
|
||||
else:
|
||||
console.print(f"[red]Unknown target:[/] {t} (expected: cli, desktop, plugins)")
|
||||
sys.exit(1)
|
||||
return cfg
|
||||
|
||||
|
||||
def check_git_clean(root: Path) -> tuple[str, bool]:
    """Return (short SHA, is_clean) for the repository at *root*."""
    def git(*args: str) -> str:
        # Helper: run a git query in *root* and return its stripped output.
        return subprocess.check_output(["git", *args], text=True, cwd=root).strip()

    sha = git("rev-parse", "--short", "HEAD")
    porcelain = git("status", "--porcelain")
    return sha, not porcelain
|
||||
|
||||
|
||||
def check_prerequisites(platforms: list[Platform], config: BuildConfig) -> None:
    """Verify required tools are available, fail fast if not."""
    # Decide which tools this particular platform/target selection needs.
    checks: list[tuple[str, bool]] = [("cargo", True)]
    if any(p.cross for p in platforms):
        checks.append(("cross", True))
    if any(p.cross and p.os == "linux" for p in platforms):
        checks.append(("docker", True))
    if config.desktop and any(not p.cross and p.os == "macos" for p in platforms):
        checks.append(("cargo-bundle", True))
    if any(p.os == "windows" for p in platforms):
        checks.append(("makensis", False))

    console.print("[bold]Prerequisites:[/]")
    missing_critical: list[str] = []
    for tool, critical in checks:
        if shutil.which(tool):
            status = "[green]found[/]"
        elif critical:
            missing_critical.append(tool)
            status = "[red]MISSING[/]"
        else:
            status = "[yellow]missing (optional)[/]"
        console.print(f" {tool}: {status}")

    if missing_critical:
        console.print(f"\n[red]Missing critical tools: {', '.join(missing_critical)}[/]")
        sys.exit(1)
    console.print()
|
||||
|
||||
|
||||
def write_checksums(results: list[PlatformResult], out_dir: Path) -> Path:
|
||||
"""Write SHA256 checksums for all artifacts."""
|
||||
lines: list[str] = []
|
||||
for r in results:
|
||||
if not r.success:
|
||||
continue
|
||||
for artifact_path in r.artifacts:
|
||||
p = Path(artifact_path)
|
||||
if p.is_dir():
|
||||
continue
|
||||
h = hashlib.sha256(p.read_bytes()).hexdigest()
|
||||
lines.append(f"SHA256 ({p.name}) = {h}")
|
||||
lines.sort()
|
||||
checksum_file = out_dir / "checksums.sha256"
|
||||
checksum_file.write_text("\n".join(lines) + "\n")
|
||||
return checksum_file
|
||||
|
||||
|
||||
def main() -> None:
    """CLI entry point: parse args, select platforms/targets, build, summarise."""
    parser = argparse.ArgumentParser(description="Cagire release builder")
    parser.add_argument("--platforms", help="Comma-separated: macos-arm64,macos-x86_64,linux-x86_64,linux-aarch64,windows-x86_64")
    parser.add_argument("--targets", help="Comma-separated: cli,desktop,plugins")
    parser.add_argument("--all", action="store_true", help="Build all platforms and targets")
    parser.add_argument("--yes", action="store_true", help="Skip confirmation prompt")
    parser.add_argument("--verbose", "-v", action="store_true", help="Show build logs for all platforms (not just failures)")
    parser.add_argument("--force", action="store_true", help="Allow building from a dirty git tree")
    parser.add_argument("--no-checksums", action="store_true", help="Skip SHA256 checksum generation")
    args = parser.parse_args()

    # Anchor all paths at the repo root, wherever the script is invoked from.
    root = Path(subprocess.check_output(["git", "rev-parse", "--show-toplevel"], text=True).strip())

    all_platforms = load_platforms(root)
    alias_map = {p.alias: p for p in all_platforms}

    version = get_version(root)
    sha, clean = check_git_clean(root)
    dirty_tag = "" if clean else ", dirty"
    console.print(Panel(f"Cagire [bold]{version}[/] ({sha}{dirty_tag}) — release builder", style="blue"))

    # Refuse to cut a release from uncommitted changes unless forced.
    if not clean and not args.force:
        console.print("[red]Working tree is dirty. Commit your changes or use --force.[/]")
        sys.exit(1)

    # Selection precedence: --all, then explicit flags, then interactive prompts.
    if args.all:
        platforms = list(all_platforms)
        config = BuildConfig()
    elif args.platforms or args.targets:
        platforms = resolve_cli_platforms(args.platforms, alias_map) if args.platforms else list(all_platforms)
        config = resolve_cli_targets(args.targets) if args.targets else BuildConfig()
    else:
        platforms = prompt_platforms(all_platforms, alias_map)
        config = prompt_targets()

    # Only the fully-interactive path gets a confirmation screen.
    if not args.yes and not args.all and not (args.platforms or args.targets):
        confirm_summary(platforms, config)

    check_prerequisites(platforms, config)

    t0 = time.monotonic()
    results = run_builds(root, platforms, config, version, verbose=args.verbose)
    wall_time = time.monotonic() - t0

    print_results(results, wall_time)

    if not args.no_checksums and any(r.success for r in results):
        checksum_file = write_checksums(results, root / OUT)
        console.print(f"[green]Checksums written to {checksum_file}[/]")

    # Non-zero exit if any platform failed, so CI can detect it.
    if any(not r.success for r in results):
        sys.exit(1)
|
||||
|
||||
|
||||
# Script entry point guard.
if __name__ == "__main__":
    main()
|
||||
Reference in New Issue
Block a user