From 4bba1d3c5ebe532029a98950427b8f968046facd Mon Sep 17 00:00:00 2001 From: Igor Date: Fri, 2 Jan 2026 16:48:06 +0100 Subject: [PATCH] Repo tools: add parallel repository generation support (#9150) **Repository tools: enable parallel repo generation and refactor repo.sh** Add support for parallel repository generation using GitHub Actions, allowing multiple workers to build different releases concurrently without database locking conflicts. ### Highlights - Add `-R/--single-release`, `update-main`, and `merge` for CI-level parallelism - Use isolated per-release databases; remove local `-j/--parallel-jobs` - Sign all Release files; optimize signing order - Add `extract-repo.sh` for extracting and reorganizing packages from repositories - Replace file-based logging with syslog (`logger`), removing sudo requirement - Improve error handling, signing logic, and robustness - Refactor repo.sh for better readability and documentation - Apply CodeRabbit-recommended fixes Signed-off-by: Igor Pecovnik --- tools/repository/extract-repo.sh | 355 ++++++++++ tools/repository/repo.sh | 1092 ++++++++++++++++++++++++++++++ 2 files changed, 1447 insertions(+) create mode 100755 tools/repository/extract-repo.sh create mode 100755 tools/repository/repo.sh diff --git a/tools/repository/extract-repo.sh b/tools/repository/extract-repo.sh new file mode 100755 index 000000000..c5f0782d5 --- /dev/null +++ b/tools/repository/extract-repo.sh @@ -0,0 +1,355 @@ +#!/usr/bin/env bash + +# Extract packages from an existing Debian repository and organize them +# into the input structure expected by repo.sh +# +# Expected input structure: +# /root/*.deb -> main component (common across releases) +# /extra/{release}-utils/*.deb -> release-specific utils +# /extra/{release}-desktop/*.deb -> release-specific desktop + +set -e + +# Default values +REPO_URL="" +OUTPUT_DIR="" +RELEASES=() +VERBOSE=false +DRY_RUN=false + +# Logging +log() { + echo "[$(date +'%Y-%m-%d %H:%M:%S')] $*" +} + +log_verbose() { + 
if [[ "$VERBOSE" == true ]]; then + log "$*" + fi +} + +# Display help +show_help() { + cat << EOF +Extract packages from an existing Debian repository and organize them +for use as input to repo.sh + +Usage: $0 -u -o [options] + +Required: + -u, --url Repository path (local directory) + -o, --output Output directory for extracted packages + +Optional: + -r, --releases Comma-separated list of releases to extract + (default: auto-detect from repository) + -v, --verbose Verbose output + --dry-run Show what would be done without actually doing it + -h, --help Show this help + +Output Structure: + {output_dir}/ + *.deb -> Packages for 'main' component + extra/ + {release}-utils/ -> Release-specific utils packages + *.deb + {release}-desktop/ -> Release-specific desktop packages + *.deb + +Examples: + # Extract from local repository + $0 -u /path/to/repo/public -o /tmp/extracted + + # Extract specific releases only + $0 -u /path/to/repo/public -o /tmp/extracted -r jammy,noble,bookworm + + # Dry-run to see what would be extracted + $0 -u /path/to/repo/public -o /tmp/extracted --dry-run + +EOF + exit 0 +} + +# Parse command line arguments +parse_args() { + while [[ $# -gt 0 ]]; do + case "$1" in + -u|--url) + REPO_URL="$2" + shift 2 + ;; + -o|--output) + OUTPUT_DIR="$2" + shift 2 + ;; + -r|--releases) + IFS=',' read -r -a RELEASES <<< "$2" + shift 2 + ;; + -v|--verbose) + VERBOSE=true + shift + ;; + --dry-run) + DRY_RUN=true + shift + ;; + -h|--help) + show_help + ;; + *) + echo "Unknown option: $1" + echo "Use -h or --help for usage information" + exit 1 + ;; + esac + done + + # Validate required arguments + if [[ -z "$REPO_URL" ]]; then + echo "Error: Repository URL is required" + echo "Use -h or --help for usage information" + exit 1 + fi + + if [[ -z "$OUTPUT_DIR" ]]; then + echo "Error: Output directory is required" + echo "Use -h or --help for usage information" + exit 1 + fi + + if [[ ! 
-d "$REPO_URL" ]]; then + echo "Error: Repository path does not exist: $REPO_URL" + exit 1 + fi +} + +# Detect releases from repository +detect_releases() { + local repo_base="$1" + + log "Detecting releases from repository..." + + if [[ -d "$repo_base/dists" ]]; then + # Capture find/basename output and check for errors + local releases_output + local releases_exit_code + releases_output=$(find "$repo_base/dists" -maxdepth 1 -type d -not -name "dists" -exec basename {} \; 2>&1 | sort) + releases_exit_code=$? + + if [[ $releases_exit_code -ne 0 ]]; then + log "Error: Failed to detect releases (find exit code: $releases_exit_code)" >&2 + log "Output: $releases_output" >&2 + DETECTED_RELEASES=() + return 1 + fi + + # Check if output is non-empty before feeding to mapfile + if [[ -n "$releases_output" ]]; then + mapfile -t DETECTED_RELEASES <<< "$releases_output" + else + DETECTED_RELEASES=() + fi + else + DETECTED_RELEASES=() + fi + + if [[ ${#DETECTED_RELEASES[@]} -eq 0 ]]; then + log "Warning: Could not auto-detect releases" + DETECTED_RELEASES=() + else + log "Detected releases: ${DETECTED_RELEASES[*]}" + fi +} + +# Get package list from Packages file +get_packages_from_component() { + local repo_base="$1" + local release="$2" + local component="$3" + + # Find Packages file - try different architectures + local component_dir="$repo_base/dists/$release/$component" + + if [[ ! -d "$component_dir" ]]; then + return + fi + + # Find any Packages file in the component directory + local packages_file=$(find "$component_dir" -type f -name "Packages" | head -1) + + if [[ -n "$packages_file" && -f "$packages_file" ]]; then + # Extract package filenames from Packages file + grep -E '^Filename:' "$packages_file" | sed 's/Filename: //' || true + fi +} + +# Extract packages from repository +extract_packages() { + local repo_base="$1" + local output_base="$2" + + log "Starting package extraction..." 
+ log "Repository: $repo_base" + log "Output: $output_base" + + # Create output directories + if [[ "$DRY_RUN" == false ]]; then + mkdir -p "$output_base/extra" + fi + + local total_packages=0 + local copied_packages=0 + local skipped_packages=0 + local error_packages=0 + + # Determine which releases to process + local releases_to_process=() + if [[ ${#RELEASES[@]} -gt 0 ]]; then + releases_to_process=("${RELEASES[@]}") + else + if [[ ${#DETECTED_RELEASES[@]} -eq 0 ]]; then + detect_releases "$repo_base" + fi + releases_to_process=("${DETECTED_RELEASES[@]}") + fi + + if [[ ${#releases_to_process[@]} -eq 0 ]]; then + log "Error: No releases found to process" + exit 1 + fi + + log "Processing releases: ${releases_to_process[*]}" + + # Process each release + for release in "${releases_to_process[@]}"; do + log "Processing release: $release" + + # Define components to check + # 'main' component has packages that go to root + # '{release}-utils' and '{release}-desktop' have release-specific packages + local components=("main" "${release}-utils" "${release}-desktop") + + for component in "${components[@]}"; do + log_verbose "Processing component: $release/$component" + + # Get package list + mapfile -t packages < <(get_packages_from_component "$repo_base" "$release" "$component") + + if [[ ${#packages[@]} -eq 0 ]]; then + log_verbose "No packages found for $release/$component" + continue + fi + + log "Found ${#packages[@]} packages in $release/$component" + + # Process each package + for package_path in "${packages[@]}"; do + ((total_packages++)) || true + + local package_name=$(basename "$package_path") + local source_path="$repo_base/$package_path" + + # Determine target directory based on component + local target_dir="" + if [[ "$component" == "main" ]]; then + # Main component packages go to root + target_dir="$output_base" + else + # Release-specific components go to extra/ + target_dir="$output_base/extra/$component" + if [[ "$DRY_RUN" == false ]]; then + mkdir -p 
"$target_dir" + fi + fi + + local target_path="$target_dir/$package_name" + + # Copy package + if [[ "$DRY_RUN" == true ]]; then + log "[DRY-RUN] Would copy: $package_name -> $target_dir" + ((copied_packages++)) || true + else + if [[ -f "$source_path" ]]; then + # Check if file already exists and is identical + if [[ -f "$target_path" ]]; then + # Compare files + if cmp -s "$source_path" "$target_path"; then + log_verbose "Skipping (identical): $package_name" + ((skipped_packages++)) || true + else + log_verbose "Copying (updated): $package_name" + # Try hard link first, fall back to copy + cp -l "$source_path" "$target_path" 2>/dev/null || cp "$source_path" "$target_path" + ((copied_packages++)) || true + fi + else + log_verbose "Copying: $package_name" + # Try hard link first, fall back to copy + cp -l "$source_path" "$target_path" 2>/dev/null || cp "$source_path" "$target_path" + ((copied_packages++)) || true + fi + else + log "Warning: Source file not found: $source_path" + ((error_packages++)) || true + fi + fi + done + done + done + + # Print summary + log "==========================================" + log "Extraction complete!" 
+ log "Total packages found: $total_packages" + log "Packages copied: $copied_packages" + log "Packages skipped: $skipped_packages" + if [[ $error_packages -gt 0 ]]; then + log "Packages with errors: $error_packages" + fi + log "Output directory: $output_base" + log "==========================================" + + # Show output structure + if [[ "$DRY_RUN" == false ]] && [[ -d "$output_base" ]]; then + log "" + log "Output structure:" + find "$output_base" -maxdepth 2 -type d | sed 's|'"$output_base"'||' | sort | while read -r dir; do + if [[ -n "$dir" ]]; then + local count=$(find "$output_base$dir" -maxdepth 1 -name "*.deb" 2>/dev/null | wc -l) + if [[ $count -gt 0 ]]; then + log " $dir: $count packages" + fi + fi + done + fi +} + +# Main execution +main() { + parse_args "$@" + + # Normalize repository URL + local repo_base="$REPO_URL" + # Remove trailing slash + repo_base="${repo_base%/}" + + # Auto-detect releases if not specified + if [[ ${#RELEASES[@]} -eq 0 ]]; then + detect_releases "$repo_base" + RELEASES=("${DETECTED_RELEASES[@]}") + fi + + log "Repository extraction configuration:" + log " Source: $repo_base" + log " Output: $OUTPUT_DIR" + log " Releases: ${RELEASES[*]:-auto-detect}" + log " Dry-run: $DRY_RUN" + log "" + + # Perform extraction + extract_packages "$repo_base" "$OUTPUT_DIR" +} + +# Run main function +main "$@" diff --git a/tools/repository/repo.sh b/tools/repository/repo.sh new file mode 100755 index 000000000..dddac3950 --- /dev/null +++ b/tools/repository/repo.sh @@ -0,0 +1,1092 @@ +#!/usr/bin/env bash + +# Global variables +DRY_RUN=false # Full dry-run: don't make any repository changes +KEEP_SOURCES=false # Keep source packages when adding to repo (don't delete) +SINGLE_RELEASE="" # Process only a single release (for GitHub Actions parallel workflow) +FORCE_ADD=false # Force re-adding packages even if they already exist in repo +FORCE_PUBLISH=false # Force publishing even when no packages to add + +# Logging function - uses syslog, 
view logs with: journalctl -t repo-management -f
+# Arguments:
+#   $* - Message to log
+log() {
+    logger -t repo-management "$*"
+}
+
+# Execute a command, respecting dry-run mode.
+# In dry-run mode, logs what would be executed without actually running it.
+# Arguments:
+#   $* - Command to execute (run through eval; callers pass trusted input only)
+# Returns:
+#   Command exit status (0 in dry-run mode)
+run_cmd() {
+    local cmd="$*"
+    if [[ "$DRY_RUN" == true ]]; then
+        log "[DRY-RUN] Would execute: $cmd"
+        return 0
+    fi
+    log "Executing: $cmd"
+    eval "$cmd"
+}
+
+# Execute an aptly command and check for errors.
+# Exits with status 1 if the command fails (unless in dry-run mode).
+# NOTE(review): when this function is used in a pipeline (e.g.
+# "run_aptly ... | logger"), the 'exit 1' only terminates the pipeline's
+# subshell, not the script — verify call sites.
+# Arguments:
+#   $* - Aptly command to execute (without 'aptly' prefix)
+run_aptly() {
+    if [[ "$DRY_RUN" == true ]]; then
+        log "[DRY-RUN] Would execute: aptly $*"
+        return 0
+    fi
+
+    # FIX: previously 'local exit_code=$?' inside "if ! aptly ...; then"
+    # always captured 0 (the status of the *negated* command), so the logged
+    # exit code was wrong. Capture the real status with '|| exit_code=$?'.
+    local exit_code=0
+    aptly "$@" || exit_code=$?
+    if [[ $exit_code -ne 0 ]]; then
+        log "ERROR: aptly $* failed with exit code $exit_code"
+        exit 1
+    fi
+}
+
+# Drop published repositories that are no longer supported.
+# Arguments:
+#   $1 - "all" to drop every published repository, otherwise drop only the
+#        ones missing from config/distributions/*/support (excluding 'eos')
+drop_unsupported_releases() {
+    local supported_releases=()
+    local published_repos=()
+    local repos_to_drop=()
+
+    # Determine which releases should be kept
+    if [[ "$1" == "all" ]]; then
+        log "Cleanup: dropping all published repositories"
+        supported_releases=()
+    else
+        log "Cleanup: dropping unsupported releases"
+        # FIX: read results with mapfile instead of the unquoted arr=($(...))
+        # expansion, which was subject to word splitting and globbing
+        mapfile -t supported_releases < <(grep -rw config/distributions/*/support -ve 'eos' 2>/dev/null | cut -d"/" -f3 || true)
+    fi
+
+    # Currently published repositories; raw lines look like ". <distribution>".
+    # FIX: anchor the sed pattern — '.' is a regex wildcard and the old
+    # global "s/. //g" could also strip later "<char><space>" pairs.
+    mapfile -t published_repos < <(aptly publish list -config="${CONFIG}" --raw | sed "s/^\. //")
+
+    # Find repos to drop (published but not supported)
+    for repo in "${published_repos[@]}"; do
+        local should_keep=false
+        for supported in "${supported_releases[@]}"; do
+            [[ "$repo" == "$supported" ]] && { should_keep=true; break; }
+        done
+        [[ "$should_keep" == false ]] && repos_to_drop+=("$repo")
+    done
+
+    # Drop the identified repositories
+    for repo in "${repos_to_drop[@]}"; do
+        run_cmd aptly publish drop -config="${CONFIG}" "${repo}"
+    done
+}
+
+# Display contents of all repositories: the common repository plus the
+# per-release utils/desktop repositories. Uses the global DISTROS array.
+showall() {
+    echo "Displaying common repository contents"
+    aptly repo show -with-packages -config="${CONFIG}" common 2>/dev/null | tail -n +7
+
+    for release in "${DISTROS[@]}"; do
+        # Only show if the repo exists
+        if aptly repo show -config="${CONFIG}" "${release}-utils" &>/dev/null; then
+            echo "Displaying repository contents for $release-utils"
+            aptly repo show -with-packages -config="${CONFIG}" "${release}-utils" | tail -n +7
+        fi
+        if aptly repo show -config="${CONFIG}" "${release}-desktop" &>/dev/null; then
+            echo "Displaying repository contents for $release-desktop"
+            aptly repo show -with-packages -config="${CONFIG}" "${release}-desktop" | tail -n +7
+        fi
+    done
+}
+
+# Add packages to an aptly repository component.
+# Processes .deb files from a source directory, optionally repacking BSP
+# packages to pin kernel versions, then adds them to the repository.
+# Arguments:
+#   $1 - Repository component name (e.g., "common", "jammy-utils")
+#   $2 - Subdirectory path relative to input folder (e.g., "", "/extra/jammy-utils")
+#   $3 - Description (unused, for documentation only)
+#   $4 - Base input folder containing packages
+adding_packages() {
+    local component="$1"
+    local subdir="$2"
+    local input_folder="$4"
+    local package_dir="${input_folder}${subdir}"
+
+    # Check if any .deb files exist in the directory
+    if ! 
find "$package_dir" -maxdepth 1 -type f -name "*.deb" 2> /dev/null | grep -q .; then
+        return 0
+    fi
+
+    # Get list of packages already in repo for deduplication check.
+    # Associative array gives O(1) lookup per candidate package.
+    local -A repo_packages_map
+    if [[ "$FORCE_ADD" != true ]]; then
+        log "Building package list from $component for deduplication check..."
+        # aptly prints packages as "name_version_arch" with leading spaces;
+        # the version may itself contain underscores, so split from the
+        # outside in: arch = after the LAST "_", name = before the FIRST "_",
+        # version = what remains between them.
+        while IFS= read -r line; do
+            [[ -z "$line" ]] && continue
+            line="${line#"${line%%[![:space:]]*}"}"
+            line="${line%"${line##*[![:space:]]}"}"
+            [[ -z "$line" ]] && continue
+            local name version arch
+            arch="${line##*_}"
+            local temp="${line%_*}"
+            name="${temp%%_*}"
+            version="${temp#*_}"
+            [[ -z "$name" || -z "$version" || -z "$arch" ]] && continue
+            repo_packages_map["${name}|${version}|${arch}"]=1
+        done < <(aptly repo show -with-packages -config="${CONFIG}" "$component" 2>/dev/null | tail -n +7)
+        log "Built lookup map with ${#repo_packages_map[@]} unique packages in $component"
+    fi
+
+    # Process each .deb file
+    for deb_file in "${package_dir}"/*.deb; do
+        # Query all control fields in one dpkg-deb call.
+        # FIX: parse by field NAME instead of line position — dpkg-deb omits
+        # fields that are absent, so the old '1s/Package: //p' style parsing
+        # could assign a value to the wrong variable.
+        local deb_info deb_name deb_version deb_arch
+        deb_info=$(dpkg-deb -f "$deb_file" Package Version Architecture 2>/dev/null)
+        deb_name=$(sed -n 's/^Package: //p' <<< "$deb_info")
+        deb_version=$(sed -n 's/^Version: //p' <<< "$deb_info")
+        deb_arch=$(sed -n 's/^Architecture: //p' <<< "$deb_info")
+
+        # Pipe separator cannot occur in package names/versions/architectures
+        local deb_key="${deb_name}|${deb_version}|${deb_arch}"
+        local deb_display="${deb_name}_${deb_version}_${deb_arch}"
+
+        log "Checking package: $deb_display"
+
+        # Skip if this exact name+version+arch is already in the repo
+        # (unless FORCE_ADD is enabled)
+        if [[ "$FORCE_ADD" != true && -n "${repo_packages_map[$deb_key]}" ]]; then
+            echo "[-] SKIP: $deb_display"
+            log "SKIP: $deb_display already in $component"
+            continue
+        fi
+
+        # Repack BSP packages if the last-known-good kernel map exists; this
+        # prevents upgrading to kernels that may break the board.
+        # FIX: the existence test and the loop redirect previously referenced
+        # two DIFFERENT files (last-known-good.map vs
+        # last-known-good-kernel-pkg.map), so the redirect failed whenever only
+        # the first file existed. Both now use the file the loop actually
+        # reads — TODO confirm this is the intended userpatches filename.
+        if [[ -f userpatches/last-known-good-kernel-pkg.map ]]; then
+            local package_name
+            package_name="$(dpkg-deb -W "$deb_file" | awk '{print $1}')"
+
+            # Read kernel pinning mappings: board|branch|family|last_kernel
+            while IFS='|' read -r board branch linux_family last_kernel; do
+                if [[ "${package_name}" == "armbian-bsp-cli-${board}-${branch}" ]]; then
+                    echo "Setting last kernel upgrade for $board to linux-image-$branch-$board=${last_kernel}"
+
+                    # Extract, modify control file, and repackage
+                    local tempdir
+                    tempdir="$(mktemp -d)"
+                    dpkg-deb -R "$deb_file" "$tempdir"
+                    sed -i "s/^Replaces:.*/&, linux-image-${branch}-${linux_family} (>> ${last_kernel}), linux-dtb-${branch}-${linux_family} (>> ${last_kernel})/" "$tempdir/DEBIAN/control"
+                    dpkg-deb -b "$tempdir" "$deb_file" >/dev/null
+                    rm -rf "$tempdir"
+                fi
+            done < userpatches/last-known-good-kernel-pkg.map
+        fi
+
+        # Preserve source packages in KEEP_SOURCES or DRY_RUN mode.
+        # NOTE(review): the call site passes "$remove_flag" quoted; when empty
+        # this sends an empty argv entry to aptly — verify aptly tolerates it.
+        local remove_flag="-remove-files"
+        if [[ "$KEEP_SOURCES" == true ]] || [[ "$DRY_RUN" == true ]]; then
+            remove_flag=""
+        fi
+ + # Add package to repository + log "Adding $deb_name to $component" + run_aptly repo add "$remove_flag" -force-replace -config="${CONFIG}" "${component}" "${deb_file}" + done +} + + +# Build the common (main) repository component +# Creates/updates the common repository that contains packages shared across all releases +# Should be run once before processing individual releases in parallel +# Arguments: +# $1 - Input folder containing packages +# $2 - Output folder for published repository +# $3 - GPG password for signing (currently unused, signing is done separately) +update_main() { + local input_folder="$1" + local output_folder="$2" + local gpg_password="$3" + + log "Building common (main) component" + + # Create common repo if it doesn't exist + if [[ -z $(aptly repo list -config="${CONFIG}" -raw | awk '{print $(NF)}' | grep common) ]]; then + run_aptly repo create -config="${CONFIG}" -distribution="common" -component="main" -comment="Armbian common packages" "common" | logger -t repo-management >/dev/null + fi + + # Add packages from main folder + adding_packages "common" "" "main" "$input_folder" + + # Drop old snapshot + if [[ -n $(aptly snapshot list -config="${CONFIG}" -raw | awk '{print $(NF)}' | grep "common") ]]; then + run_aptly -config="${CONFIG}" snapshot drop common | logger -t repo-management >/dev/null + fi + + # Create new snapshot + run_aptly -config="${CONFIG}" snapshot create common from repo common | logger -t repo-management >/dev/null + + log "Common component built successfully" +} + +# Process a single release distribution +# Creates/updates release-specific repositories (utils, desktop), publishes them, +# and signs the Release files. Can be run in parallel for different releases. 
+# Arguments: +# $1 - Release name (e.g., "jammy", "noble") +# $2 - Input folder containing packages +# $3 - Output folder for published repository +# $4 - GPG password for signing +process_release() { + local release="$1" + local input_folder="$2" + local output_folder="$3" + local gpg_password="$4" + + log "Processing release: $release" + + # In isolated mode (SINGLE_RELEASE), ensure common snapshot exists + # It should have been created by 'update-main' command, but if not, create empty common + if [[ -n "$SINGLE_RELEASE" && -z $(aptly snapshot list -config="${CONFIG}" -raw | awk '{print $(NF)}' | grep "common") ]]; then + log "WARNING: Common snapshot not found. Creating empty common snapshot." + log "Please run 'update-main' command first to populate common packages." + + # Create empty common repo + if [[ -z $(aptly repo list -config="${CONFIG}" -raw | awk '{print $(NF)}' | grep common) ]]; then + aptly repo create -config="${CONFIG}" -distribution="common" -component="main" -comment="Armbian common packages" "common" | logger -t repo-management >/dev/null + fi + + # Create snapshot (will be empty until update-main is run) + aptly -config="${CONFIG}" snapshot create common from repo common | logger -t repo-management >/dev/null + fi + + # Create release-specific repositories if they don't exist + if [[ -z $(aptly repo list -config="${CONFIG}" -raw | awk '{print $(NF)}' | grep "${release}-utils") ]]; then + run_aptly repo create -config="${CONFIG}" -component="${release}-utils" -distribution="${release}" -comment="Armbian ${release}-utils repository" "${release}-utils" | logger -t repo-management >/dev/null + fi + if [[ -z $(aptly repo list -config="${CONFIG}" -raw | awk '{print $(NF)}' | grep "${release}-desktop") ]]; then + run_aptly repo create -config="${CONFIG}" -component="${release}-desktop" -distribution="${release}" -comment="Armbian ${release}-desktop repository" "${release}-desktop" | logger -t repo-management >/dev/null + fi + + # Add packages ONLY 
from release-specific extra folders + adding_packages "${release}-utils" "/extra/${release}-utils" "release utils" "$input_folder" + adding_packages "${release}-desktop" "/extra/${release}-desktop" "release desktop" "$input_folder" + + # Run db cleanup before publishing to remove unreferenced packages + # This helps avoid "file already exists and is different" errors + log "Running database cleanup before publishing" + run_aptly db cleanup -config="${CONFIG}" + + # Check if we have any packages to publish + # Get package counts in each repo + local utils_count=$(aptly repo show -config="${CONFIG}" "${release}-utils" 2>/dev/null | grep "Number of packages" | awk '{print $4}' || echo "0") + local desktop_count=$(aptly repo show -config="${CONFIG}" "${release}-desktop" 2>/dev/null | grep "Number of packages" | awk '{print $4}' || echo "0") + + log "Package counts for $release: utils=$utils_count, desktop=$desktop_count" + + # If no packages in either repo and not previously published, skip publishing + # Unless FORCE_PUBLISH is enabled + if [[ "$utils_count" -eq 0 && "$desktop_count" -eq 0 && "$FORCE_PUBLISH" != true ]]; then + # Check if this release was previously published + if ! aptly publish list -config="${CONFIG}" 2>/dev/null | grep -q "^\[${release}\]"; then + log "No packages to publish for $release and not previously published. Skipping." + return 0 + else + log "No new packages but $release was previously published. Will publish with common only." 
+ fi + fi + + if [[ "$FORCE_PUBLISH" == true ]]; then + log "Force publish enabled: will publish even with no packages" + fi + + # Drop old snapshots if they exist + if [[ -n $(aptly snapshot list -config="${CONFIG}" -raw | awk '{print $(NF)}' | grep "${release}-utils") ]]; then + run_aptly -config="${CONFIG}" snapshot drop ${release}-utils | logger -t repo-management 2>/dev/null + fi + if [[ -n $(aptly snapshot list -config="${CONFIG}" -raw | awk '{print $(NF)}' | grep "${release}-desktop") ]]; then + run_aptly -config="${CONFIG}" snapshot drop ${release}-desktop | logger -t repo-management 2>/dev/null + fi + + # Create snapshots only for repos that have packages + # OR when FORCE_PUBLISH is enabled (then we publish whatever exists in the DB) + local components_to_publish=("main") + local snapshots_to_publish=("common") + + if [[ "$utils_count" -gt 0 || "$FORCE_PUBLISH" == true ]]; then + # Only create snapshot if repo has packages, or if force-publishing + if [[ "$utils_count" -gt 0 ]]; then + run_aptly -config="${CONFIG}" snapshot create ${release}-utils from repo ${release}-utils | logger -t repo-management >/dev/null + components_to_publish+=("${release}-utils") + snapshots_to_publish+=("${release}-utils") + elif [[ "$FORCE_PUBLISH" == true ]]; then + log "Force publish: checking for existing ${release}-utils snapshot in DB" + # Try to use existing snapshot if it exists + if [[ -n $(aptly snapshot list -config="${CONFIG}" -raw | awk '{print $(NF)}' | grep "${release}-utils") ]]; then + components_to_publish+=("${release}-utils") + snapshots_to_publish+=("${release}-utils") + log "Using existing ${release}-utils snapshot" + else + # Create empty snapshot from empty repo + run_aptly -config="${CONFIG}" snapshot create ${release}-utils from repo ${release}-utils | logger -t repo-management >/dev/null + components_to_publish+=("${release}-utils") + snapshots_to_publish+=("${release}-utils") + log "Created empty ${release}-utils snapshot for force publish" + fi + 
fi + fi + + if [[ "$desktop_count" -gt 0 || "$FORCE_PUBLISH" == true ]]; then + # Only create snapshot if repo has packages, or if force-publishing + if [[ "$desktop_count" -gt 0 ]]; then + run_aptly -config="${CONFIG}" snapshot create ${release}-desktop from repo ${release}-desktop | logger -t repo-management >/dev/null + components_to_publish+=("${release}-desktop") + snapshots_to_publish+=("${release}-desktop") + elif [[ "$FORCE_PUBLISH" == true ]]; then + log "Force publish: checking for existing ${release}-desktop snapshot in DB" + # Try to use existing snapshot if it exists + if [[ -n $(aptly snapshot list -config="${CONFIG}" -raw | awk '{print $(NF)}' | grep "${release}-desktop") ]]; then + components_to_publish+=("${release}-desktop") + snapshots_to_publish+=("${release}-desktop") + log "Using existing ${release}-desktop snapshot" + else + # Create empty snapshot from empty repo + run_aptly -config="${CONFIG}" snapshot create ${release}-desktop from repo ${release}-desktop | logger -t repo-management >/dev/null + components_to_publish+=("${release}-desktop") + snapshots_to_publish+=("${release}-desktop") + log "Created empty ${release}-desktop snapshot for force publish" + fi + fi + fi + + log "Publishing $release with components: ${components_to_publish[*]}" + + # Determine publish directory based on mode + local publish_dir="$output_folder" + if [[ -n "$SINGLE_RELEASE" ]]; then + publish_dir="$IsolatedRootDir" + fi + + # Publish - include common snapshot for main component + log "Publishing $release" + + # Drop existing publish for this release if it exists to avoid "file already exists" errors + if aptly publish list -config="${CONFIG}" 2>/dev/null | grep -q "^\[${release}\]"; then + log "Dropping existing publish for $release from isolated DB" + run_aptly publish drop -config="${CONFIG}" "${release}" + fi + + # When using isolated DB, only clean up the isolated DB's published files + # DO NOT clean up shared output - other parallel workers might be 
using it + # The rsync copy will overwrite as needed, preserving other releases' files + if [[ -n "$SINGLE_RELEASE" ]]; then + # Clean up isolated DB's published files only + if [[ -d "${IsolatedRootDir}/public/dists/${release}" ]]; then + log "Cleaning up existing published files for $release in isolated DB" + rm -rf "${IsolatedRootDir}/public/dists/${release}" + # Clean up pool entries for this release in isolated DB + find "${IsolatedRootDir}/public/pool" -type d -name "${release}-*" 2>/dev/null | xargs -r rm -rf + fi + fi + + # Build publish command with only components that have packages + local component_list=$(IFS=,; echo "${components_to_publish[*]}") + local snapshot_list="${snapshots_to_publish[*]}" + + log "Publishing with components: $component_list" + log "Publishing with snapshots: $snapshot_list" + + run_aptly publish \ + -skip-signing \ + -skip-contents \ + -architectures="armhf,arm64,amd64,riscv64,i386,loong64,all" \ + -passphrase="${gpg_password}" \ + -origin="Armbian" \ + -label="Armbian" \ + -config="${CONFIG}" \ + -component="$component_list" \ + -distribution="${release}" snapshot $snapshot_list > /dev/null + + # If using isolated DB, copy published files to shared output location FIRST + if [[ -n "$SINGLE_RELEASE" && "$publish_dir" != "$output_folder" ]]; then + log "Copying published files from isolated DB to shared output" + if [[ -d "${publish_dir}/public" ]]; then + mkdir -p "${output_folder}/public" + # Use rsync to copy published repo files to shared location + # NO --delete flag - we want to preserve other releases' files + # Enable pipefail to catch rsync failures even when piped to logger + set -o pipefail + if ! rsync -a "${publish_dir}/public/" "${output_folder}/public/" 2>&1 | logger -t repo-management; then + local rsync_exit_code=$? 
+ log "ERROR: Failed to copy published files for $release (rsync exit code: $rsync_exit_code)" + set +o pipefail # Restore default pipe behavior + return 1 + fi + set +o pipefail # Restore default pipe behavior + log "Copied files for $release to ${output_folder}/public/" + fi + fi + + # Sign Release files for this release + # This includes: + # 1. Top-level Release file (dists/{release}/Release) + # 2. Component-level Release files (dists/{release}/{component}/Release) + # Sign AFTER copying so signed files end up in the shared output location + log "Starting signing process for $release" + # Use shared output location for signing, not isolated directory + local release_pub_dir="${output_folder}/public/dists/${release}" + + # Get GPG keys from environment or use defaults + # Use BOTH keys for signing, just like the signing() function does + local gpg_keys=() + if [[ -n "$GPG_KEY" ]]; then + gpg_keys=("$GPG_KEY") + else + gpg_keys=("DF00FAF1C577104B50BF1D0093D6889F9F0E78D5" "8CFA83D13EB2181EEF5843E41EB30FAF236099FE") + fi + + local gpg_params=("--yes" "--armor") + local keys_found=0 + + # Add all available keys to GPG parameters + for gpg_key in "${gpg_keys[@]}"; do + # Try to find the actual key in the keyring + local actual_key="" + if gpg --list-secret-keys "$gpg_key" >/dev/null 2>&1; then + actual_key="$gpg_key" + else + # Try to find by email or partial match + actual_key=$(gpg --list-secret-keys --keyid-format LONG 2>/dev/null | grep -B1 "$gpg_key" | grep "sec" | awk '{print $2}' | cut -d'/' -f2 || echo "") + fi + + if [[ -n "$actual_key" ]]; then + gpg_params+=("-u" "$actual_key") + log "Adding GPG key for signing: $actual_key (requested: $gpg_key)" + ((keys_found++)) + else + log "WARNING: GPG key $gpg_key not found in keyring" + fi + done + + if [[ $keys_found -eq 0 ]]; then + log "ERROR: No GPG keys found in keyring" + log "Available keys:" + gpg --list-secret-keys --keyid-format LONG 2>&1 | logger -t repo-management + return 1 + fi + + log "Using 
$keys_found GPG key(s) for signing" + + # First, create component-level Release files by copying from binary-amd64 Release + # This is needed because aptly only creates Release files in binary-* subdirs + for component in main ${release}-utils ${release}-desktop; do + local component_dir="${release_pub_dir}/${component}" + if [[ -d "$component_dir" ]]; then + # Use the binary-amd64 Release file as the component Release file + local source_release="${component_dir}/binary-amd64/Release" + local target_release="${component_dir}/Release" + + if [[ -f "$source_release" && ! -f "$target_release" ]]; then + log "Creating component Release file: ${target_release}" + cp "$source_release" "$target_release" 2>&1 | logger -t repo-management + fi + fi + done + + # Now sign all Release files (both top-level and component-level) + # Find all Release files except those in binary-* subdirectories + find "${release_pub_dir}" -type f -name "Release" | while read -r release_file; do + # Skip binary-* subdirectories + if [[ "$release_file" =~ /binary-[^/]+/Release$ ]]; then + continue + fi + + log "Signing: ${release_file}" + local sign_dir="$(dirname "$release_file")" + + if gpg "${gpg_params[@]}" --clear-sign -o "${sign_dir}/InRelease" "$release_file" 2>&1 | logger -t repo-management >/dev/null; then + gpg "${gpg_params[@]}" --detach-sign -o "${sign_dir}/Release.gpg" "$release_file" 2>&1 | logger -t repo-management >/dev/null + log "Successfully signed: ${release_file}" + else + log "ERROR: Failed to sign: ${release_file}" + fi + done + + log "Completed processing release: $release" +} + +# Publish repositories for all configured releases +# Builds common component, processes each release, and finalizes the repository +# Arguments: +# $1 - Input folder containing packages +# $2 - Output folder for published repository +# $3 - Command name (unused, for compatibility) +# $4 - GPG password for signing +# $5 - Comma-separated list of releases (unused, determined from config) 
+publishing() { + # Only build common repo if NOT in single-release mode + # In single-release mode, common should be built separately with 'update-main' command + if [[ -z "$SINGLE_RELEASE" ]]; then + # This repository contains packages that are the same in all releases + if [[ -z $(aptly repo list -config="${CONFIG}" -raw | awk '{print $(NF)}' | grep common) ]]; then + run_aptly repo create -config="${CONFIG}" -distribution="common" -component="main" -comment="Armbian common packages" "common" | logger -t repo-management >/dev/null + fi + + # Add packages from main folder + adding_packages "common" "" "main" "$1" + + # Create snapshot + if [[ -n $(aptly snapshot list -config="${CONFIG}" -raw | awk '{print $(NF)}' | grep "common") ]]; then + run_aptly -config="${CONFIG}" snapshot drop common | logger -t repo-management >/dev/null + fi + run_aptly -config="${CONFIG}" snapshot create common from repo common | logger -t repo-management >/dev/null + else + # Single-release mode: ensure common snapshot exists (should be created by update-main) + if [[ -z $(aptly snapshot list -config="${CONFIG}" -raw | awk '{print $(NF)}' | grep "common") ]]; then + log "WARNING: Common snapshot not found. Run 'update-main' command first!" 
+ fi + fi + + # Get all distributions or use single release if specified + local distributions=() + if [[ -n "$SINGLE_RELEASE" ]]; then + distributions=("$SINGLE_RELEASE") + log "Single release mode: processing only $SINGLE_RELEASE" + else + distributions=($(grep -rw config/distributions/*/support -ve '' | cut -d"/" -f3)) + fi + + # Process releases sequentially + if [[ -n "$SINGLE_RELEASE" ]]; then + log "Processing single release: ${distributions[0]}" + else + log "Processing ${#distributions[@]} releases sequentially" + fi + for release in "${distributions[@]}"; do + process_release "$release" "$1" "$2" "$4" + done + + # Cleanup database + run_aptly db cleanup -config="${CONFIG}" + + # Copy GPG key to repository + mkdir -p "${2}"/public/ + # Remove existing key file if it exists to avoid permission issues + rm -f "${2}"/public/armbian.key + cp config/armbian.key "${2}"/public/ + + # Write repository sync control file + date +%s > "${2}/public/control" + + # Display repository contents + showall +} + + +# Sign repository Release files using GPG +# Creates InRelease and Release.gpg signature files for component-level Release files +# Arguments: +# $1 - Output folder path containing published repository +# $@ - GPG key IDs to use for signing +signing() { + local output_folder="$1" + shift + local gpg_keys=("$@") + + if [[ ${#gpg_keys[@]} -eq 0 ]]; then + echo "No GPG keys provided for signing." >&2 + return 1 + fi + + # Build GPG parameters with available keys + local gpg_params=("--yes" "--armor") + for key in "${gpg_keys[@]}"; do + # Try to find the actual key in the keyring + local actual_key="" + if gpg --list-secret-keys "$key" >/dev/null 2>&1; then + actual_key="$key" + else + # Try to find by email or partial match + actual_key=$(gpg --list-secret-keys --keyid-format LONG 2>/dev/null | grep -B1 "$key" | grep "sec" | awk '{print $2}' | cut -d'/' -f2 || echo "") + if [[ -z "$actual_key" ]]; then + echo "Warning: GPG key $key not found on this system." 
>&2 + continue + fi + fi + gpg_params+=("-u" "$actual_key") + echo "Using GPG key: $actual_key (requested: $key)" >&2 + done + + # Only sign Release files at component level, NOT binary subdirs + # Sign: dists/{release}/{component}/Release + # Skip: dists/{release}/Release (top-level, not needed) + # Skip: dists/{release}/*/binary-*/Release (subdirs, not needed) + find "$output_folder/public/dists" -type f -name Release | while read -r release_file; do + # Skip if file is in a binary-* subdirectory + if [[ "$release_file" =~ /binary-[^/]+/Release$ ]]; then + continue + fi + + # Skip top-level Release files (dists/{release}/Release) + # Only sign component-level Release files (dists/{release}/{component}/Release) + local rel_path="${release_file#$output_folder/public/dists/}" + # Count slashes - should have exactly 2 for component level: {release}/{component}/Release + local slash_count=$(echo "$rel_path" | tr -cd '/' | wc -c) + + if [[ $slash_count -eq 2 ]]; then + local distro_path + distro_path="$(dirname "$release_file")" + echo "Signing release at: $distro_path" | logger -t repo-management + gpg "${gpg_params[@]}" --clear-sign -o "$distro_path/InRelease" "$release_file" + gpg "${gpg_params[@]}" --detach-sign -o "$distro_path/Release.gpg" "$release_file" + fi + done +} + + +# Finalize repository after parallel GitHub Actions workers have built individual releases +# Workers have already built and signed repos in isolated databases, so this just +# ensures the GPG key and control file are in place +# Arguments: +# $1 - Base input folder (contains package sources, for consistency) +# $2 - Output folder containing combined repository +merge_repos() { + local input_folder="$1" + local output_folder="$2" + + log "Merge mode: finalizing combined repository" + log "Workers have already built and signed individual releases" + + # Repositories are already built and signed by parallel workers + # Just need to ensure the key and control file are in place + + # Copy GPG key 
to repository + mkdir -p "${output_folder}"/public/ + # Remove existing key file if it exists to avoid permission issues + rm -f "${output_folder}"/public/armbian.key + cp config/armbian.key "${output_folder}"/public/ + log "Copied GPG key to repository" + + # Write repository sync control file + sudo date +%s > "${output_folder}/public/control" + log "Updated repository control file" + + # Display repository contents + showall + + log "Merge complete - repository is ready" +} + + +# Main repository manipulation dispatcher +# Routes commands to appropriate repository management functions +# Arguments: +# $1 - Input folder containing packages +# $2 - Output folder for published repository +# $3 - Command to execute (update-main, serve, html, delete, show, unique, update, merge) +# $4 - GPG password for signing +# $5 - Comma-separated list of releases (used by some commands) +# $6 - List of packages to delete (used by delete command) +repo-manipulate() { + # Read comma-delimited distros into array + IFS=', ' read -r -a DISTROS <<< "$5" + + case "$3" in + + update-main) + # Build common (main) component - runs once before parallel workers + update_main "$1" "$2" "$4" + return 0 + ;; + + serve) + # Serve the published repository + # Since aptly serve requires published repos in its database, and we use + # direct file publishing, we'll use Python's HTTP server instead + local serve_ip=$(ip -f inet addr | grep -Po 'inet \K[\d.]+' | grep -v 127.0.0.1 | head -1) + if [[ -z "$serve_ip" ]]; then + log "WARNING: No external IP found, using 0.0.0.0" + serve_ip="0.0.0.0" + fi + local serve_port="${SERVE_PORT:-8080}" + + if [[ ! 
-d "$output/public" ]]; then + log "ERROR: No published repository found at $output/public" + log "Please run 'update' command first to create the repository" + return 1 + fi + + log "Starting HTTP server on ${serve_ip}:${serve_port}" + log "Serving from: $output/public" + log "Press Ctrl+C to stop" + log "" + log "Repository URL: http://${serve_ip}:${serve_port}" + log "" + + # Change to public directory and start HTTP server + cd "$output/public" || return 1 + if ! command -v python3 &> /dev/null; then + log "ERROR: python3 not found. Install python3 to use serve command." + return 1 + fi + python3 -m http.server "${serve_port}" --bind "${serve_ip}" + return 0 + ;; + + html) + cat tools/repository/header.html + for release in "${DISTROS[@]}"; do + echo "

$release

MainUtilsDesktop" + echo "" + aptly repo show -with-packages -config="${CONFIG}" "${release}-utils" | tail -n +7 | sed 's/.*/&
/'
+			# Plain echo for the row separator, matching the sibling rows above/below.
+			# The previous `echo "" | sudo tee -a $(unknown)` appended to the output of
+			# a nonexistent command `unknown` and needlessly invoked sudo.
+			echo ""
+			aptly repo show -with-packages -config="${CONFIG}" "${release}-desktop" | tail -n +7 | sed 's/.*/&
/' + echo "" + done + cat tools/repository/footer.html + return 0 + ;; + + delete) + echo "Deleting $6 from common" + aptly -config="${CONFIG}" repo remove common "$6" + for release in "${DISTROS[@]}"; do + echo "Deleting $6 from $release-utils" + aptly -config="${CONFIG}" repo remove "${release}-utils" "$6" + echo "Deleting $6 from $release-desktop" + aptly -config="${CONFIG}" repo remove "${release}-desktop" "$6" + done + return 0 + ;; + + show) + + showall + return 0 + + ;; + + unique) + # which package should be removed from all repositories + IFS=$'\n' + while true; do + LIST=() + LIST+=($(aptly repo show -with-packages -config="${CONFIG}" common | tail -n +7)) + for release in "${DISTROS[@]}"; do + LIST+=($(aptly repo show -with-packages -config="${CONFIG}" "${release}-utils" | tail -n +7)) + LIST+=($(aptly repo show -with-packages -config="${CONFIG}" "${release}-desktop" | tail -n +7)) + done + LIST=($(echo "${LIST[@]}" | tr ' ' '\n' | sort -u)) + new_list=() + # create a human readable menu + for ((n = 0; n < $((${#LIST[@]})); n++)); do + new_list+=("${LIST[$n]}") + new_list+=("") + done + LIST=("${new_list[@]}") + LIST_LENGTH=$((${#LIST[@]} / 2)) + exec 3>&1 + TARGET_VERSION=$(dialog --cancel-label "Cancel" --backtitle "BACKTITLE" --no-collapse --title \ + "Remove packages from repositories" --clear --menu "Delete" $((9 + LIST_LENGTH)) 82 65 "${LIST[@]}" 2>&1 1>&3) + exitstatus=$? 
+ exec 3>&- + if [[ $exitstatus -eq 0 ]]; then + aptly repo remove -config="${CONFIG}" "common" "$TARGET_VERSION" + for release in "${DISTROS[@]}"; do + aptly repo remove -config="${CONFIG}" "${release}-utils" "$TARGET_VERSION" + aptly repo remove -config="${CONFIG}" "${release}-desktop" "$TARGET_VERSION" + done + else + return 1 + fi + aptly db cleanup -config="${CONFIG}" > /dev/null 2>&1 + # remove empty folders + find "$2/public" -type d -empty -delete + done + ;; + + update) + # remove old releases from publishing + drop_unsupported_releases "all" + publishing "$1" "$2" "$3" "$4" "$5" + # Only use signing function for non-single-release mode + # In single-release mode, workers already signed their components + if [[ -z "$SINGLE_RELEASE" ]]; then + signing "$2" "DF00FAF1C577104B50BF1D0093D6889F9F0E78D5" "8CFA83D13EB2181EEF5843E41EB30FAF236099FE" + fi + ;; + + merge) + # Merge repositories from parallel per-release runs + # Workers have already signed their releases, just finalize + merge_repos "$1" "$2" + ;; + + *) + echo -e "Unknown command" + return 1 + ;; +esac +} + + +# defaults +input="output/debs-beta" +output="output/repository" +command="show" +if [[ -d "config/distributions" ]]; then + releases=$(grep -rw config/distributions/*/support -ve 'eos' 2>/dev/null | cut -d"/" -f3 | xargs | sed -e 's/ /,/g') + if [[ -z "$releases" ]]; then + log "WARNING: No releases found in config/distributions" + fi +else + log "WARNING: config/distributions directory not found" + releases="" +fi + +help() +{ +echo "Armbian wrapper for Aptly v1.0 + +(c) Igor Pecovnik, igor@armbian.com + +License: (MIT) + +Usage: $0 [ -short | --long ] + +-h --help displays this help +-i --input [input folder] input folder with packages +-o --output [output folder] output folder for repository +-p --password [GPG password] GPG password for signing +-r --repository [jammy,sid,bullseye,...] 
comma-separated list of releases +-l --list [\"Name (% linux*)|armbian-config\"] list of packages +-c --command command to execute +-R --single-release [name] process only a single release (for parallel GitHub Actions) + example: -R jammy or -R noble + + [show] displays packages in each repository + [sign] sign repository + [html] displays packages in each repository in html form + [serve] serve repository - useful for local diagnostics + [unique] manually select which package should be removed from all repositories + [update] search for packages in input folder and create/update repository + [update-main] build common (main) component - run once before parallel workers + [merge] merge repositories from parallel per-release runs into final repo + [delete] delete package from -l LIST of packages + +-d --dry-run perform a full trial run without making any repository changes + (implies --keep-sources, shows what would be done) +-k --keep-sources keep source packages when adding to repository + (generates real repo but doesn't delete input packages) +-F --force-add force re-adding all packages even if they already exist + (by default, skips packages that are already in the repo) +-P --force-publish force publishing even when there are no packages to add + (by default, skips publishing empty releases) + +GitHub Actions parallel workflow example: + # Step 1: Build common (main) component once (optional - workers will create it if missing) + ./repo.sh -c update-main -i /shared/packages -o /shared/output + + # Step 2: Workers build release-specific components in parallel (isolated DBs) + # Worker 1: ./repo.sh -c update -R jammy -k -i /shared/packages -o /shared/output + # Worker 2: ./repo.sh -c update -R noble -k -i /shared/packages -o /shared/output + # Worker 3: ./repo.sh -c update -R bookworm -k -i /shared/packages -o /shared/output + + # Step 3: Final merge to combine all outputs + ./repo.sh -c merge -i /shared/packages -o /shared/output + +Note: Each worker uses 
isolated DB (aptly-isolated-) to avoid locking.
+Common snapshot is created in each worker's isolated DB from root packages.
+ "
+ exit 2
+}
+
+# Option specs for getopt(1). -F/--force-add and -P/--force-publish are boolean
+# flags, so they carry no ':' argument marker (the handlers below set a variable
+# and `shift` once without consuming a value).
+SHORT=i:,l:,o:,c:,p:,r:,h,d,k,R:,F,P
+LONG=input:,list:,output:,command:,password:,releases:,help,dry-run,keep-sources,single-release:,force-add,force-publish
+if ! OPTS=$(getopt -a -n repo --options $SHORT --longoptions $LONG -- "$@"); then
+	help
+	exit 1
+fi
+
+# Note: Logging now uses syslog/journalctl - view with: journalctl -t repo-management -f
+
+VALID_ARGUMENTS=$# # Returns the count of arguments that are in short or long options
+
+eval set -- "$OPTS"
+
+while :
+do
+	case "$1" in
+		-i | --input )
+			input="$2"
+			shift 2
+			;;
+		-o | --output )
+			output="$2"
+			shift 2
+			;;
+		-c | --command )
+			command="$2"
+			shift 2
+			;;
+		-p | --password )
+			password="$2"
+			shift 2
+			;;
+		-r | --releases )
+			releases="$2"
+			shift 2
+			;;
+		-l | --list )
+			list="$2"
+			shift 2
+			;;
+		-k | --keep-sources )
+			KEEP_SOURCES=true
+			shift
+			;;
+		-R | --single-release )
+			SINGLE_RELEASE="$2"
+			shift 2
+			;;
+		-F | --force-add )
+			FORCE_ADD=true
+			shift
+			;;
+		-P | --force-publish )
+			FORCE_PUBLISH=true
+			shift
+			;;
+		-d | --dry-run )
+			DRY_RUN=true
+			# Dry-run implies keep-sources
+			KEEP_SOURCES=true
+			shift
+			;;
+		-h | --help)
+			help
+			;;
+		--)
+			shift;
+			break
+			;;
+		*)
+			echo "Unexpected option: $1"
+			help
+			;;
+	esac
+done
+
+# redefine output folder in Aptly
+# Use isolated database for single-release mode to avoid DB locking during parallel execution
+# Use shared database for regular (non-parallel) mode
+if [[ -n "$SINGLE_RELEASE" ]]; then
+	# Create isolated aptly directory for this release
+	IsolatedRootDir="${output}/aptly-isolated-${SINGLE_RELEASE}"
+
+	# Clean up isolated DB from previous runs to ensure fresh state
+	# This prevents database/pool desync and "no such file" errors
+	if [[ -d "$IsolatedRootDir" ]]; then
+		log "Cleaning up isolated DB from previous run: $IsolatedRootDir"
+		rm -rf "${IsolatedRootDir}"
+	fi
+
+	if !
mkdir -p "$IsolatedRootDir"; then + log "ERROR: mkdir $IsolatedRootDir: permission denied" + exit 1 + fi + + # Do NOT copy the shared database to isolated DB + # This prevents "key not found" errors when the copied DB references packages + # that don't exist in the isolated pool. Instead, each worker creates a fresh DB + # and builds the common component from packages in the shared input folder. + + # Do NOT link the shared pool either - each isolated DB should have its own pool + # Packages will be copied to the isolated pool when they're added via 'aptly repo add' + # This prevents hard link issues and "no such file or directory" errors during publish + + # Create temp config file + TempDir="$(mktemp -d || exit 1)" + + # Create config with isolated rootDir + cat tools/repository/aptly.conf | \ + sed 's|"rootDir": ".*"|"rootDir": "'$IsolatedRootDir'"|g' > "${TempDir}"/aptly.conf + + CONFIG="${TempDir}/aptly.conf" + log "Using isolated aptly root for $SINGLE_RELEASE at: $IsolatedRootDir" +else + TempDir="$(mktemp -d || exit 1)" + sed 's|"rootDir": ".*"|"rootDir": "'$output'"|g' tools/repository/aptly.conf > "${TempDir}"/aptly.conf + CONFIG="${TempDir}/aptly.conf" +fi + +# Display configuration status +echo "==========================================" +echo "Configuration Status:" +echo " DRY-RUN: $([ "$DRY_RUN" == true ] && echo 'ENABLED' || echo 'disabled')" +echo " KEEP-SOURCES: $([ "$KEEP_SOURCES" == true ] && echo 'ENABLED' || echo 'disabled')" +echo " FORCE-ADD: $([ "$FORCE_ADD" == true ] && echo 'ENABLED' || echo 'disabled')" +echo " FORCE-PUBLISH: $([ "$FORCE_PUBLISH" == true ] && echo 'ENABLED' || echo 'disabled')" +if [[ -n "$SINGLE_RELEASE" ]]; then + echo " SINGLE-RELEASE: ENABLED ($SINGLE_RELEASE)" +else + echo " SINGLE-RELEASE: disabled" +fi +echo "==========================================" + +log "Configuration: DRY_RUN=$DRY_RUN, KEEP_SOURCES=$KEEP_SOURCES, FORCE_ADD=$FORCE_ADD, FORCE_PUBLISH=$FORCE_PUBLISH, SINGLE_RELEASE=$SINGLE_RELEASE" + +if [[ 
"$DRY_RUN" == true ]]; then + echo "==========================================" + echo "DRY-RUN MODE ENABLED" + echo "No changes will be made to repository" + echo "Packages will NOT be deleted on add" + echo "==========================================" +elif [[ "$KEEP_SOURCES" == true ]]; then + echo "==========================================" + echo "KEEP-SOURCES MODE ENABLED" + echo "Repository will be generated normally" + echo "Source packages will NOT be deleted" + echo "==========================================" +fi + +if [[ "$FORCE_ADD" == true ]]; then + echo "==========================================" + echo "FORCE-ADD MODE ENABLED" + echo "All packages will be re-added even if already in repo" + echo "==========================================" +fi + +if [[ -n "$SINGLE_RELEASE" ]]; then + echo "==========================================" + echo "SINGLE RELEASE MODE" + echo "Processing only: $SINGLE_RELEASE" + echo "==========================================" +fi + + +# main +repo-manipulate "$input" "$output" "$command" "$password" "$releases" "$list" +RETURN=$? +exit $RETURN