Files
armbian.github.io/.github/workflows/infrastructure-repository-update.yml
Igor Pecovnik 54ee14b535 Fix parallel package download race condition and improve repository upload
- Fix race condition in parallel package downloads
- Add SKIP_VERSION_CHECK option for force repopulation
- Fix upload to only include .deb files, not repository structure
- Improve workflow reliability and error handling

Signed-off-by: Igor Pecovnik <igor@armbian.com>
2026-01-05 22:55:59 +01:00

1067 lines
38 KiB
YAML

# Workflow: rebuilds and publishes the Armbian APT repositories (stable and
# beta), then syncs them to mirror servers fetched from NetBox.
name: "Infrastructure: Repository update"
on:
  # Manual trigger; restricted to the "Release manager" team by the Check job.
  workflow_dispatch:
    inputs:
      purge_external:
        description: 'Purge external packages before download'
        required: true
        type: boolean
        default: true
      download_external:
        description: 'Download external packages'
        required: true
        type: boolean
        default: true
  # Fired by other workflows; client_payload.target selects the copy path
  # handled in the Copying job (stable/, cron/, nightly/, ...).
  repository_dispatch:
    types: ["Repository update"]
env:
  # Landing area where build jobs upload artifacts
  STORAGE_PATH: /armbian/openssh-server/storage
  # Incoming sub-area, drained into STORAGE_PATH by the Copying job
  INCOMING_PATH: /armbian/openssh-server/storage/incoming
  # Base path of generated APT repos (suffixed with -debs / -debs-beta)
  PUBLISHING_PATH: /publishing/repository
  # When "true", the cleanup job deletes processed .deb files from storage
  CLEANUP_INPUT: true
  # When "true", mirror rsyncs run with --dry-run (nothing transferred)
  DRY_RUN_SYNC: false
# Only one repository pipeline at a time; queued runs are not cancelled.
concurrency:
  group: pipeline
  cancel-in-progress: false
jobs:
  Check:
    name: "Check membership" # Only release manager can execute this manually
    runs-on: Linux
    steps:
      - name: "Check membership"
        # Fails the run unless the triggering actor belongs to the
        # organisation's "Release manager" team.
        uses: armbian/actions/team-check@main
        with:
          ORG_MEMBERS: ${{ secrets.ORG_MEMBERS }}
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          TEAM: "Release manager"
Copying:
needs: Check
runs-on: repository
steps:
- name: Delete empty folders in INCOMING_PATH
run: |
sudo find "${{ env.INCOMING_PATH }}" -type d -empty -delete
- name: Display STORAGE_PATH tree structure
run: |
tree ${{ env.INCOMING_PATH }} || find ${{ env.INCOMING_PATH }} -print | sed -e 's;[^/]*/;|____;g;s;____|; |;g'
- name: Generate statistical report for .deb files in INCOMING_PATH
run: |
echo '## Incoming .deb Files Report' >> $GITHUB_STEP_SUMMARY
echo '' >> $GITHUB_STEP_SUMMARY
echo '| Folder | File Count | Total Size |' >> $GITHUB_STEP_SUMMARY
echo '|:-------|-----------:|----------:|' >> $GITHUB_STEP_SUMMARY
for folder_path in "${{ env.INCOMING_PATH }}"/*/; do
folder=$(basename "$folder_path")
if [ -d "$folder_path" ]; then
file_count=$(find "$folder_path" -type f -name "*.deb" | wc -l)
if [ "$file_count" -gt 0 ]; then
total_size_kb=$(du -sk "$folder_path" | cut -f1)
total_size_mb=$(awk "BEGIN {printf \"%.0f Mb\", $total_size_kb / 1024}")
else
total_size_mb="0 Mb"
fi
printf "| %-7s | %10d | %13s |\n" "$folder" "$file_count" "$total_size_mb" >> $GITHUB_STEP_SUMMARY
fi
done
- name: Checkout build repository
uses: actions/checkout@v6
- name: Copy operations
run: |
TARGET="${{ github.event.client_payload.target }}"
# Default to cron/ if target is not set
if [ -z "$TARGET" ]; then
TARGET="cron/"
fi
case "$TARGET" in
"stable/")
if [ -d "${INCOMING_PATH}/stable" ]; then
COPY_UBOOT=false \
DRY_RUN=true \
SELECT=':edge' \
SRC_DIR="${{ env.INCOMING_PATH }}"/nightly/debs-beta/ \
DST_DIR=/tmp/x \
scripts/copy-kernel-packages.sh 2>&1 | tee -a $GITHUB_STEP_SUMMARY
else
echo "## Source folder INCOMING/stable does not exist, skipping" >> $GITHUB_STEP_SUMMARY
fi
;;
"cron/")
# move what is inside incoming
if [ -d "${INCOMING_PATH}/cron" ]; then
echo "## Fixing STORAGE_PATH permissions" >> $GITHUB_STEP_SUMMARY
sudo chmod -R g+w ${STORAGE_PATH} 2>&1 | tee -a $GITHUB_STEP_SUMMARY
echo "## Rsync from INCOMING/cron to STORAGE_PATH" >> $GITHUB_STEP_SUMMARY
rsync -av --omit-dir-times --no-perms --no-group ${INCOMING_PATH}/cron/ ${STORAGE_PATH} 2>&1 | tee -a $GITHUB_STEP_SUMMARY || echo "Warning: Some files/attrs were not transferred (code 23)" >> $GITHUB_STEP_SUMMARY
echo "## Removing source INCOMING/cron" >> $GITHUB_STEP_SUMMARY
rm -rf ${INCOMING_PATH}/cron 2>&1 | tee -a $GITHUB_STEP_SUMMARY
else
echo "## Source folder INCOMING/cron does not exist, skipping" >> $GITHUB_STEP_SUMMARY
fi
;;
"nightly/")
:
;;
"generic/")
:
;;
"apps/")
:
;;
"community/")
:
;;
*)
:
;;
esac
  external:
    name: "Download external"
    needs: Copying
    # Reusable workflow that downloads third-party packages into the repository
    uses: armbian/armbian.github.io/.github/workflows/infrastructure-download-external.yml@main
    with:
      # On repository_dispatch the inputs context is empty, so the comparison
      # against false evaluates true and the download stays enabled by default.
      ENABLED: ${{ inputs.download_external != false }}
      # Hardcoded true: always re-download to force full repopulation
      SKIP_VERSION_CHECK: true
      ACCESS_NAME: armbian
      BUILD_RUNNER: "ubuntu-latest"
      HOST_DEPLOY: "repo.armbian.com"
      # Empty inputs (repository_dispatch) fall back to false here
      PURGE: ${{ inputs.purge_external || false }}
    secrets:
      GPG_KEY1: ${{ secrets.GPG_KEY3 }}
      GPG_KEY2: ${{ secrets.GPG_KEY4 }}
      ACCESS_TOKEN: ${{ secrets.ACCESS_TOKEN }}
      KEY_UPLOAD: ${{ secrets.KEY_UPLOAD }}
      HOST_UPLOAD: ${{ secrets.HOST_UPLOAD }}
      HOST_UPLOAD_USER: ${{ secrets.HOST_UPLOAD_USER }}
      HOST_UPLOAD_PORT: ${{ secrets.HOST_UPLOAD_PORT }}
      KNOWN_HOSTS_ARMBIAN_UPLOAD: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }}
  fix-permissions:
    name: "Fix permissions"
    needs: external
    runs-on: repository
    strategy:
      matrix:
        repository:
          - name: debs
    steps:
      - name: "Set group write permissions for ${{ matrix.repository.name }}"
        shell: bash
        # Makes the published repository writable by the shared runner group so
        # subsequent jobs (running as different users) can update it in place.
        run: |
          set -e
          set -o pipefail
          REPO_PATH="${{ env.PUBLISHING_PATH }}-${{ matrix.repository.name }}"
          SHARED_GROUP="${SHARED_GROUP:-runners}"
          # Validate path to prevent accidental operations
          case "$REPO_PATH" in
            *"/publishing/repository"*)
              # Path is valid
              ;;
            *)
              echo "::error::Invalid repository path: $REPO_PATH"
              echo "::error::Path must be under /publishing/repository"
              exit 1
              ;;
          esac
          echo "### Setting permissions for ${{ matrix.repository.name }}" | tee -a "$GITHUB_STEP_SUMMARY"
          echo ""
          echo "Repository path: $REPO_PATH" | tee -a "$GITHUB_STEP_SUMMARY"
          # Check if the specified group exists; if not, fall back to
          # permission-only fixes (no chgrp, no ACLs).
          if getent group "$SHARED_GROUP" >/dev/null 2>&1; then
            echo "Shared group: $SHARED_GROUP" | tee -a "$GITHUB_STEP_SUMMARY"
          else
            echo "⚠ Shared group '$SHARED_GROUP' does not exist, skipping group-specific operations" | tee -a "$GITHUB_STEP_SUMMARY"
            SHARED_GROUP=""
          fi
          echo ""
          if [ -d "$REPO_PATH" ]; then
            echo "Setting cooperative umask (002) for this job..." | tee -a "$GITHUB_STEP_SUMMARY"
            umask 002
            if [ -n "$SHARED_GROUP" ]; then
              echo "Ensuring group ownership is $SHARED_GROUP (recursive)..." | tee -a "$GITHUB_STEP_SUMMARY"
              sudo chgrp -R "$SHARED_GROUP" "$REPO_PATH" || true
            fi
            # Best-effort chmod passes: errors are deliberately ignored
            # (|| true) because files may disappear while the find runs.
            echo "Setting directories to group-writable and setgid (2775)..." | tee -a "$GITHUB_STEP_SUMMARY"
            sudo find "$REPO_PATH" -type d -print0 \
              | xargs -0 -r -P 8 sudo chmod -- 2775 2>/dev/null || true
            echo "Ensuring files are group-writable (g+w)..." | tee -a "$GITHUB_STEP_SUMMARY"
            sudo find "$REPO_PATH" -type f ! -perm -g+w -print0 \
              | xargs -0 -r -P 8 sudo chmod -- g+w 2>/dev/null || true
            if [ -n "$SHARED_GROUP" ] && command -v setfacl >/dev/null 2>&1; then
              echo "Setting default ACLs so new files/dirs keep group rwX (recommended)..." | tee -a "$GITHUB_STEP_SUMMARY"
              sudo setfacl -R -m "g:${SHARED_GROUP}:rwX" "$REPO_PATH" || true
              sudo setfacl -R -d -m "g:${SHARED_GROUP}:rwX" "$REPO_PATH" || true
              echo "✓ ACLs applied" | tee -a "$GITHUB_STEP_SUMMARY"
            elif [ -z "$SHARED_GROUP" ]; then
              echo "⚠ No valid shared group, skipping ACLs" | tee -a "$GITHUB_STEP_SUMMARY"
            else
              echo "⚠ setfacl not available; skipping ACLs" | tee -a "$GITHUB_STEP_SUMMARY"
            fi
            # Sticky-bit dirs restrict deletion to the owner; report a sample
            # (first 20) so an operator can decide whether to clear them.
            echo "Checking for sticky bit directories (can block deleting others' files)..." | tee -a "$GITHUB_STEP_SUMMARY"
            STICKY_DIRS="$(sudo find "$REPO_PATH" -type d -perm -1000 -print | head -n 20 || true)"
            if [ -n "$STICKY_DIRS" ]; then
              echo "⚠ Sticky-bit dirs found (sample):" | tee -a "$GITHUB_STEP_SUMMARY"
              echo "$STICKY_DIRS" | sed 's/^/ - /' | tee -a "$GITHUB_STEP_SUMMARY"
              echo " If you want shared deletion, remove sticky bit: sudo chmod -t <dir>" | tee -a "$GITHUB_STEP_SUMMARY"
            else
              echo "✓ No sticky-bit dirs detected" | tee -a "$GITHUB_STEP_SUMMARY"
            fi
            echo "" | tee -a "$GITHUB_STEP_SUMMARY"
            echo "✓ Permissions updated (dirs 2775, files g+w, setgid enabled, umask 002 set)" | tee -a "$GITHUB_STEP_SUMMARY"
          else
            echo "Repository path does not exist yet: $REPO_PATH" | tee -a "$GITHUB_STEP_SUMMARY"
          fi
          echo "" | tee -a "$GITHUB_STEP_SUMMARY"
  prepare-beta:
    name: "Prepare beta repository"
    needs: fix-permissions
    runs-on: repository
    steps:
      - name: "Remove and recreate beta repository"
        shell: bash
        # Wipes the beta repository so it is rebuilt from scratch by the
        # publish jobs on every run.
        run: |
          set -e
          set -o pipefail
          BETA_REPO_PATH="${{ env.PUBLISHING_PATH }}-debs-beta"
          # Validate path to prevent accidental deletion
          # Ensure it's under the expected publishing path
          case "$BETA_REPO_PATH" in
            *"/publishing/repository"*)
              # Path is valid - contains expected parent path
              ;;
            *)
              echo "::error::Invalid beta repository path: $BETA_REPO_PATH"
              echo "::error::Path must be under /publishing/repository"
              exit 1
              ;;
          esac
          echo "### Preparing beta repository" | tee -a "$GITHUB_STEP_SUMMARY"
          echo ""
          echo "Target path: $BETA_REPO_PATH" | tee -a "$GITHUB_STEP_SUMMARY"
          echo ""
          # Remove existing beta repository to start fresh
          if [ -d "$BETA_REPO_PATH" ]; then
            echo "Removing existing beta repository: $BETA_REPO_PATH" | tee -a "$GITHUB_STEP_SUMMARY"
            # Additional safety check: ensure it's a symlink or directory, not a file
            if [ -L "$BETA_REPO_PATH" ] || [ -d "$BETA_REPO_PATH" ]; then
              # Use rm with one trailing slash to prevent accidental matches
              # The trailing slash ensures we only delete if it's a directory/symlink to directory
              sudo rm -rf "${BETA_REPO_PATH}/"
            else
              echo "::error::Path exists but is not a directory or symlink: $BETA_REPO_PATH"
              exit 1
            fi
          else
            echo "Beta repository does not exist yet: $BETA_REPO_PATH" | tee -a "$GITHUB_STEP_SUMMARY"
          fi
          # Create fresh beta repository directory.
          # NOTE(review): created via sudo, so the fresh directory is root-owned
          # until the postclean permission step runs — confirm downstream
          # writers (repo.sh in the main job) can write here.
          echo "Creating fresh beta repository directory" | tee -a "$GITHUB_STEP_SUMMARY"
          sudo mkdir -p "$BETA_REPO_PATH"
          # Verify directory was created
          if [ ! -d "$BETA_REPO_PATH" ]; then
            echo "::error::Failed to create beta repository directory: $BETA_REPO_PATH"
            exit 1
          fi
          echo "" | tee -a "$GITHUB_STEP_SUMMARY"
          echo "✓ Beta repository prepared" | tee -a "$GITHUB_STEP_SUMMARY"
  main:
    needs: prepare-beta
    name: "Publish main"
    runs-on: repository
    strategy:
      matrix:
        repository:
          - name: debs
          - name: debs-beta
    steps:
      # Cleaning self hosted runners
      - name: Runner clean
        uses: armbian/actions/runner-clean@main
      # Two signing keys are imported into the runner's keyring; repo.sh
      # signs with whichever matches the repository configuration.
      - name: Import GPG key
        uses: crazy-max/ghaction-import-gpg@v6
        with:
          gpg_private_key: ${{ secrets.GPG_KEY3 }}
      - name: Import GPG key
        uses: crazy-max/ghaction-import-gpg@v6
        with:
          gpg_private_key: ${{ secrets.GPG_KEY4 }}
      - name: Checkout build repository for ${{ matrix.repository.name }}
        uses: actions/checkout@v6
        with:
          repository: armbian/build
          fetch-depth: 1
          ref: main
          clean: false
      - name: "Build repository ${{ matrix.repository.name }}"
        shell: bash
        # Ingests .deb files from storage into the published repository via
        # the build framework's repo.sh "update-main" command.
        run: |
          set -e
          set -o pipefail
          STORAGE_PATH="${{ env.STORAGE_PATH }}"
          PUBLISH_PATH="${{ env.PUBLISHING_PATH }}"
          REPO_NAME="${{ matrix.repository.name }}"
          # Validate paths to prevent accidental operations
          case "$STORAGE_PATH" in
            *"/armbian/openssh-server/storage"*)
              # Path is valid
              ;;
            *)
              echo "::error::Invalid storage path: $STORAGE_PATH"
              echo "::error::Path must be under /armbian/openssh-server/storage"
              exit 1
              ;;
          esac
          case "$PUBLISH_PATH" in
            *"/publishing/repository"*)
              # Path is valid
              ;;
            *)
              echo "::error::Invalid publishing path: $PUBLISH_PATH"
              echo "::error::Path must be under /publishing/repository"
              exit 1
              ;;
          esac
          INPUT_DIR="${STORAGE_PATH}/${REPO_NAME}"
          OUTPUT_DIR="${PUBLISH_PATH}-${REPO_NAME}"
          echo "### Checking for .deb files in $REPO_NAME" | tee -a "$GITHUB_STEP_SUMMARY"
          echo ""
          echo "Input directory: $INPUT_DIR" | tee -a "$GITHUB_STEP_SUMMARY"
          echo ""
          # Validate input directory exists; nothing to publish otherwise
          if [ ! -d "$INPUT_DIR" ]; then
            echo "::notice::Input directory does not exist: $INPUT_DIR"
            exit 0
          fi
          # Count .deb files in incoming directory
          DEB_COUNT=$(find "$INPUT_DIR" -type f -name "*.deb" 2>/dev/null | wc -l)
          echo "Found ${DEB_COUNT} .deb files" | tee -a "$GITHUB_STEP_SUMMARY"
          echo ""
          # Only run repo.sh if there are files to process
          if [ "${DEB_COUNT}" -gt 0 ]; then
            echo "Running repository update..." | tee -a "$GITHUB_STEP_SUMMARY"
            # NOTE(review): flag semantics (-c command, -k, -i input, -o output)
            # inferred from usage — confirm against tools/repository/repo.sh
            # in armbian/build.
            tools/repository/repo.sh \
              -c update-main \
              -k \
              -i "$INPUT_DIR" \
              -o "$OUTPUT_DIR"
          else
            echo "::notice::No .deb files found, skipping repository update"
          fi
preclean:
needs: main
name: "Publish"
runs-on: repository
outputs:
matrix: ${{steps.json.outputs.JSON_CONTENT}}
steps:
- name: "Checkout Armbian build Framework"
uses: actions/checkout@v6
with:
repository: armbian/build
clean: false
ref: main
fetch-depth: 1
path: build
- name: "Make JSON"
id: json
shell: bash
run: |
set -e
set -o pipefail
pkg="debs,debs-beta"
# Validate build config directory exists
if [[ ! -d "build/config/distributions" ]]; then
echo "::error::Build config directory not found"
exit 1
fi
# Get all supported releases from config files
releases=()
while IFS= read -r support_file; do
if [[ -f "$support_file" ]]; then
release=$(echo "$support_file" | cut -d"/" -f4)
if [[ -n "$release" ]]; then
releases+=("$release")
fi
fi
done < <(find build/config/distributions -name "support" -type f 2>/dev/null)
if [[ ${#releases[@]} -eq 0 ]]; then
echo "::error::No releases found in build/config/distributions"
exit 1
fi
echo "Found ${#releases[@]} releases: ${releases[*]}" | tee -a "$GITHUB_STEP_SUMMARY"
# Build JSON matrix
echo 'JSON_CONTENT<<EOF' >> $GITHUB_OUTPUT
for i in "${releases[@]}"; do
for j in debs debs-beta; do
echo "{\"release\":\"${i}\",\"package\":\"$j\"}"
done
done | jq -s >> $GITHUB_OUTPUT
echo 'EOF' >> $GITHUB_OUTPUT
  postclean:
    needs: preclean
    strategy:
      fail-fast: false
      matrix:
        include: ${{ fromJson(needs.preclean.outputs.matrix) }}
    name: "${{ matrix.release }} (${{ matrix.package }})"
    timeout-minutes: 60
    runs-on: repository
    steps:
      # Cleaning self hosted runners
      - name: Runner clean
        uses: armbian/actions/runner-clean@main
      - name: "Fix permissions for ${{ matrix.package }}"
        shell: bash
        # Re-applies group-writable permissions before the per-release update;
        # earlier jobs may have created files this runner's user cannot touch.
        run: |
          set -e
          set -o pipefail
          REPO_PATH="${{ env.PUBLISHING_PATH }}-${{ matrix.package }}"
          SHARED_GROUP="${SHARED_GROUP:-runners}"
          # Validate path to prevent accidental operations
          case "$REPO_PATH" in
            *"/publishing/repository"*)
              # Path is valid
              ;;
            *)
              echo "::error::Invalid repository path: $REPO_PATH"
              echo "::error::Path must be under /publishing/repository"
              exit 1
              ;;
          esac
          echo "### Setting permissions for ${{ matrix.package }}" | tee -a "$GITHUB_STEP_SUMMARY"
          echo ""
          echo "Repository path: $REPO_PATH" | tee -a "$GITHUB_STEP_SUMMARY"
          echo ""
          if [ -d "$REPO_PATH" ]; then
            echo "Setting umask 002" | tee -a "$GITHUB_STEP_SUMMARY"
            umask 002
            # Best-effort chmod passes: errors are ignored (|| true) because
            # files may disappear while the find runs.
            echo "Setting directories to group-writable and setgid (2775)..." | tee -a "$GITHUB_STEP_SUMMARY"
            sudo find "$REPO_PATH" -type d -print0 \
              | xargs -0 -r -P 8 sudo chmod -- 2775 2>/dev/null || true
            echo "Ensuring files are group-writable (g+w)..." | tee -a "$GITHUB_STEP_SUMMARY"
            sudo find "$REPO_PATH" -type f ! -perm -g+w -print0 \
              | xargs -0 -r -P 8 sudo chmod -- g+w 2>/dev/null || true
            echo "" | tee -a "$GITHUB_STEP_SUMMARY"
            echo "✓ Permissions fixed" | tee -a "$GITHUB_STEP_SUMMARY"
          else
            echo "Repository path does not exist yet: $REPO_PATH" | tee -a "$GITHUB_STEP_SUMMARY"
          fi
          echo "" | tee -a "$GITHUB_STEP_SUMMARY"
      # Two signing keys are imported; repo.sh signs with the matching one.
      - name: Import GPG key
        uses: crazy-max/ghaction-import-gpg@v6
        with:
          gpg_private_key: ${{ secrets.GPG_KEY3 }}
      - name: Import GPG key
        uses: crazy-max/ghaction-import-gpg@v6
        with:
          gpg_private_key: ${{ secrets.GPG_KEY4 }}
      - name: Checkout build repository
        uses: actions/checkout@v6
        with:
          repository: armbian/build
          ref: main
          fetch-depth: 1
          clean: false
      - name: "Build repository ${{ matrix.release }}"
        shell: bash
        # Runs repo.sh "update" for a single release, ingesting matching .deb
        # files from storage into the published repository.
        run: |
          set -e
          set -o pipefail
          STORAGE_PATH="${{ env.STORAGE_PATH }}"
          PUBLISH_PATH="${{ env.PUBLISHING_PATH }}"
          PACKAGE="${{ matrix.package }}"
          RELEASE="${{ matrix.release }}"
          # Validate paths
          case "$STORAGE_PATH" in
            *"/armbian/openssh-server/storage"*)
              ;;
            *)
              echo "::error::Invalid storage path: $STORAGE_PATH"
              exit 1
              ;;
          esac
          case "$PUBLISH_PATH" in
            *"/publishing/repository"*)
              ;;
            *)
              echo "::error::Invalid publishing path: $PUBLISH_PATH"
              exit 1
              ;;
          esac
          # Validate package name
          case "$PACKAGE" in
            debs|debs-beta)
              ;;
            *)
              echo "::error::Invalid package name: $PACKAGE"
              exit 1
              ;;
          esac
          INPUT_DIR="${STORAGE_PATH}/${PACKAGE}"
          OUTPUT_DIR="${PUBLISH_PATH}-${PACKAGE}"
          echo "### Checking for .deb files in $PACKAGE/$RELEASE" | tee -a "$GITHUB_STEP_SUMMARY"
          echo ""
          echo "Input directory: $INPUT_DIR" | tee -a "$GITHUB_STEP_SUMMARY"
          echo ""
          # Validate input directory exists; nothing to publish otherwise
          if [ ! -d "$INPUT_DIR" ]; then
            echo "::notice::Input directory does not exist: $INPUT_DIR"
            exit 0
          fi
          # Count .deb files in incoming directory
          DEB_COUNT=$(find "$INPUT_DIR" -type f -name "*.deb" 2>/dev/null | wc -l)
          echo "Found ${DEB_COUNT} .deb files" | tee -a "$GITHUB_STEP_SUMMARY"
          echo ""
          # Only run repo.sh if there are files to process
          if [ "${DEB_COUNT}" -gt 0 ]; then
            echo "Running repository update for $RELEASE..." | tee -a "$GITHUB_STEP_SUMMARY"
            # NOTE(review): flag semantics (-c command, -R release, -k, -i, -o)
            # inferred from usage — confirm against tools/repository/repo.sh
            # in armbian/build.
            tools/repository/repo.sh \
              -c update \
              -R "$RELEASE" \
              -k \
              -i "$INPUT_DIR" \
              -o "$OUTPUT_DIR"
          else
            echo "::notice::No .deb files found, skipping repository update"
          fi
  cleanup:
    name: "Clean input"
    needs: postclean
    runs-on: repository
    strategy:
      matrix:
        repository:
          - name: debs
          - name: debs-beta
    steps:
      - name: "Cleanup for ${{ matrix.repository.name }}"
        shell: bash
        # Deletes processed .deb files from the storage area after they have
        # been ingested into the published repositories. Gated by the
        # CLEANUP_INPUT workflow env variable.
        run: |
          set -e
          set -o pipefail
          # Check if cleanup is enabled via environment variable
          if [ "${CLEANUP_INPUT}" != "true" ]; then
            echo "### Cleanup disabled" | tee -a "$GITHUB_STEP_SUMMARY"
            echo "CLEANUP_INPUT is set to: ${CLEANUP_INPUT}" | tee -a "$GITHUB_STEP_SUMMARY"
            echo "Set CLEANUP_INPUT to 'true' to enable cleanup" | tee -a "$GITHUB_STEP_SUMMARY"
            exit 0
          fi
          INPUT_DIR="${{ env.STORAGE_PATH }}/${{ matrix.repository.name }}"
          # Validate path to prevent accidental deletion
          # Ensure it's under the expected storage path
          case "$INPUT_DIR" in
            *"/armbian/openssh-server/storage"*)
              # Path is valid - contains expected parent path
              ;;
            *)
              echo "::error::Invalid input directory path: $INPUT_DIR"
              echo "::error::Path must be under /armbian/openssh-server/storage"
              exit 1
              ;;
          esac
          echo "### Cleanup for ${{ matrix.repository.name }}" | tee -a "$GITHUB_STEP_SUMMARY"
          echo ""
          echo "Input directory: $INPUT_DIR" | tee -a "$GITHUB_STEP_SUMMARY"
          echo ""
          # Check if directory exists
          if [ ! -d "${INPUT_DIR}" ]; then
            echo "::warning::Directory ${INPUT_DIR} does not exist"
            exit 0
          fi
          # Additional safety check: ensure it's actually a directory, not a file
          if [ -f "${INPUT_DIR}" ]; then
            echo "::error::Path exists but is a file, not a directory: $INPUT_DIR"
            exit 1
          fi
          # Count .deb files before deletion (use sudo since files are owned by different user)
          # Search recursively in subdirectories
          DEB_COUNT=$(sudo find "${INPUT_DIR}" -type f -name "*.deb" 2>/dev/null | wc -l)
          echo "Found ${DEB_COUNT} .deb files to delete" | tee -a "$GITHUB_STEP_SUMMARY"
          echo ""
          if [ "${DEB_COUNT}" -eq 0 ]; then
            echo "No files to delete" | tee -a "$GITHUB_STEP_SUMMARY"
            exit 0
          fi
          # Delete all .deb files recursively (use sudo since files are owned by different user)
          # Use -print0 and xargs for safer handling of filenames with spaces
          echo "Deleting .deb files..." | tee -a "$GITHUB_STEP_SUMMARY"
          sudo find "${INPUT_DIR}" -type f -name "*.deb" -print0 \
            | xargs -0 -r sudo rm -fv
          # Verify deletion by re-counting; a non-zero remainder fails the job
          REMAINING=$(sudo find "${INPUT_DIR}" -type f -name "*.deb" 2>/dev/null | wc -l)
          echo ""
          echo "✓ Deleted ${DEB_COUNT} .deb files" | tee -a "$GITHUB_STEP_SUMMARY"
          echo "Remaining files: ${REMAINING}" | tee -a "$GITHUB_STEP_SUMMARY"
          # Warn if deletion failed (files remain)
          if [ "${REMAINING}" -gt 0 ]; then
            echo "::warning::Some files could not be deleted. ${REMAINING} files remaining."
            exit 1
          fi
Prepare:
name: "Upload to servers"
needs: cleanup
outputs:
matrix: ${{steps.json.outputs.JSON_CONTENT}}
runs-on: ubuntu-latest
steps:
- name: Get primary mirrors from database
id: json
shell: bash
run: |
set -e
set -o pipefail
# Validate that required secrets are available
if [[ -z "${{ secrets.NETBOX_API }}" || "${{ secrets.NETBOX_API }}" == "" ]]; then
echo "::error::NETBOX_API secret is not set or is empty"
exit 1
fi
if [[ -z "${{ secrets.NETBOX_TOKEN }}" || "${{ secrets.NETBOX_TOKEN }}" == "" ]]; then
echo "::error::NETBOX_TOKEN secret is not set or is empty"
exit 1
fi
# Validate API URL format to prevent SSRF
NETBOX_API="${{ secrets.NETBOX_API }}"
if [[ ! "$NETBOX_API" =~ ^https?:// ]]; then
echo "::error::NETBOX_API must start with http:// or https://"
exit 1
fi
echo "### Fetching mirror list from NetBox" | tee -a "$GITHUB_STEP_SUMMARY"
echo ""
# Build query with proper filtering
API_URL="${NETBOX_API}/virtualization/virtual-machines/?limit=500&name__empty=false&device_role=Mirror&tag=push&status=active"
# Fetch data with timeout and security options
echo "Querying: $API_URL" | tee -a "$GITHUB_STEP_SUMMARY"
response=$(curl -fsSL \
--max-time 30 \
--connect-timeout 10 \
-H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" \
-H "Accept: application/json" \
"$API_URL" 2>&1)
curl_exit_code=$?
if [[ $curl_exit_code -ne 0 ]]; then
echo "::error::Failed to fetch data from NetBox API (curl exit code: $curl_exit_code)"
echo "::error::Response: $response"
exit 1
fi
# Validate JSON response
if ! echo "$response" | jq empty 2>/dev/null; then
echo "::error::Invalid JSON response from NetBox API"
echo "::error::Response: $response"
exit 1
fi
# Extract mirror names and build enriched JSON with server configuration
# This avoids repeated NetBox API calls in downstream matrix jobs
mirror_json=$(echo "$response" | jq -r '.results[] | {
name: .name,
path: .custom_fields.path,
port: .custom_fields.port,
username: .custom_fields.username
} | select(.path != null and .port != null and .username != null)' | jq -s '.' 2>/dev/null)
if [[ -z "$mirror_json" || "$mirror_json" == "null" || "$mirror_json" == "[]" ]]; then
echo "::warning::No mirrors found in NetBox API response"
echo 'JSON_CONTENT<<EOF' >> $GITHUB_OUTPUT
echo '[]' >> $GITHUB_OUTPUT
echo 'EOF' >> $GITHUB_OUTPUT
else
# Count mirrors
mirror_count=$(echo "$mirror_json" | jq 'length')
echo "Found $mirror_count active mirrors" | tee -a "$GITHUB_STEP_SUMMARY"
# Validate JSON structure
if ! echo "$mirror_json" | jq empty 2>/dev/null; then
echo "::error::Failed to format mirror list as JSON"
exit 1
fi
# Output to GitHub Actions
echo "JSON_CONTENT<<EOF" >> $GITHUB_OUTPUT
echo "$mirror_json" >> $GITHUB_OUTPUT
echo 'EOF' >> $GITHUB_OUTPUT
# List mirrors in summary
echo "" | tee -a "$GITHUB_STEP_SUMMARY"
echo "Mirror servers:" | tee -a "$GITHUB_STEP_SUMMARY"
echo "$mirror_json" | jq -r '.[].name' | sed 's/^/ - /' | tee -a "$GITHUB_STEP_SUMMARY"
fi
  Sync:
    # First mirror pass: rsyncs repository payload to every active mirror,
    # excluding "dists" and "control" so package files land before the
    # repository metadata is switched (metadata goes out in the Index job).
    name: "F"
    runs-on: repository-sync
    needs: Prepare
    outputs:
      # Re-exported unchanged so the Index job can reuse the mirror matrix
      matrix: ${{needs.Prepare.outputs.matrix}}
    if: ${{ needs.Prepare.outputs.matrix != '[]' && needs.Prepare.outputs.matrix != '' }}
    timeout-minutes: 90
    strategy:
      max-parallel: 8
      fail-fast: false
      matrix:
        node: ${{fromJson(needs.Prepare.outputs.matrix)}}
    steps:
      - name: Install SSH key
        uses: shimataro/ssh-key-action@v2
        with:
          key: ${{ secrets.KEY_UPLOAD }}
          known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }}
          if_key_exists: replace
      - name: What to sync?
        shell: bash
        run: |
          set -e
          set -o pipefail
          # Use server configuration from matrix (fetched and validated in Prepare job)
          HOSTNAME="${{ matrix.node.name }}"
          SERVER_PATH="${{ matrix.node.path }}"
          SERVER_PORT="${{ matrix.node.port }}"
          SERVER_USERNAME="${{ matrix.node.username }}"
          echo "### Server: $HOSTNAME" | tee -a "$GITHUB_STEP_SUMMARY"
          echo " Path: $SERVER_PATH" | tee -a "$GITHUB_STEP_SUMMARY"
          echo " Port: $SERVER_PORT" | tee -a "$GITHUB_STEP_SUMMARY"
          echo " Username: $SERVER_USERNAME" | tee -a "$GITHUB_STEP_SUMMARY"
          echo ""
          # Fetch targets from NetBox API (need tags for determining sync targets)
          API_URL="${{ secrets.NETBOX_API }}/virtualization/virtual-machines/?limit=500&name__empty=false&name=${HOSTNAME}"
          response=$(curl -fsSL \
            --max-time 30 \
            --connect-timeout 10 \
            -H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" \
            -H "Accept: application/json" \
            "$API_URL" 2>&1) || exit 1
          # Extract targets from tags. Relies on word splitting, so tag names
          # must not contain whitespace. NOTE(review): this filters the literal
          # tag "Push" (capitalised) while the Prepare job queries tag=push —
          # confirm the actual tag spelling in NetBox.
          TARGETS=($(echo "$response" | jq -r '.results[] | .tags[] | .name' 2>/dev/null | grep -v "Push" || echo ""))
          if [[ ${#TARGETS[@]} -eq 0 ]]; then
            echo "::warning::No targets found for $HOSTNAME"
            exit 0
          fi
          # Filter to only valid targets
          VALID_TARGETS=()
          for target in "${TARGETS[@]}"; do
            case "$target" in
              debs|debs-beta)
                VALID_TARGETS+=("$target")
                ;;
              *)
                echo "::warning::Invalid target '$target' for $HOSTNAME - must be 'debs' or 'debs-beta', skipping"
                ;;
            esac
          done
          if [[ ${#VALID_TARGETS[@]} -eq 0 ]]; then
            echo "::warning::No valid targets found for $HOSTNAME"
            exit 0
          fi
          TARGETS=("${VALID_TARGETS[@]}")
          echo "Sync targets: ${TARGETS[*]}" | tee -a "$GITHUB_STEP_SUMMARY"
          echo ""
          # Set base path using global variable
          PUBLISHING_PATH="${{ env.PUBLISHING_PATH }}"
          # Validate publishing path
          case "$PUBLISHING_PATH" in
            *"/publishing/repository"*)
              ;;
            *)
              echo "::error::Invalid publishing path: $PUBLISHING_PATH"
              exit 1
              ;;
          esac
          # Build rsync options - add --dry-run if DRY_RUN_SYNC is enabled
          RSYNC_OPTIONS="-av --size-only --omit-dir-times"
          if [[ "${{ env.DRY_RUN_SYNC }}" == "true" ]]; then
            RSYNC_OPTIONS="$RSYNC_OPTIONS --dry-run"
            echo "::notice::DRY_RUN_SYNC is enabled - rsync will only show what would be transferred" | tee -a "$GITHUB_STEP_SUMMARY"
            echo "" | tee -a "$GITHUB_STEP_SUMMARY"
          fi
          # Sync to each target
          for target in "${TARGETS[@]}"; do
            echo "→ Syncing $target" | tee -a "$GITHUB_STEP_SUMMARY"
            # Remove old host key (accept-new below re-records the current one)
            ssh-keygen -f "${HOME}/.ssh/known_hosts" -R "$HOSTNAME" 2>/dev/null || true
            REPO_PATH="${PUBLISHING_PATH}-${target}"
            if [[ ! -d "$REPO_PATH/public" ]]; then
              # The stable repository must exist; a missing beta repo is tolerable
              if [[ "$target" == "debs" ]]; then
                echo "::error::Source repository path does not exist: $REPO_PATH/public"
                exit 1
              else
                echo "::warning::Repository path does not exist: $REPO_PATH/public, skipping"
                continue
              fi
            fi
            # Remote layout: debs -> apt, debs-beta -> beta
            DEST_PATH="${SERVER_PATH}/$(echo "$target" | sed 's/debs-beta$/beta/;s/^debs$/apt/')"
            rsync $RSYNC_OPTIONS -e "ssh -p ${SERVER_PORT} -o StrictHostKeyChecking=accept-new -o ConnectTimeout=30" \
              --exclude "dists" --exclude "control" \
              "$REPO_PATH/public/" \
              ${SERVER_USERNAME}@${HOSTNAME}:"${DEST_PATH}"
          done
          echo "" | tee -a "$GITHUB_STEP_SUMMARY"
          echo "✓ Sync completed for $HOSTNAME" | tee -a "$GITHUB_STEP_SUMMARY"
Index:
name: "I"
runs-on: repository-sync
needs: Sync
if: ${{ needs.Sync.outputs.matrix != '[]' && needs.Sync.outputs.matrix != '' }}
timeout-minutes: 60
strategy:
max-parallel: 8
fail-fast: false
matrix:
node: ${{fromJson(needs.Sync.outputs.matrix)}}
steps:
- name: Install SSH key
uses: shimataro/ssh-key-action@v2
with:
key: ${{ secrets.KEY_UPLOAD }}
known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }}
if_key_exists: replace
- name: What to sync?
shell: bash
run: |
set -e
set -o pipefail
# Use server configuration from matrix (fetched and validated in Prepare job)
HOSTNAME="${{ matrix.node.name }}"
SERVER_PATH="${{ matrix.node.path }}"
SERVER_PORT="${{ matrix.node.port }}"
SERVER_USERNAME="${{ matrix.node.username }}"
echo "### Server: $HOSTNAME" | tee -a "$GITHUB_STEP_SUMMARY"
echo " Path: $SERVER_PATH" | tee -a "$GITHUB_STEP_SUMMARY"
echo " Port: $SERVER_PORT" | tee -a "$GITHUB_STEP_SUMMARY"
echo " Username: $SERVER_USERNAME" | tee -a "$GITHUB_STEP_SUMMARY"
echo ""
# Fetch targets from NetBox API (need tags for determining sync targets)
API_URL="${{ secrets.NETBOX_API }}/virtualization/virtual-machines/?limit=500&name__empty=false&name=${HOSTNAME}"
response=$(curl -fsSL \
--max-time 30 \
--connect-timeout 10 \
-H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" \
-H "Accept: application/json" \
"$API_URL" 2>&1) || exit 1
# Extract targets from tags
TARGETS=($(echo "$response" | jq -r '.results[] | .tags[] | .name' 2>/dev/null | grep -v "Push" || echo ""))
if [[ ${#TARGETS[@]} -eq 0 ]]; then
echo "::warning::No targets found for $HOSTNAME"
exit 0
fi
# Filter to only valid targets
VALID_TARGETS=()
for target in "${TARGETS[@]}"; do
case "$target" in
debs|debs-beta)
VALID_TARGETS+=("$target")
;;
*)
echo "::warning::Invalid target '$target' for $HOSTNAME - must be 'debs' or 'debs-beta', skipping"
;;
esac
done
if [[ ${#VALID_TARGETS[@]} -eq 0 ]]; then
echo "::warning::No valid targets found for $HOSTNAME"
exit 0
fi
TARGETS=("${VALID_TARGETS[@]}")
# Set base path using global variable
PUBLISHING_PATH="${{ env.PUBLISHING_PATH }}"
# Validate publishing path
case "$PUBLISHING_PATH" in
*"/publishing/repository"*)
;;
*)
echo "::error::Invalid publishing path: $PUBLISHING_PATH"
exit 1
;;
esac
# Build rsync options - add --dry-run if DRY_RUN_SYNC is enabled
RSYNC_OPTIONS="-av --size-only --omit-dir-times"
if [[ "${{ env.DRY_RUN_SYNC }}" == "true" ]]; then
RSYNC_OPTIONS="$RSYNC_OPTIONS --dry-run"
echo "::notice::DRY_RUN_SYNC is enabled - rsync will only show what would be transferred" | tee -a "$GITHUB_STEP_SUMMARY"
echo "" | tee -a "$GITHUB_STEP_SUMMARY"
fi
# Sync beta repository with cleanup
for target in "${TARGETS[@]}"; do
echo "→ Finalizing $target" | tee -a "$GITHUB_STEP_SUMMARY"
REPO_PATH="${PUBLISHING_PATH}-${target}"
if [[ ! -d "$REPO_PATH/public" ]]; then
echo "::warning::Repository path does not exist: $REPO_PATH/public, skipping"
continue
fi
DEST_PATH="${SERVER_PATH}/$(echo "$target" | sed 's/debs-beta$/beta/')"
# Final sync without excludes
rsync $RSYNC_OPTIONS -e "ssh -p ${SERVER_PORT} -o StrictHostKeyChecking=accept-new -o ConnectTimeout=30" \
"$REPO_PATH/public/" \
${SERVER_USERNAME}@${HOSTNAME}:"${DEST_PATH}"
# Cleanup sync with --delete
rsync $RSYNC_OPTIONS --delete -e "ssh -p ${SERVER_PORT} -o StrictHostKeyChecking=accept-new -o ConnectTimeout=30" \
"$REPO_PATH/public/" \
${SERVER_USERNAME}@${HOSTNAME}:"${DEST_PATH}"
done
echo "" | tee -a "$GITHUB_STEP_SUMMARY"
echo "✓ Final sync completed for $HOSTNAME" | tee -a "$GITHUB_STEP_SUMMARY"
  dispatch:
    name: "Refresh web and redirector index"
    # Only fire the dispatch from the official organisation repository
    if: ${{ github.repository_owner == 'Armbian' }}
    needs: Index
    runs-on: ubuntu-latest
    steps:
      - name: "Run redirector update action"
        # Sends a repository_dispatch event to armbian/armbian.github.io to
        # regenerate the redirector index after mirrors are updated.
        uses: peter-evans/repository-dispatch@v4
        with:
          token: ${{ secrets.DISPATCH }}
          repository: armbian/armbian.github.io
          event-type: "Infrastructure: Update redirector"