# template file: 050.single_header.yaml | |
#name: "Build All Artifacts/Images" | |
name: "artifact-image-complete-matrix" | |
on: | |
schedule: | |
- cron: '0 2 * * *' # Scheduled runs every day at 2am UTC | |
workflow_dispatch: | |
inputs: | |
skipImages: | |
description: 'Skip building images? no = build images, yes = skip images' | |
required: true | |
options: [ 'yes' , 'no' ] | |
type: choice | |
nightly: # This is passed as BETA=yes or BETA=no to the prepare step and to the reprepro steps; it determines output/debs vs output/debs-beta
description: 'BETA/nightly? yes = nightly, no = stable' | |
required: true | |
options: [ 'yes', 'no' ] | |
type: choice | |
checkOci: | |
description: 'Check OCI for existing artifacts? yes = check OCI, no = always build everything' | |
required: true | |
options: [ 'yes', 'no' ] | |
type: choice | |
skipCustomRepo: | |
description: 'Skip custom repo? yes = skip custom repo, no = use custom repo' | |
required: true | |
options: [ 'no', 'yes' ] | |
type: choice | |
extraParamsAllBuilds: | |
description: 'Extra params for all builds/jobs (prepare/artifact/image) (eg: DEBUG=yes)' | |
required: false | |
default: '' | |
type: string | |
targetsFilterInclude: | |
description: 'TARGETS_FILTER_INCLUDE, example: "BOARD:odroidhc4,BOARD:odroidn2"' | |
required: false | |
default: '' | |
type: string | |
env: | |
# For easier reuse across the multiple chunks ('armbian/build' repo) | |
BUILD_REPOSITORY: "${{ github.repository_owner }}/armbian-build" | |
BUILD_REF: "extensions" # branch or tag or sha1 | |
# For easier reuse across the multiple chunks ('armbian/os' repo) | |
USERPATCHES_REPOSITORY: "" | |
USERPATCHES_REF: "" # branch or tag or sha1 | |
USERPATCHES_DIR: "" # folder inside USERPATCHES_REPOSITORY | |
# Armbian envs. Adjust to your needs. | |
# OCI_TARGET_BASE: "ghcr.io/${{ github.repository }}/" # This is picked up by the Docker launcher automatically. It does _not_ override the targets.yaml during info resolution. | |
DOCKER_ARMBIAN_BASE_COORDINATE_PREFIX: "ghcr.io/armsurvivors/armbian-builder:armbian-next-" # Use Docker image in same repo | |
DOCKER_SKIP_UPDATE: "yes" # Do not apt update/install/requirements/etc during Dockerfile build, trust DOCKER_ARMBIAN_BASE_COORDINATE_PREFIX's images are up-to-date | |
# Added to every build, even the prepare job. | |
EXTRA_PARAMS_ALL_BUILDS: "SHARE_LOG=yes ${{ github.event.inputs.extraParamsAllBuilds }}" | |
# Added to every image build arguments. | |
EXTRA_PARAMS_IMAGE: "'DONT_BUILD_ARTIFACTS=any' COMPRESS_OUTPUTIMAGE=xz SHOW_DEBIAN=yes SKIP_CUSTOM_REPO=${{github.event.inputs.skipCustomRepo}}" # IMAGE_VERSION is individually added at the build step. | |
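# Illustrative only: combining EXTRA_PARAMS_IMAGE and EXTRA_PARAMS_ALL_BUILDS with a matrix invocation,
# a resolved image-build command from the chunk jobs below would look roughly like the following
# (the BOARD/BRANCH/RELEASE values are hypothetical examples, not taken from this workflow):
#   bash ./compile.sh build BOARD=odroidhc4 BRANCH=edge RELEASE=bookworm \
#     'DONT_BUILD_ARTIFACTS=any' COMPRESS_OUTPUTIMAGE=xz SHOW_DEBIAN=yes SKIP_CUSTOM_REPO=no \
#     SHARE_LOG=yes OUTPUT_IMAGES_ONLY=yes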
jobs: | |
matrix_prep: | |
name: "JSON matrix: 5/2 :: 5 artifact chunks, 2 image chunks" | |
if: ${{ github.repository_owner == 'armsurvivors' }} | |
runs-on: [ "self-hosted", "Linux", 'armbian', "matrix-prepare" ] | |
steps: | |
#- name: Runner clean | |
# uses: armbian/actions/runner-clean@main | |
# Prepare date-based outputs. There are two so the GH release name and REVISION can be set independently.
- name: "Prepare date-based release outputs" | |
id: prepare-date-outputs | |
run: | | |
echo "images_release=$(date +%y.%m.%d)-armsurvivors-${{ github.run_number }}" >> $GITHUB_OUTPUT | |
echo "revision_release=$(date +%y.%m.%d)-armsurvivors-${{ github.run_number }}" >> $GITHUB_OUTPUT | |
# cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later | |
- name: Cleanup userpatches repo | |
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
run: rm -rf userpatches.repo | |
- name: Checkout build repo | |
uses: actions/checkout@v4 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners. | |
with: | |
repository: ${{ env.BUILD_REPOSITORY }} | |
ref: ${{ env.BUILD_REF }} | |
fetch-depth: 0 # fetch all history; much faster for self-hosted runners | |
clean: false # true is default. it *will* delete the host's /dev if mounted inside.
- name: "grab the sha1 of the latest commit of the build repo ${{ env.BUILD_REPOSITORY }}#${{ env.BUILD_REF }}" | |
id: latest-commit | |
run: echo "sha1=$(git rev-parse HEAD)" >> $GITHUB_OUTPUT | |
# clone the userpatches repo (`armbian/os`) | |
- name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}" | |
uses: actions/checkout@v4 | |
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
with: | |
repository: ${{ env.USERPATCHES_REPOSITORY }} | |
ref: ${{ env.USERPATCHES_REF }} | |
fetch-depth: 0 # fetch all history; much faster for self-hosted runners | |
clean: false # true is default. | |
path: userpatches.repo | |
- name: "Put userpatches in place, and remove userpatches repo" | |
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
run: | | |
mkdir -pv userpatches | |
rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/ | |
rm -rf userpatches.repo | |
# Login to ghcr.io, we're gonna do a lot of OCI lookups. | |
- name: Docker Login to GitHub Container Registry | |
uses: docker/login-action@v3 | |
with: | |
registry: ghcr.io | |
username: "${{ github.repository_owner }}" # GitHub username or org | |
password: ${{ secrets.GITHUB_TOKEN }} # GitHub actions builtin token. repo has to have pkg access. | |
- name: Prepare Info JSON and Matrices | |
id: prepare-matrix | |
run: | | |
# this sets the per-chunk "artifacts-chunk-*" and "images-chunk-*" outputs consumed below
bash ./compile.sh gha-matrix rpardini-generic ${{env.EXTRA_PARAMS_ALL_BUILDS}} \ | |
REVISION="${{ steps.prepare-date-outputs.outputs.revision_release }}" \ | |
TARGETS_FILTER_INCLUDE='${{ github.event.inputs.targetsFilterInclude || '' }}' \ | |
MATRIX_ARTIFACT_CHUNKS=5 \ | |
MATRIX_IMAGE_CHUNKS=2 \ | |
BETA=${{ github.event.inputs.nightly || 'yes' }} \ | |
CHECK_OCI=${{ github.event.inputs.checkOci || 'yes' }} \ | |
TARGETS_FILENAME=targets.yaml \ | |
SKIP_IMAGES=${{ github.event.inputs.skipImages || 'yes' }} | |
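# A sketch of the assumed output shape (not verified against the build framework): each
# artifacts-chunk-json-N / images-chunk-json-N output is a GHA matrix JSON along the lines of
#   {"include":[{"desc":"...","runs_on":["self-hosted","Linux"],"invocation":"...","fdepth":"0"}]}
# which the build-*-chunk-N jobs below consume via fromJSON().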
- name: "Logs: ${{ steps.prepare-matrix.outputs.logs_url }}" | |
run: | | |
echo "Logs: ${{ steps.prepare-matrix.outputs.logs_url }}" | |
- name: chown cache memoize/oci back to normal user | |
run: | | |
sudo chown -R $USER:$USER cache/memoize cache/oci/positive | |
# Store output/info folder in a GitHub Actions artifact | |
- uses: actions/upload-artifact@v4 | |
name: Upload output/info as GitHub Artifact | |
with: | |
name: build-info-json | |
path: output/info | |
- name: Prepare GH Release | |
uses: "marvinpinto/action-automatic-releases@latest" # @TODO this is outdated, needs replacement. Also it deletes the release if it already exists, which is not what we want. Might be necessary to move the tag. | |
if: ${{ github.event.inputs.skipImages == 'no' }} # this action deletes the release, so if skipping images, skip this too | |
with: | |
repo_token: "${{ secrets.GITHUB_TOKEN }}" | |
automatic_release_tag: "${{ steps.prepare-date-outputs.outputs.images_release }}" | |
prerelease: false | |
title: "Images ${{ steps.prepare-date-outputs.outputs.images_release }}" | |
outputs: | |
# not related to matrix | |
build-sha1: ${{ steps.latest-commit.outputs.sha1 }} | |
images_release: ${{ steps.prepare-date-outputs.outputs.images_release }} | |
revision_release: ${{ steps.prepare-date-outputs.outputs.revision_release }} | |
# template file: 150.per-chunk-artifacts_prep-outputs.yaml | |
# artifacts-1 of 5 | |
artifacts-chunk-json-1: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-1 }} | |
artifacts-chunk-not-empty-1: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-1 }} | |
artifacts-chunk-size-1: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-1 }} | |
# artifacts-2 of 5 | |
artifacts-chunk-json-2: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-2 }} | |
artifacts-chunk-not-empty-2: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-2 }} | |
artifacts-chunk-size-2: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-2 }} | |
# artifacts-3 of 5 | |
artifacts-chunk-json-3: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-3 }} | |
artifacts-chunk-not-empty-3: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-3 }} | |
artifacts-chunk-size-3: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-3 }} | |
# artifacts-4 of 5 | |
artifacts-chunk-json-4: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-4 }} | |
artifacts-chunk-not-empty-4: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-4 }} | |
artifacts-chunk-size-4: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-4 }} | |
# artifacts-5 of 5 | |
artifacts-chunk-json-5: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-5 }} | |
artifacts-chunk-not-empty-5: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-5 }} | |
artifacts-chunk-size-5: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-5 }} | |
# template file: 151.per-chunk-images_prep-outputs.yaml | |
# images-1 of 2
images-chunk-json-1: ${{ steps.prepare-matrix.outputs.images-chunk-json-1 }} | |
images-chunk-not-empty-1: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-1 }} | |
images-chunk-size-1: ${{ steps.prepare-matrix.outputs.images-chunk-size-1 }} | |
# images-2 of 2
images-chunk-json-2: ${{ steps.prepare-matrix.outputs.images-chunk-json-2 }} | |
images-chunk-not-empty-2: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-2 }} | |
images-chunk-size-2: ${{ steps.prepare-matrix.outputs.images-chunk-size-2 }} | |
# template file: 250.single_aggr-jobs.yaml | |
# ------ aggregate all artifact chunks into a single dependency ------- | |
all-artifacts-ready: | |
name: "5 artifacts chunks ready" | |
runs-on: ubuntu-latest # not going to run, anyway, but is required. | |
if: ${{ !cancelled() && ( 1 == 2 ) }} # i.e.: never runs; only an aggregation point.
needs: [ "matrix_prep", "build-artifacts-chunk-1","build-artifacts-chunk-2","build-artifacts-chunk-3","build-artifacts-chunk-4","build-artifacts-chunk-5" ] # <-- HERE: all artifact chunk numbers. | |
steps: | |
- name: fake step | |
run: uptime | |
all-images-ready: | |
name: "2 image chunks ready" | |
runs-on: ubuntu-latest # not going to run, anyway, but is required. | |
if: ${{ !cancelled() && ( 1 == 2 ) }} # i.e.: never runs; only an aggregation point.
needs: [ "matrix_prep", "build-images-chunk-1","build-images-chunk-2" ] # <-- HERE: all image chunk numbers. | |
steps: | |
- name: fake step | |
run: uptime | |
all-artifacts-and-images-ready: | |
name: "5 artifact chunks and 2 image chunks ready" | |
runs-on: ubuntu-latest # not going to run, anyway, but is required. | |
if: ${{ !cancelled() && ( 1 == 2 ) }} # i.e.: never runs; only an aggregation point.
needs: [ "matrix_prep", "all-artifacts-ready", "all-images-ready" ] | |
steps: | |
- name: fake step | |
run: uptime | |
all-artifacts-and-repo-ready: | |
name: "5 artifact chunks and apt repository ready" | |
runs-on: ubuntu-latest # not going to run, anyway, but is required. | |
if: ${{ !cancelled() && ( 1 == 2 ) }} # i.e.: never runs; only an aggregation point.
needs: [ "matrix_prep", "all-artifacts-ready", "publish-debs-to-repo" ] | |
steps: | |
- name: fake step | |
run: uptime | |
# template file: 550.per-chunk-artifacts_job.yaml | |
"build-artifacts-chunk-1": # templated "build-artifacts-chunk-1" | |
if: ${{ github.repository_owner == 'armsurvivors' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-1 == 'yes' }} # <-- HERE: Chunk number. | |
needs: [ "matrix_prep" ] | |
strategy: | |
fail-fast: false # let other jobs try to complete if one fails | |
matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-1) }} # <-- HERE: Chunk number. | |
name: ${{ matrix.desc || 'Empty A1' }} # <-- HERE: Chunk number. | |
timeout-minutes: 120 | |
runs-on: ${{ matrix.runs_on }} | |
steps: | |
#- name: Runner clean | |
# uses: armbian/actions/runner-clean@main | |
# Login to ghcr.io, for later uploading rootfs to ghcr.io | |
- name: Docker Login to GitHub Container Registry | |
uses: docker/login-action@v3 | |
with: | |
registry: ghcr.io | |
username: "${{ github.repository_owner }}" # GitHub username or org | |
password: ${{ secrets.GITHUB_TOKEN }} # GitHub actions builtin token. repo has to have pkg access. | |
# cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later | |
- name: Cleanup userpatches repo | |
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
run: rm -rf userpatches.repo | |
- name: "Checkout build repo with depth ${{ matrix.fdepth }}" | |
uses: actions/checkout@v4 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners. | |
with: | |
repository: ${{ env.BUILD_REPOSITORY }} | |
ref: ${{ needs.matrix_prep.outputs.build-sha1 }} | |
fetch-depth: ${{ matrix.fdepth }} # fetch all history for self-hosted, but shallow for GH-hosted | |
clean: false # true is default. it *will* delete the host's /dev if mounted inside.
# clone the userpatches repo (`armbian/os`) | |
- name: "Checkout userpatches repo with depth ${{ matrix.fdepth }}: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}" | |
uses: actions/checkout@v4 | |
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
with: | |
repository: ${{ env.USERPATCHES_REPOSITORY }} | |
ref: ${{ env.USERPATCHES_REF }} | |
fetch-depth: ${{ matrix.fdepth }} # fetch all history for self-hosted, but shallow for GH-hosted | |
clean: false # true is default. | |
path: userpatches.repo | |
- name: "Put userpatches in place, and remove userpatches repo" | |
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
run: | | |
mkdir -pv userpatches | |
rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/ | |
rm -rf userpatches.repo | |
- name: Build ${{matrix.desc}} | |
id: build | |
run: | | |
bash ./compile.sh ${{ matrix.invocation }} ${{env.EXTRA_PARAMS_ALL_BUILDS}} UPLOAD_TO_OCI_ONLY=yes | |
- name: "Logs: ${{ steps.build.outputs.logs_url }}" | |
if: always() | |
run: | | |
echo "Logs: ${{ steps.build.outputs.logs_url }}" | |
"build-artifacts-chunk-2": # templated "build-artifacts-chunk-2" | |
if: ${{ github.repository_owner == 'armsurvivors' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-2 == 'yes' }} # <-- HERE: Chunk number. | |
needs: [ "matrix_prep" ] | |
strategy: | |
fail-fast: false # let other jobs try to complete if one fails | |
matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-2) }} # <-- HERE: Chunk number. | |
name: ${{ matrix.desc || 'Empty A2' }} # <-- HERE: Chunk number. | |
timeout-minutes: 120 | |
runs-on: ${{ matrix.runs_on }} | |
steps: | |
#- name: Runner clean | |
# uses: armbian/actions/runner-clean@main | |
# Login to ghcr.io, for later uploading rootfs to ghcr.io | |
- name: Docker Login to GitHub Container Registry | |
uses: docker/login-action@v3 | |
with: | |
registry: ghcr.io | |
username: "${{ github.repository_owner }}" # GitHub username or org | |
password: ${{ secrets.GITHUB_TOKEN }} # GitHub actions builtin token. repo has to have pkg access. | |
# cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later | |
- name: Cleanup userpatches repo | |
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
run: rm -rf userpatches.repo | |
- name: "Checkout build repo with depth ${{ matrix.fdepth }}" | |
uses: actions/checkout@v4 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners. | |
with: | |
repository: ${{ env.BUILD_REPOSITORY }} | |
ref: ${{ needs.matrix_prep.outputs.build-sha1 }} | |
fetch-depth: ${{ matrix.fdepth }} # fetch all history for self-hosted, but shallow for GH-hosted | |
clean: false # true is default. it *will* delete the host's /dev if mounted inside.
# clone the userpatches repo (`armbian/os`) | |
- name: "Checkout userpatches repo with depth ${{ matrix.fdepth }}: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}" | |
uses: actions/checkout@v4 | |
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
with: | |
repository: ${{ env.USERPATCHES_REPOSITORY }} | |
ref: ${{ env.USERPATCHES_REF }} | |
fetch-depth: ${{ matrix.fdepth }} # fetch all history for self-hosted, but shallow for GH-hosted | |
clean: false # true is default. | |
path: userpatches.repo | |
- name: "Put userpatches in place, and remove userpatches repo" | |
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
run: | | |
mkdir -pv userpatches | |
rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/ | |
rm -rf userpatches.repo | |
- name: Build ${{matrix.desc}} | |
id: build | |
run: | | |
bash ./compile.sh ${{ matrix.invocation }} ${{env.EXTRA_PARAMS_ALL_BUILDS}} UPLOAD_TO_OCI_ONLY=yes | |
- name: "Logs: ${{ steps.build.outputs.logs_url }}" | |
if: always() | |
run: | | |
echo "Logs: ${{ steps.build.outputs.logs_url }}" | |
"build-artifacts-chunk-3": # templated "build-artifacts-chunk-3" | |
if: ${{ github.repository_owner == 'armsurvivors' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-3 == 'yes' }} # <-- HERE: Chunk number. | |
needs: [ "matrix_prep" ] | |
strategy: | |
fail-fast: false # let other jobs try to complete if one fails | |
matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-3) }} # <-- HERE: Chunk number. | |
name: ${{ matrix.desc || 'Empty A3' }} # <-- HERE: Chunk number. | |
timeout-minutes: 120 | |
runs-on: ${{ matrix.runs_on }} | |
steps: | |
#- name: Runner clean | |
# uses: armbian/actions/runner-clean@main | |
# Login to ghcr.io, for later uploading rootfs to ghcr.io | |
- name: Docker Login to GitHub Container Registry | |
uses: docker/login-action@v3 | |
with: | |
registry: ghcr.io | |
username: "${{ github.repository_owner }}" # GitHub username or org | |
password: ${{ secrets.GITHUB_TOKEN }} # GitHub actions builtin token. repo has to have pkg access. | |
# cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later | |
- name: Cleanup userpatches repo | |
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
run: rm -rf userpatches.repo | |
- name: "Checkout build repo with depth ${{ matrix.fdepth }}" | |
uses: actions/checkout@v4 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners. | |
with: | |
repository: ${{ env.BUILD_REPOSITORY }} | |
ref: ${{ needs.matrix_prep.outputs.build-sha1 }} | |
fetch-depth: ${{ matrix.fdepth }} # fetch all history for self-hosted, but shallow for GH-hosted | |
clean: false # true is default. it *will* delete the host's /dev if mounted inside.
# clone the userpatches repo (`armbian/os`) | |
- name: "Checkout userpatches repo with depth ${{ matrix.fdepth }}: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}" | |
uses: actions/checkout@v4 | |
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
with: | |
repository: ${{ env.USERPATCHES_REPOSITORY }} | |
ref: ${{ env.USERPATCHES_REF }} | |
fetch-depth: ${{ matrix.fdepth }} # fetch all history for self-hosted, but shallow for GH-hosted | |
clean: false # true is default. | |
path: userpatches.repo | |
- name: "Put userpatches in place, and remove userpatches repo" | |
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
run: | | |
mkdir -pv userpatches | |
rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/ | |
rm -rf userpatches.repo | |
- name: Build ${{matrix.desc}} | |
id: build | |
run: | | |
bash ./compile.sh ${{ matrix.invocation }} ${{env.EXTRA_PARAMS_ALL_BUILDS}} UPLOAD_TO_OCI_ONLY=yes | |
- name: "Logs: ${{ steps.build.outputs.logs_url }}" | |
if: always() | |
run: | | |
echo "Logs: ${{ steps.build.outputs.logs_url }}" | |
"build-artifacts-chunk-4": # templated "build-artifacts-chunk-4" | |
if: ${{ github.repository_owner == 'armsurvivors' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-4 == 'yes' }} # <-- HERE: Chunk number. | |
needs: [ "matrix_prep" ] | |
strategy: | |
fail-fast: false # let other jobs try to complete if one fails | |
matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-4) }} # <-- HERE: Chunk number. | |
name: ${{ matrix.desc || 'Empty A4' }} # <-- HERE: Chunk number. | |
timeout-minutes: 120 | |
runs-on: ${{ matrix.runs_on }} | |
steps: | |
#- name: Runner clean | |
# uses: armbian/actions/runner-clean@main | |
# Login to ghcr.io, for later uploading rootfs to ghcr.io | |
- name: Docker Login to GitHub Container Registry | |
uses: docker/login-action@v3 | |
with: | |
registry: ghcr.io | |
username: "${{ github.repository_owner }}" # GitHub username or org | |
password: ${{ secrets.GITHUB_TOKEN }} # GitHub actions builtin token. repo has to have pkg access. | |
# cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later | |
- name: Cleanup userpatches repo | |
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
run: rm -rf userpatches.repo | |
- name: "Checkout build repo with depth ${{ matrix.fdepth }}" | |
uses: actions/checkout@v4 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners. | |
with: | |
repository: ${{ env.BUILD_REPOSITORY }} | |
ref: ${{ needs.matrix_prep.outputs.build-sha1 }} | |
fetch-depth: ${{ matrix.fdepth }} # fetch all history for self-hosted, but shallow for GH-hosted | |
clean: false # true is default. it *will* delete the host's /dev if mounted inside.
# clone the userpatches repo (`armbian/os`) | |
- name: "Checkout userpatches repo with depth ${{ matrix.fdepth }}: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}" | |
uses: actions/checkout@v4 | |
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
with: | |
repository: ${{ env.USERPATCHES_REPOSITORY }} | |
ref: ${{ env.USERPATCHES_REF }} | |
fetch-depth: ${{ matrix.fdepth }} # fetch all history for self-hosted, but shallow for GH-hosted | |
clean: false # true is default. | |
path: userpatches.repo | |
- name: "Put userpatches in place, and remove userpatches repo" | |
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
run: | | |
mkdir -pv userpatches | |
rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/ | |
rm -rf userpatches.repo | |
- name: Build ${{matrix.desc}} | |
id: build | |
run: | | |
bash ./compile.sh ${{ matrix.invocation }} ${{env.EXTRA_PARAMS_ALL_BUILDS}} UPLOAD_TO_OCI_ONLY=yes | |
- name: "Logs: ${{ steps.build.outputs.logs_url }}" | |
if: always() | |
run: | | |
echo "Logs: ${{ steps.build.outputs.logs_url }}" | |
"build-artifacts-chunk-5": # templated "build-artifacts-chunk-5" | |
if: ${{ github.repository_owner == 'armsurvivors' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-5 == 'yes' }} # <-- HERE: Chunk number. | |
needs: [ "matrix_prep" ] | |
strategy: | |
fail-fast: false # let other jobs try to complete if one fails | |
matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-5) }} # <-- HERE: Chunk number. | |
name: ${{ matrix.desc || 'Empty A5' }} # <-- HERE: Chunk number. | |
timeout-minutes: 120 | |
runs-on: ${{ matrix.runs_on }} | |
steps: | |
#- name: Runner clean | |
# uses: armbian/actions/runner-clean@main | |
# Login to ghcr.io, for later uploading rootfs to ghcr.io | |
- name: Docker Login to GitHub Container Registry | |
uses: docker/login-action@v3 | |
with: | |
registry: ghcr.io | |
username: "${{ github.repository_owner }}" # GitHub username or org | |
password: ${{ secrets.GITHUB_TOKEN }} # GitHub actions builtin token. repo has to have pkg access. | |
# cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later | |
- name: Cleanup userpatches repo | |
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
run: rm -rf userpatches.repo | |
- name: "Checkout build repo with depth ${{ matrix.fdepth }}" | |
uses: actions/checkout@v4 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners. | |
with: | |
repository: ${{ env.BUILD_REPOSITORY }} | |
ref: ${{ needs.matrix_prep.outputs.build-sha1 }} | |
fetch-depth: ${{ matrix.fdepth }} # fetch all history for self-hosted, but shallow for GH-hosted | |
clean: false # true is default. it *will* delete the host's /dev if mounted inside.
# clone the userpatches repo (`armbian/os`) | |
- name: "Checkout userpatches repo with depth ${{ matrix.fdepth }}: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}" | |
uses: actions/checkout@v4 | |
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
with: | |
repository: ${{ env.USERPATCHES_REPOSITORY }} | |
ref: ${{ env.USERPATCHES_REF }} | |
fetch-depth: ${{ matrix.fdepth }} # fetch all history for self-hosted, but shallow for GH-hosted | |
clean: false # true is default. | |
path: userpatches.repo | |
- name: "Put userpatches in place, and remove userpatches repo" | |
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
run: | | |
mkdir -pv userpatches | |
rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/ | |
rm -rf userpatches.repo | |
- name: Build ${{matrix.desc}} | |
id: build | |
run: | | |
bash ./compile.sh ${{ matrix.invocation }} ${{env.EXTRA_PARAMS_ALL_BUILDS}} UPLOAD_TO_OCI_ONLY=yes | |
- name: "Logs: ${{ steps.build.outputs.logs_url }}" | |
if: always() | |
run: | | |
echo "Logs: ${{ steps.build.outputs.logs_url }}" | |
# template file: 650.per-chunk-images_job.yaml | |
"build-images-chunk-1": # templated "build-images-chunk-1" | |
needs: [ "matrix_prep", "all-artifacts-and-repo-ready" ] | |
timeout-minutes: 60 | |
if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armsurvivors' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-1 == 'yes' ) }} # <-- HERE: Chunk number. | |
strategy: | |
fail-fast: false # let other jobs try to complete if one fails | |
matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-1) }} # <-- HERE: Chunk number. | |
name: ${{ matrix.desc || 'Empty I1' }} # <-- HERE: Chunk number. | |
runs-on: ${{ matrix.runs_on }} | |
steps: | |
# Login to ghcr.io, we'll be downloading a lot from ghcr.io | |
- name: Docker Login to GitHub Container Registry | |
uses: docker/login-action@v3 | |
with: | |
registry: ghcr.io | |
username: "${{ github.repository_owner }}" # GitHub username or org | |
password: ${{ secrets.GITHUB_TOKEN }} # GitHub actions builtin token. repo has to have pkg access. | |
# cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later | |
- name: Cleanup userpatches repo | |
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
run: rm -rf userpatches.repo | |
- name: "Checkout build repo with depth ${{ matrix.fdepth }}" | |
uses: actions/checkout@v4 | |
with: | |
repository: ${{ env.BUILD_REPOSITORY }} | |
ref: ${{ needs.matrix_prep.outputs.build-sha1 }} | |
clean: false # true is default. it *will* delete the host's /dev if mounted inside.
fetch-depth: ${{ matrix.fdepth }} # fetch all history for self-hosted, but shallow for GH-hosted | |
# clone the userpatches repo (`armbian/os`) | |
- name: "Checkout userpatches repo with depth ${{ matrix.fdepth }}: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}" | |
uses: actions/checkout@v4 | |
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
with: | |
repository: ${{ env.USERPATCHES_REPOSITORY }} | |
ref: ${{ env.USERPATCHES_REF }} | |
fetch-depth: ${{ matrix.fdepth }} # fetch all history for self-hosted, but shallow for GH-hosted | |
clean: false # true is default. | |
path: userpatches.repo | |
- name: "Put userpatches in place, and remove userpatches repo" | |
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
run: | | |
mkdir -pv userpatches | |
rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/ | |
rm -rf userpatches.repo | |
- name: Cleanup leftover output images | |
run: | | |
rm -rfv output/images userpatches/VERSION | |
- name: ${{matrix.desc}} | |
id: build-one-image | |
run: | | |
bash ./compile.sh ${{ matrix.invocation }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}} OUTPUT_IMAGES_ONLY=yes | |
- name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}" | |
if: always() | |
run: | | |
echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}" | |
# @TODO: let's upload to the GH release ourselves instead of relying on this action
- name: Release ${{ matrix.board }} | |
id: release1 | |
continue-on-error: true | |
uses: softprops/action-gh-release@v2 | |
with: | |
tag_name: ${{ needs.matrix_prep.outputs.images_release }} | |
files: | | |
output/images/*.xz | |
- name: Release ${{ matrix.board }} (retry if 1st failed) | |
id: release2 | |
if: ${{ steps.release1.outcome == 'failure'}} | |
uses: softprops/action-gh-release@v2 | |
with: | |
tag_name: ${{ needs.matrix_prep.outputs.images_release }} | |
files: | | |
output/images/*.xz | |
- name: Cleanup output images | |
if: always() | |
run: | | |
rm -rfv output/images userpatches/VERSION | |
"build-images-chunk-2": # templated "build-images-chunk-2" | |
needs: [ "matrix_prep", "all-artifacts-and-repo-ready" ] | |
timeout-minutes: 60 | |
if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armsurvivors' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-2 == 'yes' ) }} # <-- HERE: Chunk number. | |
strategy: | |
fail-fast: false # let other jobs try to complete if one fails | |
matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-2) }} # <-- HERE: Chunk number. | |
name: ${{ matrix.desc || 'Empty I2' }} # <-- HERE: Chunk number. | |
runs-on: ${{ matrix.runs_on }} | |
steps: | |
# Login to ghcr.io, we'll be downloading a lot from ghcr.io | |
- name: Docker Login to GitHub Container Registry | |
uses: docker/login-action@v3 | |
with: | |
registry: ghcr.io | |
username: "${{ github.repository_owner }}" # GitHub username or org | |
password: ${{ secrets.GITHUB_TOKEN }} # GitHub actions builtin token. repo has to have pkg access. | |
# cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later | |
- name: Cleanup userpatches repo | |
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
run: rm -rf userpatches.repo | |
- name: "Checkout build repo with depth ${{ matrix.fdepth }}" | |
uses: actions/checkout@v4 | |
with: | |
repository: ${{ env.BUILD_REPOSITORY }} | |
ref: ${{ needs.matrix_prep.outputs.build-sha1 }} | |
clean: false # true is default. it *will* delete the host's /dev if mounted inside.
fetch-depth: ${{ matrix.fdepth }} # fetch all history for self-hosted, but shallow for GH-hosted | |
# clone the userpatches repo (`armbian/os`) | |
- name: "Checkout userpatches repo with depth ${{ matrix.fdepth }}: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}" | |
uses: actions/checkout@v4 | |
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
with: | |
repository: ${{ env.USERPATCHES_REPOSITORY }} | |
ref: ${{ env.USERPATCHES_REF }} | |
fetch-depth: ${{ matrix.fdepth }} # fetch all history for self-hosted, but shallow for GH-hosted | |
clean: false # true is default. | |
path: userpatches.repo | |
- name: "Put userpatches in place, and remove userpatches repo" | |
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
run: | | |
mkdir -pv userpatches | |
rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/ | |
rm -rf userpatches.repo | |
- name: Cleanup leftover output images | |
run: | | |
rm -rfv output/images userpatches/VERSION | |
- name: ${{matrix.desc}} | |
id: build-one-image | |
run: | | |
bash ./compile.sh ${{ matrix.invocation }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}} OUTPUT_IMAGES_ONLY=yes | |
- name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}" | |
if: always() | |
run: | | |
echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}" | |
# @TODO: let's upload to the GH release ourselves instead of relying on this action
- name: Release ${{ matrix.board }} | |
id: release1 | |
continue-on-error: true | |
uses: softprops/action-gh-release@v2 | |
with: | |
tag_name: ${{ needs.matrix_prep.outputs.images_release }} | |
files: | | |
output/images/*.xz | |
- name: Release ${{ matrix.board }} (retry if 1st failed) | |
id: release2 | |
if: ${{ steps.release1.outcome == 'failure'}} | |
uses: softprops/action-gh-release@v2 | |
with: | |
tag_name: ${{ needs.matrix_prep.outputs.images_release }} | |
files: | | |
output/images/*.xz | |
- name: Cleanup output images | |
if: always() | |
run: | | |
rm -rfv output/images userpatches/VERSION | |
# template file: 750.single_repo.yaml | |
# ------ publish packages to repository ------- | |
publish-debs-to-repo: | |
name: "publish debs to repo" | |
runs-on: [ "self-hosted", "Linux", 'armbian', "debs-to-repo" ] | |
if: ${{ !failure() && !cancelled() && (github.event.inputs.targetsFilterInclude == '') }} # i.e.: run only if dependencies succeeded. See https://github.com/orgs/community/discussions/45058#discussioncomment-4817378
needs: [ "matrix_prep", "all-artifacts-ready" ] | |
steps: | |
# Prepare dependencies. | |
# If no /usr/bin/gpg, install gnupg2 | |
# If no /usr/bin/reprepro, install reprepro | |
- name: Install dependencies | |
run: | | |
if [ ! -e /usr/bin/gpg ]; then | |
sudo apt-get update | |
sudo apt-get install -y gnupg2 | |
fi | |
if [ ! -e /usr/bin/reprepro ]; then | |
sudo apt-get update | |
sudo apt-get install -y reprepro | |
fi | |
# Login to ghcr.io, for later uploading rootfs to ghcr.io | |
- name: Docker Login to GitHub Container Registry | |
uses: docker/login-action@v3 | |
with: | |
registry: ghcr.io | |
username: "${{ github.repository_owner }}" # GitHub username or org | |
password: ${{ secrets.GITHUB_TOKEN }} # GitHub actions builtin token. repo has to have pkg access. | |
# cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later | |
- name: Cleanup userpatches repo | |
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
run: rm -rf userpatches.repo | |
- name: Checkout build repo | |
uses: actions/checkout@v4 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners. | |
with: | |
repository: ${{ env.BUILD_REPOSITORY }} | |
ref: ${{ needs.matrix_prep.outputs.build-sha1 }} | |
fetch-depth: 0 # fetch all history; much faster for self-hosted runners | |
clean: false # true is default. it *will* delete the host's /dev if mounted inside.
# clone the userpatches repo (`armbian/os`) | |
- name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}" | |
uses: actions/checkout@v4 | |
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
with: | |
repository: ${{ env.USERPATCHES_REPOSITORY }} | |
ref: ${{ env.USERPATCHES_REF }} | |
clean: false # true is default. | |
path: userpatches.repo | |
- name: "Put userpatches in place, and remove userpatches repo" | |
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }} | |
run: | | |
mkdir -pv userpatches | |
rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/ | |
rm -rf userpatches.repo | |
# Clean off output/info, if any | |
- name: Cleanup output/info | |
run: | | |
rm -rfv output/info | |
mkdir -pv output | |
- name: Clean old debs from previous runs # each run touches every file it publishes, so files untouched for a few days are safe to delete
run: | | |
set -x | |
du -h -d 1 output | |
find output/packages-hashed output/debs output/debs-beta -type f -mtime +3 -delete || true
# Download the artifacts (output/info) produced by the prepare-matrix job. | |
- name: Download artifacts | |
uses: actions/download-artifact@v4 | |
with: | |
name: build-info-json | |
path: output/info | |
# List the artifacts we downloaded | |
- name: List artifacts | |
run: | | |
ls -laht output/info | |
- name: Import GPG key from GitHub secrets | |
id: import_gpg | |
uses: crazy-max/ghaction-import-gpg@01dd5d3ca463c7f10f7f4f7b4f177225ac661ee4 # v6.1.0 # https://github.com/crazy-max/ghaction-import-gpg/releases | |
with: | |
gpg_private_key: ${{ secrets.REPO_GPG_PRIVATE_KEY }} | |
passphrase: ${{ secrets.REPO_GPG_PASSPHRASE }} | |
- name: Show GPG user IDs | |
run: | | |
echo "fingerprint: ${{ steps.import_gpg.outputs.fingerprint }}" | |
echo "keyid: ${{ steps.import_gpg.outputs.keyid }}" | |
echo "name: ${{ steps.import_gpg.outputs.name }}" | |
echo "email: ${{ steps.import_gpg.outputs.email }}" | |
- name: List loaded GPG keys
run: gpg -K || true | |
#- name: restart repo from scratch | |
# run: | | |
# sudo rm -rf /opt/armbian_repo | |
- name: create and ensure ownership of /opt/armbian_repo | |
run: | | |
sudo mkdir -pv /opt/armbian_repo | |
sudo chown -R $USER:$USER /opt/armbian_repo | |
- name: copy the reprepro db into output/reprepro-db
run: | | |
mkdir -pv output/reprepro-db/db | |
if [[ -d /opt/armbian_repo/db ]]; then | |
cp -pv /opt/armbian_repo/db/packages.db output/reprepro-db/db/packages.db | |
else | |
rm -rf output/reprepro-db | |
fi | |
- name: Integrated reprepro-based deb-download and reprepro script generation | |
id: rolling-repo-reprepro-download-script | |
run: | | |
bash ./compile.sh rpardini-generic debs-to-repo-rolling-reprepro REPO_REPREPRO_PATH=/armbian/output/reprepro-db REPO_GPG_KEYID="${{ steps.import_gpg.outputs.keyid }}" BETA=${{ github.event.inputs.nightly || 'yes' }} ${{env.EXTRA_PARAMS_ALL_BUILDS}} | |
- name: "Logs debs-to-repo-rolling-reprepro: ${{ steps.rolling-repo-reprepro-download-script.outputs.logs_url }}" | |
run: | | |
echo "Logs debs-to-repo-rolling-reprepro: ${{ steps.rolling-repo-reprepro-download-script.outputs.logs_url }}" | |
#- name: Prepare the reprepro scripts | |
# id: prepare-scripts-reprepro | |
# run: | | |
# bash ./compile.sh rpardini-generic debs-to-repo-reprepro BETA=${{ github.event.inputs.nightly || 'yes' }} ${{env.EXTRA_PARAMS_ALL_BUILDS}} | |
# | |
#- name: "Logs debs-to-repo-reprepro: ${{ steps.prepare-scripts-reprepro.outputs.logs_url }}" | |
# run: | | |
# echo "Logs debs-to-repo-reprepro: ${{ steps.prepare-scripts-reprepro.outputs.logs_url }}" | |
- name: Run the reprepro scripts | |
id: run-scripts-reprepro | |
env: | |
REPO_GPG_KEYID: ${{ steps.import_gpg.outputs.keyid }} | |
REPO_LOCATION: /opt/armbian_repo | |
REPO_CONF_LOCATION: /opt/armbian_repo/conf | |
run: | | |
set -x | |
export REPREPRO_INFO_DIR="$(pwd)/output/info/reprepro" | |
# Note: the Jinja template for this workflow uses "[" (bracket) as its delimiter, so bracket expressions below have to be escaped with raw/endraw in the template.
if [[ "${{ github.event.inputs.nightly || 'yes' }}" == "yes" ]]; then | |
export INCOMING_DEBS_DIR="$(pwd)/output/debs-beta" | |
else | |
export INCOMING_DEBS_DIR="$(pwd)/output/debs" | |
fi | |
bash -x output/info/reprepro/reprepro.sh | |
- name: export the GPG public key from the agent into repo files | |
run: | | |
rm -fv /opt/armbian_repo/armbian-next.gpg | |
gpg --batch --export --output /opt/armbian_repo/armbian-next.gpg "${{ steps.import_gpg.outputs.keyid }}" | |
rm -fv /opt/armbian_repo/armbian-next.asc | |
gpg --batch --export --output /opt/armbian_repo/armbian-next.asc --armor "${{ steps.import_gpg.outputs.keyid }}" | |
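# Hypothetical client-side usage of the published key and repo (host, suite and component are placeholders, not defined by this workflow):
#   curl -fsSL http://<repo-host>:8081/armbian-next.asc | sudo gpg --dearmor -o /usr/share/keyrings/armbian-next.gpg
#   echo "deb [signed-by=/usr/share/keyrings/armbian-next.gpg] http://<repo-host>:8081 <suite> <component>" | sudo tee /etc/apt/sources.list.d/armbian-next.list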
- name: Deploy nginx and set it up to serve the repo over HTTP on port 8081
run: | | |
if [ ! -e /usr/sbin/nginx ]; then | |
sudo apt-get update | |
sudo apt-get install -y nginx-full | |
fi | |
cat <<EOF | sudo tee /etc/nginx/sites-enabled/armbian_repo.conf | |
server { | |
listen 8081; | |
access_log /var/log/nginx/repo-access.log; | |
error_log /var/log/nginx/repo-error.log; | |
location / { | |
root /opt/armbian_repo; | |
autoindex on; | |
} | |
location ~ /(.*)/conf { | |
deny all; | |
} | |
location ~ /(.*)/db { | |
deny all; | |
} | |
} | |
EOF | |
sudo nginx -t | |
sudo systemctl restart nginx | |
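# Quick smoke test (illustrative): the repo root should now autoindex on port 8081, while
# any .../conf and .../db paths are denied by the location rules above:
#   curl -s -o /dev/null -w '%{http_code}\n' http://localhost:8081/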
- name: Run the OCI-tagging script | |
id: run-scripts-oci-tagging | |
run: | | |
set -x | |
[[ ! -f /usr/local/bin/oras ]] && wget -O- "https://github.com/oras-project/oras/releases/download/v1.0.1/oras_1.0.1_linux_arm64.tar.gz" | sudo tar xzf - -C/usr/local/bin/ && oras version | |
bash -x output/info/reprepro/oci_tag_versions.sh | |
# # Generate the repo signing private key
# gpg --batch --passphrase "supersecretpassphrase" --quick-generate-key "armbian-next (apt) <[email protected]>" default default never
#
# # Export the private key
# gpg --batch --pinentry-mode=loopback --yes --passphrase "supersecretpassphrase" --armor --output apt-repo-private.asc --export-secret-key
#
# # Put the contents of apt-repo-private.asc in the REPO_GPG_PRIVATE_KEY secret
# # Put "supersecretpassphrase" in the REPO_GPG_PASSPHRASE secret
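# # Optional sanity check before storing the secret (illustrative): list the exported key without importing it
# gpg --show-keys apt-repo-private.asc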