This commit is contained in:
Rob Taylor 2026-03-11 02:51:00 +00:00 committed by GitHub
commit ff5d9d1f8a
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
8 changed files with 1195 additions and 29 deletions

715
.github/workflows/action-tests.yml vendored Normal file
View file

@ -0,0 +1,715 @@
name: Action Tests
on:
workflow_dispatch:
inputs:
debug:
description: "Run in debug mode."
type: boolean
required: false
default: false
push:
branches: [master, dev, staging]
pull_request:
types: [opened, synchronize]
env:
DEBUG: ${{ github.event.inputs.debug || false }}
# Test for overrides in built-in shell options (regression issue 98).
SHELLOPTS: errexit:pipefail
jobs:
# === Core Functionality ===
list_all_versions:
runs-on: ubuntu-latest
name: List all package versions (including deps).
steps:
- uses: actions/checkout@v4
- name: Execute
id: execute
uses: ./
with:
packages: xdot=1.3-1
version: ${{ github.run_id }}-${{ github.run_attempt }}-list_all_versions
debug: ${{ env.DEBUG }}
- name: Verify
run: |
echo "cache-hit = ${{ steps.execute.outputs.cache-hit }}"
echo "package-version-list = ${{ steps.execute.outputs.package-version-list }}"
echo "all-package-version-list = ${{ steps.execute.outputs.all-package-version-list }}"
# Verify cache miss on first run.
test "${{ steps.execute.outputs.cache-hit }}" = "false"
# Verify the main package is in the all-packages list.
echo "${{ steps.execute.outputs.all-package-version-list }}" | grep -q "xdot=1.3-1"
shell: bash
list_versions:
runs-on: ubuntu-latest
name: List package versions.
steps:
- uses: actions/checkout@v4
- name: Execute
id: execute
uses: ./
with:
packages: xdot rolldice
version: ${{ github.run_id }}-${{ github.run_attempt }}-list_versions
debug: ${{ env.DEBUG }}
- name: Verify
run: |
echo "cache-hit = ${{ steps.execute.outputs.cache-hit }}"
echo "package-version-list = ${{ steps.execute.outputs.package-version-list }}"
test "${{ steps.execute.outputs.cache-hit }}" = "false"
echo "${{ steps.execute.outputs.package-version-list }}" | grep -q "xdot="
echo "${{ steps.execute.outputs.package-version-list }}" | grep -q "rolldice="
shell: bash
standard_workflow_install:
runs-on: ubuntu-latest
name: Standard workflow install package and cache.
steps:
- uses: actions/checkout@v4
- name: Execute
id: execute
uses: ./
with:
packages: xdot rolldice
version: ${{ github.run_id }}-${{ github.run_attempt }}-standard_workflow
debug: ${{ env.DEBUG }}
- name: Verify
if: steps.execute.outputs.cache-hit != 'false'
run: |
echo "cache-hit = ${{ steps.execute.outputs.cache-hit }}"
exit 1
shell: bash
standard_workflow_install_with_new_version:
needs: standard_workflow_install
runs-on: ubuntu-latest
name: Standard workflow packages with new version.
steps:
- uses: actions/checkout@v4
- name: Execute
id: execute
uses: ./
with:
packages: xdot rolldice
version: ${{ github.run_id }}-${{ github.run_attempt }}-standard_workflow_install_with_new_version
debug: ${{ env.DEBUG }}
- name: Verify
if: steps.execute.outputs.cache-hit != 'false'
run: |
echo "cache-hit = ${{ steps.execute.outputs.cache-hit }}"
exit 1
shell: bash
standard_workflow_restore:
needs: standard_workflow_install
runs-on: ubuntu-latest
name: Standard workflow restore cached packages.
steps:
- uses: actions/checkout@v4
- name: Execute
id: execute
uses: ./
with:
packages: xdot rolldice
version: ${{ github.run_id }}-${{ github.run_attempt }}-standard_workflow
debug: ${{ env.DEBUG }}
- name: Verify
if: steps.execute.outputs.cache-hit != 'true'
run: |
echo "cache-hit = ${{ steps.execute.outputs.cache-hit }}"
exit 1
shell: bash
standard_workflow_restore_with_packages_out_of_order:
needs: standard_workflow_install
runs-on: ubuntu-latest
name: Standard workflow restore with packages out of order.
steps:
- uses: actions/checkout@v4
- name: Execute
id: execute
uses: ./
with:
packages: rolldice xdot
version: ${{ github.run_id }}-${{ github.run_attempt }}-standard_workflow
debug: ${{ env.DEBUG }}
- name: Verify
if: steps.execute.outputs.cache-hit != 'true'
run: |
echo "cache-hit = ${{ steps.execute.outputs.cache-hit }}"
exit 1
shell: bash
standard_workflow_add_package:
needs: standard_workflow_install
runs-on: ubuntu-latest
name: Standard workflow add another package.
steps:
- uses: actions/checkout@v4
- name: Execute
id: execute
uses: ./
with:
packages: xdot rolldice distro-info-data
version: ${{ github.run_id }}-${{ github.run_attempt }}-standard_workflow
debug: ${{ env.DEBUG }}
- name: Verify
if: steps.execute.outputs.cache-hit != 'false'
run: |
echo "cache-hit = ${{ steps.execute.outputs.cache-hit }}"
exit 1
shell: bash
standard_workflow_restore_add_package:
needs: standard_workflow_add_package
runs-on: ubuntu-latest
name: Standard workflow restore added package.
steps:
- uses: actions/checkout@v4
- name: Execute
id: execute
uses: ./
with:
packages: xdot rolldice distro-info-data
version: ${{ github.run_id }}-${{ github.run_attempt }}-standard_workflow
debug: ${{ env.DEBUG }}
- name: Verify
if: steps.execute.outputs.cache-hit != 'true'
run: |
echo "cache-hit = ${{ steps.execute.outputs.cache-hit }}"
exit 1
shell: bash
# === Error Handling ===
no_packages:
runs-on: ubuntu-latest
name: No packages passed.
steps:
- uses: actions/checkout@v4
- name: Execute
id: execute
uses: ./
with:
packages: ""
continue-on-error: true
- name: Verify
if: steps.execute.outcome != 'failure'
run: |
echo "Expected failure but got: ${{ steps.execute.outcome }}"
exit 1
shell: bash
package_not_found:
runs-on: ubuntu-latest
name: Package not found.
steps:
- uses: actions/checkout@v4
- name: Execute
id: execute
uses: ./
with:
packages: package_that_doesnt_exist
continue-on-error: true
- name: Verify
if: steps.execute.outcome != 'failure'
run: |
echo "Expected failure but got: ${{ steps.execute.outcome }}"
exit 1
shell: bash
version_contains_spaces:
runs-on: ubuntu-latest
name: Version contains spaces.
steps:
- uses: actions/checkout@v4
- name: Execute
id: execute
uses: ./
with:
packages: xdot
version: 123 abc
debug: ${{ env.DEBUG }}
continue-on-error: true
- name: Verify
if: steps.execute.outcome != 'failure'
run: |
echo "Expected failure but got: ${{ steps.execute.outcome }}"
exit 1
shell: bash
debug_disabled:
runs-on: ubuntu-latest
name: Debug disabled.
steps:
- uses: actions/checkout@v4
- uses: ./
with:
packages: xdot
version: ${{ github.run_id }}-${{ github.run_attempt }}-debug_disabled
debug: false
# === Regression Tests ===
regression_36:
runs-on: ubuntu-latest
name: "Reinstall existing package (regression issue #36)."
steps:
- uses: actions/checkout@v4
- uses: ./
with:
packages: libgtk-3-dev
version: ${{ github.run_id }}-${{ github.run_attempt }}-regression_36
debug: ${{ env.DEBUG }}
regression_37:
runs-on: ubuntu-latest
name: "Install with reported package deps not installed (regression issue #37)."
steps:
- uses: actions/checkout@v4
- uses: ./
with:
packages: libosmesa6-dev libgl1-mesa-dev python3-tk pandoc git-restore-mtime
version: ${{ github.run_id }}-${{ github.run_attempt }}-regression_37
debug: ${{ env.DEBUG }}
regression_72_1:
runs-on: ubuntu-latest
name: "Cache Java CA certs package v1 (regression issue #72)."
steps:
- uses: actions/checkout@v4
- uses: ./
with:
packages: openjdk-11-jre
version: ${{ github.run_id }}-${{ github.run_attempt }}-regression_72
debug: ${{ env.DEBUG }}
regression_72_2:
runs-on: ubuntu-latest
name: "Cache Java CA certs package v2 (regression issue #72)."
steps:
- uses: actions/checkout@v4
- uses: ./
with:
packages: default-jre
version: ${{ github.run_id }}-${{ github.run_attempt }}-regression_72
debug: ${{ env.DEBUG }}
regression_76:
runs-on: ubuntu-latest
name: "Cache empty archive (regression issue #76)."
steps:
- uses: actions/checkout@v4
- run: |
sudo wget -O- https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB | gpg --dearmor | sudo tee /usr/share/keyrings/oneapi-archive-keyring.gpg > /dev/null;
echo "deb [signed-by=/usr/share/keyrings/oneapi-archive-keyring.gpg] https://apt.repos.intel.com/oneapi all main" | sudo tee /etc/apt/sources.list.d/oneAPI.list;
sudo apt-get -qq update;
sudo apt-get install -y intel-oneapi-runtime-libs intel-oneapi-runtime-opencl;
sudo apt-get install -y opencl-headers ocl-icd-opencl-dev;
sudo apt-get install -y libsundials-dev;
- uses: ./
with:
packages: intel-oneapi-runtime-libs
version: ${{ github.run_id }}-${{ github.run_attempt }}-regression_76
debug: ${{ env.DEBUG }}
regression_79:
runs-on: ubuntu-latest
name: "Tar error with libboost-dev (regression issue #79)."
steps:
- uses: actions/checkout@v4
- uses: ./
with:
packages: libboost-dev
version: ${{ github.run_id }}-${{ github.run_attempt }}-regression_79
debug: ${{ env.DEBUG }}
regression_81:
runs-on: ubuntu-latest
name: "Tar error with alsa-ucm-conf (regression issue #81)."
steps:
- uses: actions/checkout@v4
- uses: ./
with:
packages: libasound2 libatk-bridge2.0-0 libatk1.0-0 libatspi2.0-0 libcups2 libdrm2 libgbm1 libnspr4 libnss3 libxcomposite1 libxdamage1 libxfixes3 libxkbcommon0 libxrandr2
version: ${{ github.run_id }}-${{ github.run_attempt }}-regression_81
debug: ${{ env.DEBUG }}
regression_84_literal_block_install:
runs-on: ubuntu-latest
name: "Install multiline package listing - literal block (regression issue #84)."
steps:
- uses: actions/checkout@v4
- uses: ./
with:
packages: >
xdot
rolldice distro-info-data
version: ${{ github.run_id }}-${{ github.run_attempt }}-regression_84_literal_block
debug: ${{ env.DEBUG }}
regression_84_literal_block_restore:
needs: regression_84_literal_block_install
runs-on: ubuntu-latest
name: "Restore multiline package listing - literal block (regression issue #84)."
steps:
- uses: actions/checkout@v4
- name: Execute
id: execute
uses: ./
with:
packages: xdot rolldice distro-info-data
version: ${{ github.run_id }}-${{ github.run_attempt }}-regression_84_literal_block
debug: ${{ env.DEBUG }}
- name: Verify
if: steps.execute.outputs.cache-hit != 'true'
run: |
echo "cache-hit = ${{ steps.execute.outputs.cache-hit }}"
exit 1
shell: bash
regression_84_folded_block_install:
runs-on: ubuntu-latest
name: "Install multiline package listing - folded block (regression issue #84)."
steps:
- uses: actions/checkout@v4
- uses: ./
with:
packages: |
xdot \
rolldice distro-info-data
version: ${{ github.run_id }}-${{ github.run_attempt }}-regression_84_folded_block
debug: ${{ env.DEBUG }}
regression_84_folded_block_restore:
needs: regression_84_folded_block_install
runs-on: ubuntu-latest
name: "Restore multiline package listing - folded block (regression issue #84)."
steps:
- uses: actions/checkout@v4
- name: Execute
id: execute
uses: ./
with:
packages: xdot rolldice distro-info-data
version: ${{ github.run_id }}-${{ github.run_attempt }}-regression_84_folded_block
debug: ${{ env.DEBUG }}
- name: Verify
if: steps.execute.outputs.cache-hit != 'true'
run: |
echo "cache-hit = ${{ steps.execute.outputs.cache-hit }}"
exit 1
shell: bash
regression_89:
runs-on: ubuntu-latest
name: "Upload logs artifact name (regression issue #89)."
steps:
- uses: actions/checkout@v4
- uses: ./
with:
packages: libgtk-3-dev:amd64
version: ${{ github.run_id }}-${{ github.run_attempt }}-regression_89
debug: ${{ env.DEBUG }}
regression_98:
runs-on: ubuntu-latest
name: "Install error due to SHELLOPTS override (regression issue #98)."
steps:
- uses: actions/checkout@v4
- uses: ./
with:
packages: git-restore-mtime libgl1-mesa-dev libosmesa6-dev pandoc
version: ${{ github.run_id }}-${{ github.run_attempt }}-regression_98
debug: ${{ env.DEBUG }}
regression_106_install:
runs-on: ubuntu-latest
name: "Stale apt repo - install phase (regression issue #106)."
steps:
- uses: actions/checkout@v4
- uses: ./
with:
packages: libtk8.6
version: ${{ github.run_id }}-${{ github.run_attempt }}-regression_106
debug: ${{ env.DEBUG }}
regression_106_restore:
needs: regression_106_install
runs-on: ubuntu-latest
name: "Stale apt repo - restore phase (regression issue #106)."
steps:
- uses: actions/checkout@v4
- uses: ./
with:
packages: libtk8.6
version: ${{ github.run_id }}-${{ github.run_attempt }}-regression_106
debug: ${{ env.DEBUG }}
# === Special Cases ===
multi_arch_cache_key:
runs-on: ubuntu-latest
name: "Cache packages with multi-arch cache key."
steps:
- uses: actions/checkout@v4
- uses: ./
with:
packages: libfuse2
version: ${{ github.run_id }}-${{ github.run_attempt }}-multi_arch_cache_key
debug: ${{ env.DEBUG }}
virtual_package:
runs-on: ubuntu-latest
name: "Cache virtual package."
steps:
- uses: actions/checkout@v4
- uses: ./
with:
packages: libvips
version: ${{ github.run_id }}-${{ github.run_attempt }}-virtual_package
debug: ${{ env.DEBUG }}
# === dpkg Registration Tests ===
dpkg_status_install:
runs-on: ubuntu-latest
name: "dpkg knows about packages after install (phase 1)."
steps:
- uses: actions/checkout@v4
- name: Execute
id: execute
uses: ./
with:
packages: rolldice
version: ${{ github.run_id }}-${{ github.run_attempt }}-dpkg_status
debug: ${{ env.DEBUG }}
- name: Verify dpkg knows the package
run: |
dpkg -s rolldice | grep -q 'Status: install ok installed'
echo "dpkg reports rolldice as installed after fresh install."
shell: bash
dpkg_status_restore:
needs: dpkg_status_install
runs-on: ubuntu-latest
name: "dpkg knows about packages after cache restore (phase 2)."
steps:
- uses: actions/checkout@v4
- name: Execute
id: execute
uses: ./
with:
packages: rolldice
version: ${{ github.run_id }}-${{ github.run_attempt }}-dpkg_status
debug: ${{ env.DEBUG }}
- name: Verify cache hit
run: test "${{ steps.execute.outputs.cache-hit }}" = "true"
shell: bash
- name: Verify dpkg knows the package after cache restore
run: |
dpkg -s rolldice | grep -q 'Status: install ok installed'
echo "dpkg reports rolldice as installed after cache restore."
shell: bash
- name: Verify the binary works
run: |
rolldice 2d6
echo "rolldice binary works after cache restore."
shell: bash
# === apt-sources Tests ===
apt_sources_empty:
runs-on: ubuntu-latest
name: "Empty apt-sources has no effect (backward compat)."
steps:
- uses: actions/checkout@v4
- name: Execute
id: execute
uses: ./
with:
packages: xdot
apt-sources: ""
version: ${{ github.run_id }}-${{ github.run_attempt }}-apt_sources_empty
debug: ${{ env.DEBUG }}
- name: Verify
run: |
test "${{ steps.execute.outputs.cache-hit }}" = "false"
echo "${{ steps.execute.outputs.package-version-list }}" | grep -q "xdot="
shell: bash
apt_sources_inline_install:
runs-on: ubuntu-latest
name: "apt-sources with inline deb line - install."
steps:
- uses: actions/checkout@v4
- name: Execute
id: execute
uses: ./
with:
packages: gh
apt-sources: |
https://cli.github.com/packages/githubcli-archive-keyring.gpg | deb [arch=amd64] https://cli.github.com/packages stable main
version: ${{ github.run_id }}-${{ github.run_attempt }}-apt_sources_inline
debug: ${{ env.DEBUG }}
- name: Verify
run: |
test "${{ steps.execute.outputs.cache-hit }}" = "false"
echo "${{ steps.execute.outputs.package-version-list }}" | grep -q "gh="
# Verify the keyring was created.
test -f /usr/share/keyrings/cli-github-com-packages-githubcli-archive-keyring-gpg.gpg
# Verify the source list was created.
test -f /etc/apt/sources.list.d/cli-github-com-packages-githubcli-archive-keyring-gpg.list
shell: bash
apt_sources_inline_restore:
needs: apt_sources_inline_install
runs-on: ubuntu-latest
name: "apt-sources with inline deb line - restore."
steps:
- uses: actions/checkout@v4
- name: Execute
id: execute
uses: ./
with:
packages: gh
apt-sources: |
https://cli.github.com/packages/githubcli-archive-keyring.gpg | deb [arch=amd64] https://cli.github.com/packages stable main
version: ${{ github.run_id }}-${{ github.run_attempt }}-apt_sources_inline
debug: ${{ env.DEBUG }}
- name: Verify
if: steps.execute.outputs.cache-hit != 'true'
run: |
echo "cache-hit = ${{ steps.execute.outputs.cache-hit }}"
exit 1
shell: bash
apt_sources_cache_key_changes:
runs-on: ubuntu-latest
name: "apt-sources changes invalidate cache."
steps:
- uses: actions/checkout@v4
- name: Install with one source
id: install1
uses: ./
with:
packages: xdot
apt-sources: |
https://cli.github.com/packages/githubcli-archive-keyring.gpg | deb [arch=amd64] https://cli.github.com/packages stable main
version: ${{ github.run_id }}-${{ github.run_attempt }}-apt_sources_cache_key
debug: ${{ env.DEBUG }}
- name: Verify first install is cache miss
run: test "${{ steps.install1.outputs.cache-hit }}" = "false"
shell: bash
apt_sources_validation_missing_pipe:
runs-on: ubuntu-latest
name: "apt-sources rejects lines missing pipe separator."
steps:
- uses: actions/checkout@v4
- name: Execute
id: execute
uses: ./
with:
packages: xdot
apt-sources: |
https://example.com/key.gpg deb https://example.com/repo stable main
version: ${{ github.run_id }}-${{ github.run_attempt }}-apt_sources_no_pipe
debug: ${{ env.DEBUG }}
continue-on-error: true
- name: Verify
if: steps.execute.outcome != 'failure'
run: |
echo "Expected failure but got: ${{ steps.execute.outcome }}"
exit 1
shell: bash
apt_sources_validation_http_key:
runs-on: ubuntu-latest
name: "apt-sources rejects non-HTTPS key URLs."
steps:
- uses: actions/checkout@v4
- name: Execute
id: execute
uses: ./
with:
packages: xdot
apt-sources: |
http://example.com/key.gpg | deb https://example.com/repo stable main
version: ${{ github.run_id }}-${{ github.run_attempt }}-apt_sources_http_key
debug: ${{ env.DEBUG }}
continue-on-error: true
- name: Verify
if: steps.execute.outcome != 'failure'
run: |
echo "Expected failure but got: ${{ steps.execute.outcome }}"
exit 1
shell: bash
apt_sources_conflicting_source:
runs-on: ubuntu-latest
name: "apt-sources replaces conflicting pre-existing source."
steps:
- uses: actions/checkout@v4
- name: Pre-create conflicting source
run: |
# Simulate a runner that already has the GitHub CLI repo configured
# with a different keyring path (like NVIDIA runners have for CUDA).
echo "deb [arch=amd64 signed-by=/usr/share/keyrings/fake-old-keyring.gpg] https://cli.github.com/packages stable main" \
| sudo tee /etc/apt/sources.list.d/existing-gh-repo.list
# Create a dummy keyring file so the source looks legitimate.
sudo touch /usr/share/keyrings/fake-old-keyring.gpg
shell: bash
- name: Execute
id: execute
uses: ./
with:
packages: gh
apt-sources: |
https://cli.github.com/packages/githubcli-archive-keyring.gpg | deb [arch=amd64] https://cli.github.com/packages stable main
version: ${{ github.run_id }}-${{ github.run_attempt }}-apt_sources_conflict
debug: ${{ env.DEBUG }}
- name: Verify
run: |
# Action should succeed despite the pre-existing conflicting source.
test "${{ steps.execute.outputs.cache-hit }}" = "false"
echo "${{ steps.execute.outputs.package-version-list }}" | grep -q "gh="
# The conflicting source file should have been removed.
test ! -f /etc/apt/sources.list.d/existing-gh-repo.list
# Our source file should exist.
test -f /etc/apt/sources.list.d/cli-github-com-packages-githubcli-archive-keyring-gpg.list
# gh should be callable.
gh --version
shell: bash
apt_sources_force_update:
runs-on: ubuntu-latest
name: "apt-sources forces apt update even if lists are fresh."
steps:
- uses: actions/checkout@v4
- name: Freshen apt lists
run: |
# Ensure apt lists are fresh so the 5-minute staleness check would
# normally skip the update. The action should force update anyway
# because apt-sources is specified.
sudo apt-get update -qq
shell: bash
- name: Execute
id: execute
uses: ./
with:
packages: gh
apt-sources: |
https://cli.github.com/packages/githubcli-archive-keyring.gpg | deb [arch=amd64] https://cli.github.com/packages stable main
version: ${{ github.run_id }}-${{ github.run_attempt }}-apt_sources_force_update
debug: ${{ env.DEBUG }}
- name: Verify
run: |
test "${{ steps.execute.outputs.cache-hit }}" = "false"
echo "${{ steps.execute.outputs.package-version-list }}" | grep -q "gh="
gh --version
shell: bash

View file

@ -37,6 +37,7 @@ There are three kinds of version labels you can use.
- `execute_install_scripts` - Execute Debian package pre and post install script upon restore. See [Caveats / Non-file Dependencies](#non-file-dependencies) for more information.
- `empty_packages_behavior` - Desired behavior when the given `packages` is empty. `'error'` (default), `'warn'` or `'ignore'`.
- `add-repository` - Space delimited list of repositories to add via `apt-add-repository` before installing packages. Supports PPA (e.g., `ppa:user/repo`) and other repository formats.
- `apt-sources` - Multi-line list of GPG-signed third-party repository sources. Each line has the format `key_url | source_spec` where `key_url` is an HTTPS URL to a GPG signing key and `source_spec` is either a URL to a `.list` file or an inline `deb` line. See [Using with Signed Third-party Repositories](#using-with-signed-third-party-repositories).
### Outputs
@ -121,6 +122,61 @@ install_from_multiple_repos:
version: 1.0
```
### Using with Signed Third-party Repositories
Many third-party repositories (Docker, NVIDIA, GitHub CLI, etc.) require a GPG signing key and a `signed-by=` source entry. The `apt-sources` parameter handles this two-step setup automatically.
Each line in `apt-sources` has the format:
```
key_url | source_spec
```
- `key_url` — HTTPS URL to the GPG signing key (will be dearmored and saved to `/usr/share/keyrings/`). Both ASCII-armored and binary keys are supported.
- `source_spec` — Either a URL to a source file (downloaded and `signed-by` injected) or an inline `deb` line. Both traditional `.list` format and modern deb822 `.sources` format are auto-detected and handled.
```yaml
# NVIDIA Container Toolkit (source spec is a URL to a .list file)
install_nvidia_toolkit:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: awalsh128/cache-apt-pkgs-action@latest
with:
packages: nvidia-container-toolkit
apt-sources: |
https://nvidia.github.io/libnvidia-container/gpgkey | https://nvidia.github.io/libnvidia-container/stable/deb/nvidia-container-toolkit.list
version: 1.0
```
```yaml
# Docker CE (inline deb line)
install_docker:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: awalsh128/cache-apt-pkgs-action@latest
with:
packages: docker-ce docker-ce-cli
apt-sources: |
https://download.docker.com/linux/ubuntu/gpg | deb [arch=amd64] https://download.docker.com/linux/ubuntu jammy stable
version: 1.0
```
```yaml
# Multiple signed sources
install_multiple:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: awalsh128/cache-apt-pkgs-action@latest
with:
packages: nvidia-container-toolkit docker-ce
apt-sources: |
https://nvidia.github.io/libnvidia-container/gpgkey | https://nvidia.github.io/libnvidia-container/stable/deb/nvidia-container-toolkit.list
https://download.docker.com/linux/ubuntu/gpg | deb [arch=amd64] https://download.docker.com/linux/ubuntu jammy stable
version: 1.0
```
## Caveats
### Non-file Dependencies

View file

@ -39,6 +39,14 @@ inputs:
description: 'Space delimited list of repositories to add via apt-add-repository before installing packages. Supports PPA (ppa:user/repo) and other repository formats.'
required: false
default: ''
apt-sources:
description: >
Multi-line list of GPG-signed third-party repository sources. Each line has the format:
key_url | source_spec
Where key_url is an HTTPS URL to a GPG signing key, and source_spec is either a URL to
a .list file or an inline deb line (e.g. "deb [arch=amd64] https://example.com/repo distro main").
required: false
default: ''
outputs:
cache-hit:
@ -64,6 +72,7 @@ runs:
"$EXEC_INSTALL_SCRIPTS" \
"$DEBUG" \
"$ADD_REPOSITORY" \
"$APT_SOURCES" \
"$PACKAGES"
if [ -f ~/cache-apt-pkgs/cache_key.md5 ]; then
echo "CACHE_KEY=$(cat ~/cache-apt-pkgs/cache_key.md5)" >> $GITHUB_ENV
@ -77,6 +86,7 @@ runs:
EMPTY_PACKAGES_BEHAVIOR: "${{ inputs.empty_packages_behavior }}"
DEBUG: "${{ inputs.debug }}"
ADD_REPOSITORY: "${{ inputs.add-repository }}"
APT_SOURCES: "${{ inputs.apt-sources }}"
PACKAGES: "${{ inputs.packages }}"
- id: load-cache
@ -96,6 +106,7 @@ runs:
"$EXEC_INSTALL_SCRIPTS" \
"$DEBUG" \
"$ADD_REPOSITORY" \
"$APT_SOURCES" \
"$PACKAGES"
function create_list { local list=$(cat ~/cache-apt-pkgs/manifest_${1}.log | tr '\n' ','); echo ${list:0:-1}; };
echo "package-version-list=$(create_list main)" >> $GITHUB_OUTPUT
@ -106,6 +117,7 @@ runs:
EXEC_INSTALL_SCRIPTS: "${{ inputs.execute_install_scripts }}"
DEBUG: "${{ inputs.debug }}"
ADD_REPOSITORY: "${{ inputs.add-repository }}"
APT_SOURCES: "${{ inputs.apt-sources }}"
PACKAGES: "${{ inputs.packages }}"
- id: upload-logs

View file

@ -18,8 +18,11 @@ cache_dir="${1}"
# Repositories to add before installing packages.
add_repository="${3}"
# GPG-signed third-party repository sources.
apt_sources="${4}"
# List of the packages to use.
input_packages="${@:4}"
input_packages="${@:5}"
if ! apt-fast --version > /dev/null 2>&1; then
log "Installing apt-fast for optimized installs..."
@ -41,8 +44,13 @@ if [ -n "${add_repository}" ]; then
log_empty_line
fi
# Set up GPG-signed third-party apt sources if specified
setup_apt_sources "${apt_sources}"
log "Updating APT package list..."
if [[ -z "$(find -H /var/lib/apt/lists -maxdepth 0 -mmin -5)" ]]; then
# Force update when custom sources were added — the staleness check only
# reflects the last update, which may predate the newly added repos.
if [ -n "${apt_sources}" ] || [ -n "${add_repository}" ] || [[ -z "$(find -H /var/lib/apt/lists -maxdepth 0 -mmin -5)" ]]; then
sudo apt-fast update > /dev/null
log "done"
else
@ -76,21 +84,19 @@ install_log_filepath="${cache_dir}/install.log"
log "Clean installing ${package_count} packages..."
# Zero interaction while installing or upgrading the system via apt.
sudo DEBIAN_FRONTEND=noninteractive apt-fast --yes install ${packages} > "${install_log_filepath}"
log "done"
log "Installation log written to ${install_log_filepath}"
# Explicitly check exit status since set +e (from lib.sh) is active.
sudo DEBIAN_FRONTEND=noninteractive apt-fast --yes install ${packages} 2>&1 | tee "${install_log_filepath}"
install_rc=${PIPESTATUS[0]}
if [ "${install_rc}" -ne 0 ]; then
log_err "Failed to install packages. apt-fast exited with an error (see messages above)."
exit 5
fi
log "Install completed successfully."
log_empty_line
installed_packages=$(get_installed_packages "${install_log_filepath}")
log "Installed package list:"
for installed_package in ${installed_packages}; do
# Reformat for human friendly reading.
log "- $(echo ${installed_package} | awk -F\= '{print $1" ("$2")"}')"
done
log_empty_line
installed_packages_count=$(wc -w <<< "${installed_packages}")
log "Caching ${installed_packages_count} installed packages..."
for installed_package in ${installed_packages}; do
@ -101,24 +107,35 @@ for installed_package in ${installed_packages}; do
read package_name package_ver < <(get_package_name_ver "${installed_package}")
log " * Caching ${package_name} to ${cache_filepath}..."
# Pipe all package files (no folders), including symlinks, their targets, and installation control data to Tar.
tar -cf "${cache_filepath}" -C / --verbatim-files-from --files-from <(
{ dpkg -L "${package_name}" &&
get_install_script_filepath "" "${package_name}" "preinst" &&
get_install_script_filepath "" "${package_name}" "postinst" ; } |
# Pipe all package files, directories, and symlinks (plus symlink targets
# and dpkg metadata) to Tar. Directories are included so that tar
# preserves their ownership and permissions on restore — without them,
# tar auto-creates parent directories using the current umask, which on
# some runners (e.g. GPU-optimized images) defaults to 0077, leaving
# restored trees inaccessible to non-root users.
tar -cf "${cache_filepath}" -C / --no-recursion --verbatim-files-from --files-from <(
{ dpkg -L "${package_name}" | grep -vxF -e '/.' -e '.' -e '/' &&
# Include all dpkg info files for this package (list, md5sums,
# conffiles, triggers, preinst, postinst, prerm, postrm, etc.)
# so dpkg recognizes the package after cache restore.
ls -1 /var/lib/dpkg/info/${package_name}.* 2>/dev/null &&
ls -1 /var/lib/dpkg/info/${package_name}:*.* 2>/dev/null ; } |
while IFS= read -r f; do
if test -f "${f}" -o -L "${f}"; then
get_tar_relpath "${f}"
if [ -f "${f}" ] || [ -L "${f}" ] || [ -d "${f}" ]; then
echo "${f#/}"
if [ -L "${f}" ]; then
target="$(readlink -f "${f}")"
if [ -f "${target}" ]; then
get_tar_relpath "${target}"
echo "${target#/}"
fi
fi
fi
done
)
# Save the dpkg status entry so we can register the package on restore.
dpkg -s "${package_name}" > "${cache_dir}/${installed_package}.dpkg-status" 2>/dev/null || true
log " done (compressed size $(du -h "${cache_filepath}" | cut -f1))."
fi

259
lib.sh
View file

@ -54,14 +54,14 @@ function get_install_script_filepath {
# The list of colon delimited action syntax pairs with each pair equals
# delimited. <name>:<version> <name>:<version>...
###############################################################################
function get_installed_packages {
function get_installed_packages {
local install_log_filepath="${1}"
local regex="^Unpacking ([^ :]+)([^ ]+)? (\[[^ ]+\]\s)?\(([^ )]+)"
local dep_packages=""
local regex="^Unpacking ([^ :]+)([^ ]+)? (\[[^ ]+\]\s)?\(([^ )]+)"
local dep_packages=""
while read -r line; do
# ${regex} should be unquoted since it isn't a literal.
if [[ "${line}" =~ ${regex} ]]; then
dep_packages="${dep_packages}${BASH_REMATCH[1]}=${BASH_REMATCH[4]} "
dep_packages="${dep_packages}${BASH_REMATCH[1]}${BASH_REMATCH[2]}=${BASH_REMATCH[4]} "
else
log_err "Unable to parse package name and version from \"${line}\""
exit 2
@ -135,6 +135,257 @@ function get_tar_relpath {
fi
}
###############################################################################
# Injects a signed-by option into an apt "deb" line when one is not already
# present. Uses only bash built-in pattern matching — no grep/sed subprocesses
# and no reliance on GNU-only regex escapes such as \s.
# Arguments:
#   The deb (or deb-src) line to process.
#   The keyring filepath to reference.
# Returns:
#   The deb line with signed-by injected, or the line unchanged when it
#   already carries signed-by or is not a deb line at all.
###############################################################################
function inject_signed_by {
  local line="${1}"
  local keyring="${2}"
  # Already signed — leave untouched so an explicit keyring choice wins.
  if [[ "${line}" == *signed-by=* ]]; then
    echo "${line}"
    return
  fi
  # "deb [opts] url ..." -> append signed-by inside the existing options bracket.
  # e.g. "deb [arch=amd64] https://..." -> "deb [arch=amd64 signed-by=...] https://..."
  if [[ "${line}" =~ ^(deb(-src)?)[[:space:]]+\[([^]]*)\](.*)$ ]]; then
    echo "${BASH_REMATCH[1]} [${BASH_REMATCH[3]} signed-by=${keyring}]${BASH_REMATCH[4]}"
    return
  fi
  # "deb url ..." without options -> insert a fresh options bracket.
  # e.g. "deb https://..." -> "deb [signed-by=...] https://..."
  if [[ "${line}" =~ ^(deb(-src)?)[[:space:]]+(.*)$ ]]; then
    echo "${BASH_REMATCH[1]} [signed-by=${keyring}] ${BASH_REMATCH[3]}"
    return
  fi
  # Not a deb line (comment, blank, etc.) — pass through unchanged.
  echo "${line}"
}
###############################################################################
# Injects Signed-By into deb822-format (.sources) content if not already
# present. deb822 uses multi-line key-value blocks separated by blank lines.
# Arguments:
#   The full deb822 content string.
#   The keyring filepath to reference.
# Returns:
#   The content with a "Signed-By:" line added after each "Types:" line, or
#   the content unchanged when any Signed-By is already declared.
###############################################################################
function inject_signed_by_deb822 {
  local content="${1}"
  local keyring="${2}"
  # Respect an existing Signed-By declaration anywhere in the content.
  if grep -qi '^Signed-By:' <<< "${content}"; then
    echo "${content}"
    return
  fi
  # Re-emit line by line, following every "Types:" line with a Signed-By entry.
  local line
  while IFS= read -r line; do
    echo "${line}"
    if [[ "${line}" == Types:* ]]; then
      echo "Signed-By: ${keyring}"
    fi
  done <<< "${content}"
}
###############################################################################
# Detects whether content is in deb822 format (.sources) or traditional
# one-line format (.list).
# Arguments:
#   The source file content.
# Returns:
#   Exit code 0 if deb822, 1 if traditional.
###############################################################################
function is_deb822_format {
  # Only deb822 stanzas carry a "Types:" field; one-line sources start "deb".
  grep -qE '^Types:[[:space:]]+' <<< "${1}"
}
###############################################################################
# Derives a keyring filename stem from a key URL.
# Arguments:
#   URL to derive the name from.
# Returns:
#   A sanitized name suitable for a keyring filename (without extension): the
#   scheme is dropped and runs of slashes/dots collapse to single hyphens.
#   The full path (not just the host) is kept so that two keys sharing a
#   domain but differing in path do not collide.
###############################################################################
function derive_keyring_name {
  local stripped="${1}"
  stripped="${stripped#https://}"
  stripped="${stripped#http://}"
  # Collapse path separators and dots into hyphens; drop any trailing hyphens.
  sed -E 's|[/.]+|-|g; s|-+$||' <<< "${stripped}"
}
###############################################################################
# Extracts the repo URL from a deb line, stripping the deb prefix and options.
# Arguments:
#   A deb or deb-src line.
# Returns:
#   The repo URL (first URL after stripping prefix and options bracket).
###############################################################################
function extract_repo_url {
  local stripped
  # Remove the leading "deb"/"deb-src" keyword and, when present, the
  # "[...]" options bracket (which may itself contain spaces).
  stripped="$(printf '%s\n' "${1}" | sed -E 's/^deb(-src)?[[:space:]]+(\[[^]]*\][[:space:]]+)?//')"
  # The repo URL is the first remaining whitespace-separated token.
  awk '{print $1}' <<< "${stripped}"
}
###############################################################################
# Removes existing apt source files that reference the same repo URL.
# This prevents "Conflicting values set for option Signed-By" errors when
# the runner already has a source configured (e.g., NVIDIA CUDA repo on
# GPU runners) and we add a new source with a different keyring path.
# Arguments:
#   The repo URL to check for conflicts.
#   The file path we're about to write (excluded from removal).
###############################################################################
function remove_conflicting_sources {
  local repo_url="${1}"
  local our_list_path="${2}"
  # Nothing to check if repo_url is empty.
  if [ -z "${repo_url}" ]; then
    return
  fi
  local candidate
  for candidate in /etc/apt/sources.list.d/*.list /etc/apt/sources.list.d/*.sources; do
    # Skip unmatched glob patterns and the file we are about to write.
    if [ ! -f "${candidate}" ] || [ "${candidate}" = "${our_list_path}" ]; then
      continue
    fi
    # Fixed-string match (-F) so URL metacharacters are not treated as regex.
    if grep -qF "${repo_url}" "${candidate}" 2>/dev/null; then
      log " Removing conflicting source: ${candidate}"
      sudo rm -f "${candidate}"
    fi
  done
}
###############################################################################
# Sets up GPG-signed third-party apt sources.
# For each non-empty input line ("key_url | source_spec"):
#   1. Downloads the GPG key from key_url, dearmors it when ASCII-armored,
#      and installs it as /usr/share/keyrings/<derived-name>.gpg.
#   2. Writes an apt source referencing that keyring:
#      - if source_spec is an http(s) URL, the source file is downloaded and
#        rewritten with signed-by (.list) / Signed-By (.sources) injected;
#      - otherwise source_spec is treated as an inline one-line "deb ..." entry.
#   Pre-existing source files referencing the same repo URL are removed first
#   (see remove_conflicting_sources) to avoid Signed-By conflicts.
# Requires sudo (writes to /usr/share/keyrings and /etc/apt/sources.list.d).
# Exits with code 7 on a malformed line or any download/dearmor failure.
# Arguments:
#   Multi-line string where each line is: key_url | source_spec
# Returns:
#   Log lines from setup.
###############################################################################
function setup_apt_sources {
  local apt_sources="${1}"
  # No sources configured: nothing to do.
  if [ -z "${apt_sources}" ]; then
    return
  fi
  log "Setting up GPG-signed apt sources..."
  while IFS= read -r line; do
    # Skip empty lines.
    if [ -z "$(echo "${line}" | tr -d '[:space:]')" ]; then
      continue
    fi
    # Split on pipe separator, trim whitespace with sed instead of xargs.
    # NOTE(review): "local var=$(cmd)" returns the status of "local", so a
    # failing cmd would not trip errexit (shellcheck SC2155); the failures
    # that matter here (curl, gpg) are checked explicitly below.
    local key_url=$(echo "${line}" | cut -d'|' -f1 | sed 's/^[[:space:]]*//;s/[[:space:]]*$//')
    local source_spec=$(echo "${line}" | cut -d'|' -f2- | sed 's/^[[:space:]]*//;s/[[:space:]]*$//')
    if [ -z "${key_url}" ] || [ -z "${source_spec}" ]; then
      log_err "Invalid apt-sources line (missing key_url or source_spec): ${line}"
      exit 7
    fi
    local keyring_name=$(derive_keyring_name "${key_url}")
    local keyring_path="/usr/share/keyrings/${keyring_name}.gpg"
    # Download GPG key to temp file, then detect format and convert if needed.
    log "- Downloading GPG key from ${key_url}..."
    local tmpkey=$(mktemp)
    if ! curl -fsSL "${key_url}" -o "${tmpkey}"; then
      log_err "Failed to download GPG key from ${key_url}"
      rm -f "${tmpkey}"
      exit 7
    fi
    # Detect if key is ASCII-armored or already binary.
    # "PGP public key block" = ASCII-armored, needs dearmoring.
    # "PGP/GPG key public ring" or other = already binary, copy directly.
    if file "${tmpkey}" | grep -qi 'PGP public key block$'; then
      # ASCII-armored key, dearmor it.
      if ! sudo gpg --batch --yes --dearmor -o "${keyring_path}" < "${tmpkey}"; then
        log_err "Failed to dearmor GPG key from ${key_url}"
        rm -f "${tmpkey}"
        exit 7
      fi
    else
      # Already in binary format, copy directly.
      sudo cp "${tmpkey}" "${keyring_path}"
    fi
    rm -f "${tmpkey}"
    log " Keyring saved to ${keyring_path}"
    # Determine if source_spec is a URL (download source file) or inline deb line.
    if echo "${source_spec}" | grep -qE '^https?://'; then
      # Source spec is a URL to a source file - download it.
      local list_name="${keyring_name}"
      log "- Downloading source list from ${source_spec}..."
      local list_content
      if ! list_content=$(curl -fsSL "${source_spec}"); then
        log_err "Failed to download source list from ${source_spec}"
        exit 7
      fi
      if is_deb822_format "${list_content}"; then
        # deb822 format (.sources file) - inject Signed-By as a field.
        local list_path="/etc/apt/sources.list.d/${list_name}.sources"
        # Remove any existing source files that reference the same repo URLs
        # to prevent signed-by conflicts.
        local repo_urls=$(echo "${list_content}" | grep -i '^URIs:' | sed 's/^URIs:[[:space:]]*//')
        # A URIs: field may list several space-separated URLs; unquoted
        # expansion intentionally word-splits them for the loop.
        for url in ${repo_urls}; do
          remove_conflicting_sources "${url}" "${list_path}"
        done
        local processed_content=$(inject_signed_by_deb822 "${list_content}" "${keyring_path}")
        echo "${processed_content}" | sudo tee "${list_path}" > /dev/null
        log " Source list (deb822) written to ${list_path}"
      else
        # Traditional one-line format (.list file) - inject signed-by per line.
        local list_path="/etc/apt/sources.list.d/${list_name}.list"
        # Remove conflicting sources for each deb line's repo URL.
        while IFS= read -r deb_line; do
          if echo "${deb_line}" | grep -qE '^deb(-src)?[[:space:]]+'; then
            local repo_url=$(extract_repo_url "${deb_line}")
            remove_conflicting_sources "${repo_url}" "${list_path}"
          fi
        done <<< "${list_content}"
        # Rebuild the file line by line, passing each deb line through
        # inject_signed_by.  The closing quote on its own line (column 0)
        # embeds exactly one newline after each processed entry.
        local processed_content=""
        while IFS= read -r deb_line; do
          if [ -n "${deb_line}" ]; then
            processed_content="${processed_content}$(inject_signed_by "${deb_line}" "${keyring_path}")
"
          fi
        done <<< "${list_content}"
        echo "${processed_content}" | sudo tee "${list_path}" > /dev/null
        log " Source list written to ${list_path}"
      fi
    else
      # Source spec is an inline deb line.
      local list_name="${keyring_name}"
      local list_path="/etc/apt/sources.list.d/${list_name}.list"
      # Remove any existing source files that reference the same repo URL.
      local repo_url=$(extract_repo_url "${source_spec}")
      remove_conflicting_sources "${repo_url}" "${list_path}"
      local processed_line=$(inject_signed_by "${source_spec}" "${keyring_path}")
      echo "${processed_line}" | sudo tee "${list_path}" > /dev/null
      log "- Inline source written to ${list_path}"
    fi
  done <<< "${apt_sources}"
  log "done"
  log_empty_line
}
# Prints all arguments to stdout, space-separated.
log() {
  echo "${@}"
}
# Prints all arguments to stderr, space-separated.
log_err() {
  echo "${@}" 1>&2
}

View file

@ -28,13 +28,16 @@ test "${debug}" = "true" && set -x
# Repositories to add before installing packages.
add_repository="${6}"
# GPG-signed third-party repository sources.
apt_sources="${7}"
# List of the packages to use.
packages="${@:7}"
packages="${@:8}"
if test "${cache_hit}" = "true"; then
${script_dir}/restore_pkgs.sh "${cache_dir}" "${cache_restore_root}" "${execute_install_scripts}" "${debug}"
else
${script_dir}/install_and_cache_pkgs.sh "${cache_dir}" "${debug}" "${add_repository}" ${packages}
${script_dir}/install_and_cache_pkgs.sh "${cache_dir}" "${debug}" "${add_repository}" "${apt_sources}" ${packages}
fi
log_empty_line

View file

@ -27,8 +27,11 @@ debug="${4}"
# Repositories to add before installing packages.
add_repository="${5}"
# GPG-signed third-party repository sources.
apt_sources="${6}"
# List of the packages to use.
input_packages="${@:6}"
input_packages="${@:7}"
# Trim commas, excess spaces, and sort.
log "Normalizing package list..."
@ -80,6 +83,32 @@ if [ -n "${add_repository}" ]; then
log "done"
fi
# Validate apt-sources parameter
if [ -n "${apt_sources}" ]; then
log "Validating apt-sources parameter..."
while IFS= read -r line; do
# Skip empty lines.
if [ -z "$(echo "${line}" | tr -d '[:space:]')" ]; then
continue
fi
# Each line must contain a pipe separator.
if ! echo "${line}" | grep -q '|'; then
log "aborted"
log "apt-sources line missing '|' separator: ${line}" >&2
log "Expected format: key_url | source_spec" >&2
exit 7
fi
# Key URL must start with https://
key_url_check=$(echo "${line}" | cut -d'|' -f1 | sed 's/^[[:space:]]*//;s/[[:space:]]*$//')
if ! echo "${key_url_check}" | grep -qE '^https://'; then
log "aborted"
log "apt-sources key URL must start with https:// but got: ${key_url_check}" >&2
exit 7
fi
done <<< "${apt_sources}"
log "done"
fi
log "done"
log_empty_line
@ -105,12 +134,29 @@ if [ -n "${add_repository}" ]; then
log "- Repositories '${add_repository}' added to value."
fi
# Include apt-sources in cache key (normalize to single line for stable hashing)
if [ -n "${apt_sources}" ]; then
normalized_sources=$(echo "${apt_sources}" | sed '/^[[:space:]]*$/d' | sort | tr '\n' '|')
value="${value} apt-sources:${normalized_sources}"
log "- Apt sources added to value."
fi
# Don't invalidate existing caches for the standard Ubuntu runners
if [ "${cpu_arch}" != "x86_64" ]; then
value="${value} ${cpu_arch}"
log "- Architecture '${cpu_arch}' added to value."
fi
# Include a hash of pre-installed packages so runners with different base
# images (e.g., GPU runners with CUDA pre-installed vs plain Ubuntu) get
# different cache keys. This prevents a cache built on runner A (where some
# packages were already installed) from being restored on runner B (where
# those packages are missing).
base_pkgs_hash="$(dpkg-query -W -f='${binary:Package}\n' | sha1sum | cut -f1 -d' ')"
value="${value} base:${base_pkgs_hash}"
log "- Base packages hash '${base_pkgs_hash}' added to value."
echo "::notice::Runner base image fingerprint: ${base_pkgs_hash}. Runners with different pre-installed packages produce different fingerprints and cannot share caches."
log "- Value to hash is '${value}'."
key="$(echo "${value}" | md5sum | cut -f1 -d' ')"

View file

@ -50,7 +50,7 @@ for cached_filepath in ${cached_filepaths}; do
sudo tar -xf "${cached_filepath}" -C "${cache_restore_root}" > /dev/null
log " done"
# Execute install scripts if available.
# Execute install scripts if available.
if test ${execute_install_scripts} == "true"; then
# May have to add more handling for extracting pre-install script before extracting all files.
# Keeping it simple for now.
@ -59,3 +59,69 @@ for cached_filepath in ${cached_filepaths}; do
fi
done
log "done"
log_empty_line
# Register packages with dpkg so they appear as installed.
# The tar extraction restores dpkg info files (list, md5sums, etc.) but the
# main status database (/var/lib/dpkg/status) also needs updating.
dpkg_status_dir="${cache_dir}"
status_files=$(ls -1 "${dpkg_status_dir}"/*.dpkg-status 2>/dev/null || true)
if test -n "${status_files}"; then
log "Registering restored packages with dpkg..."
dpkg_status_path="${cache_restore_root}var/lib/dpkg/status"
for status_file in ${status_files}; do
pkg_name=$(grep '^Package:' "${status_file}" | head -1 | sed 's/^Package: //')
cached_ver=$(grep '^Version:' "${status_file}" | head -1 | sed 's/^Version: //')
cached_arch=$(grep '^Architecture:' "${status_file}" | head -1 | sed 's/^Architecture: //')
# Build architecture-qualified name for dpkg queries.
dpkg_query_name="${pkg_name}"
if [ -n "${cached_arch}" ] && [ "${cached_arch}" != "all" ]; then
dpkg_query_name="${pkg_name}:${cached_arch}"
fi
if dpkg -s "${dpkg_query_name}" > /dev/null 2>&1; then
existing_status=$(dpkg -s "${dpkg_query_name}" 2>/dev/null | grep '^Status:' | head -1)
existing_ver=$(dpkg -s "${dpkg_query_name}" 2>/dev/null | grep '^Version:' | head -1 | sed 's/^Version: //')
if echo "${existing_status}" | grep -q 'install ok installed'; then
if [ "${existing_ver}" = "${cached_ver}" ]; then
log "- ${dpkg_query_name} already at version ${cached_ver}, skipping."
continue
fi
# Package is installed at a different version (was upgraded during
# the original install). Remove the old dpkg status entry so we can
# replace it with the cached (upgraded) version.
log "- ${dpkg_query_name} updating from ${existing_ver} to ${cached_ver}..."
sudo python3 -c "
import sys
pkg, arch, path = sys.argv[1], sys.argv[2], sys.argv[3]
with open(path, 'r') as f:
content = f.read()
entries = content.split('\n\n')
kept = []
for entry in entries:
if not entry.strip():
continue
lines = entry.strip().split('\n')
match_pkg = any(l == 'Package: ' + pkg for l in lines)
match_arch = any(l == 'Architecture: ' + arch for l in lines)
if match_pkg and match_arch:
continue
kept.append(entry.strip())
with open(path, 'w') as f:
f.write('\n\n'.join(kept))
if kept:
f.write('\n\n')
" "${pkg_name}" "${cached_arch}" "${dpkg_status_path}"
fi
fi
# Append the status entry (with blank line separator) to the dpkg database.
echo "" | sudo tee -a "${dpkg_status_path}" > /dev/null
cat "${status_file}" | sudo tee -a "${dpkg_status_path}" > /dev/null
log "- ${dpkg_query_name} registered."
done
log "done"
fi