diff --git a/.github/workflows/action-tests.yml b/.github/workflows/action-tests.yml
new file mode 100644
index 0000000..6e66a94
--- /dev/null
+++ b/.github/workflows/action-tests.yml
@@ -0,0 +1,522 @@
+name: Action Tests
+on:
+  workflow_dispatch:
+    inputs:
+      debug:
+        description: "Run in debug mode."
+        type: boolean
+        required: false
+        default: false
+  push:
+    branches: [master, dev, staging]
+  pull_request:
+    types: [opened, synchronize]
+
+env:
+  DEBUG: ${{ github.event.inputs.debug || false }}
+  # Test for overrides in built in shell options (regression issue 98).
+  SHELLOPTS: errexit:pipefail
+
+jobs:
+
+  # === Core Functionality ===
+
+  list_all_versions:
+    runs-on: ubuntu-latest
+    name: List all package versions (including deps).
+    steps:
+      - uses: actions/checkout@v4
+      - name: Execute
+        id: execute
+        uses: ./
+        with:
+          packages: xdot=1.3-1
+          version: ${{ github.run_id }}-${{ github.run_attempt }}-list_all_versions
+          debug: ${{ env.DEBUG }}
+      - name: Verify
+        run: |
+          echo "cache-hit = ${{ steps.execute.outputs.cache-hit }}"
+          echo "package-version-list = ${{ steps.execute.outputs.package-version-list }}"
+          echo "all-package-version-list = ${{ steps.execute.outputs.all-package-version-list }}"
+          # Verify cache miss on first run.
+          test "${{ steps.execute.outputs.cache-hit }}" = "false"
+          # Verify the main package is in the all-packages list.
+          echo "${{ steps.execute.outputs.all-package-version-list }}" | grep -q "xdot=1.3-1"
+        shell: bash
+
+  list_versions:
+    runs-on: ubuntu-latest
+    name: List package versions.
+    steps:
+      - uses: actions/checkout@v4
+      - name: Execute
+        id: execute
+        uses: ./
+        with:
+          packages: xdot rolldice
+          version: ${{ github.run_id }}-${{ github.run_attempt }}-list_versions
+          debug: ${{ env.DEBUG }}
+      - name: Verify
+        run: |
+          echo "cache-hit = ${{ steps.execute.outputs.cache-hit }}"
+          echo "package-version-list = ${{ steps.execute.outputs.package-version-list }}"
+          test "${{ steps.execute.outputs.cache-hit }}" = "false"
+          echo "${{ steps.execute.outputs.package-version-list }}" | grep -q "xdot="
+          echo "${{ steps.execute.outputs.package-version-list }}" | grep -q "rolldice="
+        shell: bash
+
+  standard_workflow_install:
+    runs-on: ubuntu-latest
+    name: Standard workflow install package and cache.
+    steps:
+      - uses: actions/checkout@v4
+      - name: Execute
+        id: execute
+        uses: ./
+        with:
+          packages: xdot rolldice
+          version: ${{ github.run_id }}-${{ github.run_attempt }}-standard_workflow
+          debug: ${{ env.DEBUG }}
+      - name: Verify
+        if: steps.execute.outputs.cache-hit != 'false'
+        run: |
+          echo "cache-hit = ${{ steps.execute.outputs.cache-hit }}"
+          exit 1
+        shell: bash
+
+  standard_workflow_install_with_new_version:
+    needs: standard_workflow_install
+    runs-on: ubuntu-latest
+    name: Standard workflow packages with new version.
+    steps:
+      - uses: actions/checkout@v4
+      - name: Execute
+        id: execute
+        uses: ./
+        with:
+          packages: xdot rolldice
+          version: ${{ github.run_id }}-${{ github.run_attempt }}-standard_workflow_install_with_new_version
+          debug: ${{ env.DEBUG }}
+      - name: Verify
+        if: steps.execute.outputs.cache-hit != 'false'
+        run: |
+          echo "cache-hit = ${{ steps.execute.outputs.cache-hit }}"
+          exit 1
+        shell: bash
+
+  standard_workflow_restore:
+    needs: standard_workflow_install
+    runs-on: ubuntu-latest
+    name: Standard workflow restore cached packages.
+    steps:
+      - uses: actions/checkout@v4
+      - name: Execute
+        id: execute
+        uses: ./
+        with:
+          packages: xdot rolldice
+          version: ${{ github.run_id }}-${{ github.run_attempt }}-standard_workflow
+          debug: ${{ env.DEBUG }}
+      - name: Verify
+        if: steps.execute.outputs.cache-hit != 'true'
+        run: |
+          echo "cache-hit = ${{ steps.execute.outputs.cache-hit }}"
+          exit 1
+        shell: bash
+
+  standard_workflow_restore_with_packages_out_of_order:
+    needs: standard_workflow_install
+    runs-on: ubuntu-latest
+    name: Standard workflow restore with packages out of order.
+    steps:
+      - uses: actions/checkout@v4
+      - name: Execute
+        id: execute
+        uses: ./
+        with:
+          packages: rolldice xdot
+          version: ${{ github.run_id }}-${{ github.run_attempt }}-standard_workflow
+          debug: ${{ env.DEBUG }}
+      - name: Verify
+        if: steps.execute.outputs.cache-hit != 'true'
+        run: |
+          echo "cache-hit = ${{ steps.execute.outputs.cache-hit }}"
+          exit 1
+        shell: bash
+
+  standard_workflow_add_package:
+    needs: standard_workflow_install
+    runs-on: ubuntu-latest
+    name: Standard workflow add another package.
+    steps:
+      - uses: actions/checkout@v4
+      - name: Execute
+        id: execute
+        uses: ./
+        with:
+          packages: xdot rolldice distro-info-data
+          version: ${{ github.run_id }}-${{ github.run_attempt }}-standard_workflow
+          debug: ${{ env.DEBUG }}
+      - name: Verify
+        if: steps.execute.outputs.cache-hit != 'false'
+        run: |
+          echo "cache-hit = ${{ steps.execute.outputs.cache-hit }}"
+          exit 1
+        shell: bash
+
+  standard_workflow_restore_add_package:
+    needs: standard_workflow_add_package
+    runs-on: ubuntu-latest
+    name: Standard workflow restore added package.
+    steps:
+      - uses: actions/checkout@v4
+      - name: Execute
+        id: execute
+        uses: ./
+        with:
+          packages: xdot rolldice distro-info-data
+          version: ${{ github.run_id }}-${{ github.run_attempt }}-standard_workflow
+          debug: ${{ env.DEBUG }}
+      - name: Verify
+        if: steps.execute.outputs.cache-hit != 'true'
+        run: |
+          echo "cache-hit = ${{ steps.execute.outputs.cache-hit }}"
+          exit 1
+        shell: bash
+
+  # === Error Handling ===
+
+  no_packages:
+    runs-on: ubuntu-latest
+    name: No packages passed.
+    steps:
+      - uses: actions/checkout@v4
+      - name: Execute
+        id: execute
+        uses: ./
+        with:
+          packages: ""
+        continue-on-error: true
+      - name: Verify
+        if: steps.execute.outcome != 'failure'
+        run: |
+          echo "Expected failure but got: ${{ steps.execute.outcome }}"
+          exit 1
+        shell: bash
+
+  package_not_found:
+    runs-on: ubuntu-latest
+    name: Package not found.
+    steps:
+      - uses: actions/checkout@v4
+      - name: Execute
+        id: execute
+        uses: ./
+        with:
+          packages: package_that_doesnt_exist
+        continue-on-error: true
+      - name: Verify
+        if: steps.execute.outcome != 'failure'
+        run: |
+          echo "Expected failure but got: ${{ steps.execute.outcome }}"
+          exit 1
+        shell: bash
+
+  version_contains_spaces:
+    runs-on: ubuntu-latest
+    name: Version contains spaces.
+    steps:
+      - uses: actions/checkout@v4
+      - name: Execute
+        id: execute
+        uses: ./
+        with:
+          packages: xdot
+          version: 123 abc
+          debug: ${{ env.DEBUG }}
+        continue-on-error: true
+      - name: Verify
+        if: steps.execute.outcome != 'failure'
+        run: |
+          echo "Expected failure but got: ${{ steps.execute.outcome }}"
+          exit 1
+        shell: bash
+
+  debug_disabled:
+    runs-on: ubuntu-latest
+    name: Debug disabled.
+    steps:
+      - uses: actions/checkout@v4
+      - uses: ./
+        with:
+          packages: xdot
+          version: ${{ github.run_id }}-${{ github.run_attempt }}-debug_disabled
+          debug: false
+
+  # === Regression Tests ===
+
+  regression_36:
+    runs-on: ubuntu-latest
+    name: "Reinstall existing package (regression issue #36)."
+    steps:
+      - uses: actions/checkout@v4
+      - uses: ./
+        with:
+          packages: libgtk-3-dev
+          version: ${{ github.run_id }}-${{ github.run_attempt }}-regression_36
+          debug: ${{ env.DEBUG }}
+
+  regression_37:
+    runs-on: ubuntu-latest
+    name: "Install with reported package deps not installed (regression issue #37)."
+    steps:
+      - uses: actions/checkout@v4
+      - uses: ./
+        with:
+          packages: libosmesa6-dev libgl1-mesa-dev python3-tk pandoc git-restore-mtime
+          version: ${{ github.run_id }}-${{ github.run_attempt }}-regression_37
+          debug: ${{ env.DEBUG }}
+
+  regression_72_1:
+    runs-on: ubuntu-latest
+    name: "Cache Java CA certs package v1 (regression issue #72)."
+    steps:
+      - uses: actions/checkout@v4
+      - uses: ./
+        with:
+          packages: openjdk-11-jre
+          version: ${{ github.run_id }}-${{ github.run_attempt }}-regression_72
+          debug: ${{ env.DEBUG }}
+
+  regression_72_2:
+    runs-on: ubuntu-latest
+    name: "Cache Java CA certs package v2 (regression issue #72)."
+    steps:
+      - uses: actions/checkout@v4
+      - uses: ./
+        with:
+          packages: default-jre
+          version: ${{ github.run_id }}-${{ github.run_attempt }}-regression_72
+          debug: ${{ env.DEBUG }}
+
+  regression_76:
+    runs-on: ubuntu-latest
+    name: "Cache empty archive (regression issue #76)."
+    steps:
+      - uses: actions/checkout@v4
+      - run: |
+          sudo wget -O- https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB | gpg --dearmor | sudo tee /usr/share/keyrings/oneapi-archive-keyring.gpg > /dev/null;
+          echo "deb [signed-by=/usr/share/keyrings/oneapi-archive-keyring.gpg] https://apt.repos.intel.com/oneapi all main" | sudo tee /etc/apt/sources.list.d/oneAPI.list;
+          sudo apt-get -qq update;
+          sudo apt-get install -y intel-oneapi-runtime-libs intel-oneapi-runtime-opencl;
+          sudo apt-get install -y opencl-headers ocl-icd-opencl-dev;
+          sudo apt-get install -y libsundials-dev;
+      - uses: ./
+        with:
+          packages: intel-oneapi-runtime-libs
+          version: ${{ github.run_id }}-${{ github.run_attempt }}-regression_76
+          debug: ${{ env.DEBUG }}
+
+  regression_79:
+    runs-on: ubuntu-latest
+    name: "Tar error with libboost-dev (regression issue #79)."
+    steps:
+      - uses: actions/checkout@v4
+      - uses: ./
+        with:
+          packages: libboost-dev
+          version: ${{ github.run_id }}-${{ github.run_attempt }}-regression_79
+          debug: ${{ env.DEBUG }}
+
+  regression_81:
+    runs-on: ubuntu-latest
+    name: "Tar error with alsa-ucm-conf (regression issue #81)."
+    steps:
+      - uses: actions/checkout@v4
+      - uses: ./
+        with:
+          packages: libasound2 libatk-bridge2.0-0 libatk1.0-0 libatspi2.0-0 libcups2 libdrm2 libgbm1 libnspr4 libnss3 libxcomposite1 libxdamage1 libxfixes3 libxkbcommon0 libxrandr2
+          version: ${{ github.run_id }}-${{ github.run_attempt }}-regression_81
+          debug: ${{ env.DEBUG }}
+
+  # NOTE: "folded" (>) folds newlines to spaces; "literal" (|) preserves
+  # newlines. Job names below match the YAML block-scalar style they use.
+  regression_84_folded_block_install:
+    runs-on: ubuntu-latest
+    name: "Install multiline package listing - folded block (regression issue #84)."
+    steps:
+      - uses: actions/checkout@v4
+      - uses: ./
+        with:
+          packages: >
+            xdot
+            rolldice distro-info-data
+          version: ${{ github.run_id }}-${{ github.run_attempt }}-regression_84_folded_block
+          debug: ${{ env.DEBUG }}
+
+  regression_84_folded_block_restore:
+    needs: regression_84_folded_block_install
+    runs-on: ubuntu-latest
+    name: "Restore multiline package listing - folded block (regression issue #84)."
+    steps:
+      - uses: actions/checkout@v4
+      - name: Execute
+        id: execute
+        uses: ./
+        with:
+          packages: xdot rolldice distro-info-data
+          version: ${{ github.run_id }}-${{ github.run_attempt }}-regression_84_folded_block
+          debug: ${{ env.DEBUG }}
+      - name: Verify
+        if: steps.execute.outputs.cache-hit != 'true'
+        run: |
+          echo "cache-hit = ${{ steps.execute.outputs.cache-hit }}"
+          exit 1
+        shell: bash
+
+  regression_84_literal_block_install:
+    runs-on: ubuntu-latest
+    name: "Install multiline package listing - literal block (regression issue #84)."
+    steps:
+      - uses: actions/checkout@v4
+      - uses: ./
+        with:
+          packages: |
+            xdot \
+            rolldice distro-info-data
+          version: ${{ github.run_id }}-${{ github.run_attempt }}-regression_84_literal_block
+          debug: ${{ env.DEBUG }}
+
+  regression_84_literal_block_restore:
+    needs: regression_84_literal_block_install
+    runs-on: ubuntu-latest
+    name: "Restore multiline package listing - literal block (regression issue #84)."
+    steps:
+      - uses: actions/checkout@v4
+      - name: Execute
+        id: execute
+        uses: ./
+        with:
+          packages: xdot rolldice distro-info-data
+          version: ${{ github.run_id }}-${{ github.run_attempt }}-regression_84_literal_block
+          debug: ${{ env.DEBUG }}
+      - name: Verify
+        if: steps.execute.outputs.cache-hit != 'true'
+        run: |
+          echo "cache-hit = ${{ steps.execute.outputs.cache-hit }}"
+          exit 1
+        shell: bash
+
+  regression_89:
+    runs-on: ubuntu-latest
+    name: "Upload logs artifact name (regression issue #89)."
+    steps:
+      - uses: actions/checkout@v4
+      - uses: ./
+        with:
+          packages: libgtk-3-dev:amd64
+          version: ${{ github.run_id }}-${{ github.run_attempt }}-regression_89
+          debug: ${{ env.DEBUG }}
+
+  regression_98:
+    runs-on: ubuntu-latest
+    name: "Install error due to SHELLOPTS override (regression issue #98)."
+    steps:
+      - uses: actions/checkout@v4
+      - uses: ./
+        with:
+          packages: git-restore-mtime libgl1-mesa-dev libosmesa6-dev pandoc
+          version: ${{ github.run_id }}-${{ github.run_attempt }}-regression_98
+          debug: ${{ env.DEBUG }}
+
+  regression_106_install:
+    runs-on: ubuntu-latest
+    name: "Stale apt repo - install phase (regression issue #106)."
+    steps:
+      - uses: actions/checkout@v4
+      - uses: ./
+        with:
+          packages: libtk8.6
+          version: ${{ github.run_id }}-${{ github.run_attempt }}-regression_106
+          debug: ${{ env.DEBUG }}
+
+  regression_106_restore:
+    needs: regression_106_install
+    runs-on: ubuntu-latest
+    name: "Stale apt repo - restore phase (regression issue #106)."
+    steps:
+      - uses: actions/checkout@v4
+      - uses: ./
+        with:
+          packages: libtk8.6
+          version: ${{ github.run_id }}-${{ github.run_attempt }}-regression_106
+          debug: ${{ env.DEBUG }}
+
+  # === Special Cases ===
+
+  multi_arch_cache_key:
+    runs-on: ubuntu-latest
+    name: "Cache packages with multi-arch cache key."
+    steps:
+      - uses: actions/checkout@v4
+      - uses: ./
+        with:
+          packages: libfuse2
+          version: ${{ github.run_id }}-${{ github.run_attempt }}-multi_arch_cache_key
+          debug: ${{ env.DEBUG }}
+
+  virtual_package:
+    runs-on: ubuntu-latest
+    name: "Cache virtual package."
+    steps:
+      - uses: actions/checkout@v4
+      - uses: ./
+        with:
+          packages: libvips
+          version: ${{ github.run_id }}-${{ github.run_attempt }}-virtual_package
+          debug: ${{ env.DEBUG }}
+
+  # === dpkg Registration Tests ===
+
+  dpkg_status_install:
+    runs-on: ubuntu-latest
+    name: "dpkg knows about packages after install (phase 1)."
+    steps:
+      - uses: actions/checkout@v4
+      - name: Execute
+        id: execute
+        uses: ./
+        with:
+          packages: rolldice
+          version: ${{ github.run_id }}-${{ github.run_attempt }}-dpkg_status
+          debug: ${{ env.DEBUG }}
+      - name: Verify dpkg knows the package
+        run: |
+          dpkg -s rolldice | grep -q 'Status: install ok installed'
+          echo "dpkg reports rolldice as installed after fresh install."
+        shell: bash
+
+  dpkg_status_restore:
+    needs: dpkg_status_install
+    runs-on: ubuntu-latest
+    name: "dpkg knows about packages after cache restore (phase 2)."
+    steps:
+      - uses: actions/checkout@v4
+      - name: Execute
+        id: execute
+        uses: ./
+        with:
+          packages: rolldice
+          version: ${{ github.run_id }}-${{ github.run_attempt }}-dpkg_status
+          debug: ${{ env.DEBUG }}
+      - name: Verify cache hit
+        run: test "${{ steps.execute.outputs.cache-hit }}" = "true"
+        shell: bash
+      - name: Verify dpkg knows the package after cache restore
+        run: |
+          dpkg -s rolldice | grep -q 'Status: install ok installed'
+          echo "dpkg reports rolldice as installed after cache restore."
+        shell: bash
+      - name: Verify the binary works
+        run: |
+          rolldice 2d6
+          echo "rolldice binary works after cache restore."
+        shell: bash
diff --git a/install_and_cache_pkgs.sh b/install_and_cache_pkgs.sh
index 1a544ad..9c7742a 100755
--- a/install_and_cache_pkgs.sh
+++ b/install_and_cache_pkgs.sh
@@ -76,21 +76,19 @@ install_log_filepath="${cache_dir}/install.log"
 log "Clean installing ${package_count} packages..."
 
 # Zero interaction while installing or upgrading the system via apt.
-sudo DEBIAN_FRONTEND=noninteractive apt-fast --yes install ${packages} > "${install_log_filepath}"
-log "done"
-log "Installation log written to ${install_log_filepath}"
+# Explicitly check exit status since set +e (from lib.sh) is active.
+sudo DEBIAN_FRONTEND=noninteractive apt-fast --yes install ${packages} 2>&1 | tee "${install_log_filepath}"
+install_rc=${PIPESTATUS[0]}
+
+if [ "${install_rc}" -ne 0 ]; then
+  log_err "Failed to install packages. apt-fast exited with an error (see messages above)."
+  exit 5
+fi
+log "Install completed successfully."
 
 log_empty_line
 
 installed_packages=$(get_installed_packages "${install_log_filepath}")
-log "Installed package list:"
-for installed_package in ${installed_packages}; do
-  # Reformat for human friendly reading.
-  log "- $(echo ${installed_package} | awk -F\= '{print $1" ("$2")"}')"
-done
-
-log_empty_line
-
 installed_packages_count=$(wc -w <<< "${installed_packages}")
 log "Caching ${installed_packages_count} installed packages..."
 for installed_package in ${installed_packages}; do
@@ -101,24 +99,35 @@ for installed_package in ${installed_packages}; do
 
   cache_filepath="${cache_dir}/${installed_package}.tar"
   if test ! -f "${cache_filepath}"; then
    read package_name package_ver < <(get_package_name_ver "${installed_package}")
    log " * Caching ${package_name} to ${cache_filepath}..."
-    # Pipe all package files (no folders), including symlinks, their targets, and installation control data to Tar.
-    tar -cf "${cache_filepath}" -C / --verbatim-files-from --files-from <(
-      { dpkg -L "${package_name}" &&
-        get_install_script_filepath "" "${package_name}" "preinst" &&
-        get_install_script_filepath "" "${package_name}" "postinst" ; } |
+    # Pipe all package files, directories, and symlinks (plus symlink targets
+    # and dpkg metadata) to Tar. Directories are included so that tar
+    # preserves their ownership and permissions on restore — without them,
+    # tar auto-creates parent directories using the current umask, which on
+    # some runners (e.g. GPU-optimized images) defaults to 0077, leaving
+    # restored trees inaccessible to non-root users.
+    tar -cf "${cache_filepath}" -C / --no-recursion --verbatim-files-from --files-from <(
+      { dpkg -L "${package_name}" | grep -vxF -e '/.' -e '.' -e '/' &&
+        # Include all dpkg info files for this package (list, md5sums,
+        # conffiles, triggers, preinst, postinst, prerm, postrm, etc.)
+        # so dpkg recognizes the package after cache restore.
+        ls -1 /var/lib/dpkg/info/${package_name}.* 2>/dev/null &&
+        ls -1 /var/lib/dpkg/info/${package_name}:*.* 2>/dev/null ; } |
       while IFS= read -r f; do
-        if test -f "${f}" -o -L "${f}"; then
-          get_tar_relpath "${f}"
+        if [ -f "${f}" ] || [ -L "${f}" ] || [ -d "${f}" ]; then
+          echo "${f#/}"
           if [ -L "${f}" ]; then
            target="$(readlink -f "${f}")"
            if [ -f "${target}" ]; then
-              get_tar_relpath "${target}"
+              echo "${target#/}"
            fi
          fi
        fi
      done
    )
+    # Save the dpkg status entry so we can register the package on restore.
+    dpkg -s "${package_name}" > "${cache_dir}/${installed_package}.dpkg-status" 2>/dev/null || true
+
    log " done (compressed size $(du -h "${cache_filepath}" | cut -f1))."
  fi
diff --git a/lib.sh b/lib.sh
index 755d939..b0cb54a 100755
--- a/lib.sh
+++ b/lib.sh
@@ -54,14 +54,14 @@ function get_install_script_filepath {
 # The list of colon delimited action syntax pairs with each pair equals
 # delimited. : :...
 ###############################################################################
-function get_installed_packages { 
+function get_installed_packages {
   local install_log_filepath="${1}"
-  local regex="^Unpacking ([^ :]+)([^ ]+)? (\[[^ ]+\]\s)?\(([^ )]+)" 
-  local dep_packages="" 
+  local regex="^Unpacking ([^ :]+)([^ ]+)? (\[[^ ]+\]\s)?\(([^ )]+)"
+  local dep_packages=""
   while read -r line; do
     # ${regex} should be unquoted since it isn't a literal.
     if [[ "${line}" =~ ${regex} ]]; then
-      dep_packages="${dep_packages}${BASH_REMATCH[1]}=${BASH_REMATCH[4]} "
+      dep_packages="${dep_packages}${BASH_REMATCH[1]}${BASH_REMATCH[2]}=${BASH_REMATCH[4]} "
     else
       log_err "Unable to parse package name and version from \"${line}\""
       exit 2
diff --git a/pre_cache_action.sh b/pre_cache_action.sh
index 5cb64cb..7c06450 100755
--- a/pre_cache_action.sh
+++ b/pre_cache_action.sh
@@ -111,6 +111,16 @@ if [ "${cpu_arch}" != "x86_64" ]; then
   log "- Architecture '${cpu_arch}' added to value."
 fi
 
+# Include a hash of pre-installed packages so runners with different base
+# images (e.g., GPU runners with CUDA pre-installed vs plain Ubuntu) get
+# different cache keys. This prevents a cache built on runner A (where some
+# packages were already installed) from being restored on runner B (where
+# those packages are missing).
+base_pkgs_hash="$(dpkg-query -W -f='${binary:Package}\n' | sha1sum | cut -f1 -d' ')"
+value="${value} base:${base_pkgs_hash}"
+log "- Base packages hash '${base_pkgs_hash}' added to value."
+echo "::notice::Runner base image fingerprint: ${base_pkgs_hash}. Runners with different pre-installed packages produce different fingerprints and cannot share caches."
+
 log "- Value to hash is '${value}'."
 key="$(echo "${value}" | md5sum | cut -f1 -d' ')"
 
diff --git a/restore_pkgs.sh b/restore_pkgs.sh
index 4556265..b336bc7 100755
--- a/restore_pkgs.sh
+++ b/restore_pkgs.sh
@@ -50,7 +50,7 @@ for cached_filepath in ${cached_filepaths}; do
   sudo tar -xf "${cached_filepath}" -C "${cache_restore_root}" > /dev/null
   log " done"
 
-  # Execute install scripts if available. 
+  # Execute install scripts if available.
   if test ${execute_install_scripts} == "true"; then
     # May have to add more handling for extracting pre-install script before extracting all files.
     # Keeping it simple for now.
@@ -59,3 +59,69 @@ for cached_filepath in ${cached_filepaths}; do
   fi
 done
 log "done"
+
+log_empty_line
+
+# Register packages with dpkg so they appear as installed.
+# The tar extraction restores dpkg info files (list, md5sums, etc.) but the
+# main status database (/var/lib/dpkg/status) also needs updating.
+dpkg_status_dir="${cache_dir}"
+status_files=$(ls -1 "${dpkg_status_dir}"/*.dpkg-status 2>/dev/null || true)
+if test -n "${status_files}"; then
+  log "Registering restored packages with dpkg..."
+  dpkg_status_path="${cache_restore_root}var/lib/dpkg/status"
+  for status_file in ${status_files}; do
+    pkg_name=$(grep '^Package:' "${status_file}" | head -1 | sed 's/^Package: //')
+    cached_ver=$(grep '^Version:' "${status_file}" | head -1 | sed 's/^Version: //')
+    cached_arch=$(grep '^Architecture:' "${status_file}" | head -1 | sed 's/^Architecture: //')
+
+    # Build architecture-qualified name for dpkg queries.
+    dpkg_query_name="${pkg_name}"
+    if [ -n "${cached_arch}" ] && [ "${cached_arch}" != "all" ]; then
+      dpkg_query_name="${pkg_name}:${cached_arch}"
+    fi
+
+    if dpkg -s "${dpkg_query_name}" > /dev/null 2>&1; then
+      existing_status=$(dpkg -s "${dpkg_query_name}" 2>/dev/null | grep '^Status:' | head -1)
+      existing_ver=$(dpkg -s "${dpkg_query_name}" 2>/dev/null | grep '^Version:' | head -1 | sed 's/^Version: //')
+
+      if echo "${existing_status}" | grep -q 'install ok installed'; then
+        if [ "${existing_ver}" = "${cached_ver}" ]; then
+          log "- ${dpkg_query_name} already at version ${cached_ver}, skipping."
+          continue
+        fi
+        # Package is installed at a different version (was upgraded during
+        # the original install). Remove the old dpkg status entry so we can
+        # replace it with the cached (upgraded) version.
+        log "- ${dpkg_query_name} updating from ${existing_ver} to ${cached_ver}..."
+        sudo python3 -c "
+import sys
+pkg, arch, path = sys.argv[1], sys.argv[2], sys.argv[3]
+with open(path, 'r') as f:
+    content = f.read()
+entries = content.split('\n\n')
+kept = []
+for entry in entries:
+    if not entry.strip():
+        continue
+    lines = entry.strip().split('\n')
+    match_pkg = any(l == 'Package: ' + pkg for l in lines)
+    match_arch = any(l == 'Architecture: ' + arch for l in lines)
+    if match_pkg and match_arch:
+        continue
+    kept.append(entry.strip())
+with open(path, 'w') as f:
+    f.write('\n\n'.join(kept))
+    if kept:
+        f.write('\n\n')
+" "${pkg_name}" "${cached_arch}" "${dpkg_status_path}"
+      fi
+    fi
+
+    # Append the status entry (with blank line separator) to the dpkg database.
+    echo "" | sudo tee -a "${dpkg_status_path}" > /dev/null
+    cat "${status_file}" | sudo tee -a "${dpkg_status_path}" > /dev/null
+    log "- ${dpkg_query_name} registered."
+  done
+  log "done"
+fi