Latest draft, still no E2E testing yet.

This commit is contained in:
awalsh128 2025-10-04 23:38:32 -07:00
parent c143000184
commit 6e82becbf7
48 changed files with 651 additions and 1278 deletions

3
.actrc
View file

@@ -1,3 +0,0 @@
-P ubuntu-latest=catthehacker/ubuntu:act-latest
--env-file=.env.local
-s GITHUB_TOKEN

2
.github/ISSUE_TEMPLATE/.remarkrc.yaml vendored Normal file
View file

@@ -0,0 +1,2 @@
plugins:
remark-lint-heading-style: [true, 'atx']

View file

@@ -26,7 +26,7 @@ steps:
### Package List
```txt
```text
# List the packages you're trying to cache
# Example: curl wget git
```
@@ -45,7 +45,7 @@ steps:
## Logs and Error Messages
```txt
```text
# Paste relevant logs, error messages, or debug output here
# Enable debug mode by adding: debug: true to your workflow step
```
@@ -66,6 +66,6 @@ steps:
## Checklist
- [ ] I have read the [non-file dependencies limitation](https://github.com/awalsh128/cache-apt-pkgs-action/blob/master/README.md#non-file-dependencies)
- [ ] I have read the [non-file dependencies limitation](https://github.com/awalsh128/cache-apt-pkgs-action/blob/main/README.md#non-file-dependencies)
- [ ] I have searched existing issues for duplicates
- [ ] I have provided all requested information above

View file

@@ -44,7 +44,7 @@ jobs:
with:
packages: xdot=1.3-1
version: ${{ github.run_id }}-${{ github.run_attempt }}-list_all_versions
debug: "false"
debug: false
- name: Verify
if: |
steps.execute.outputs.cache-hit != 'false' ||
@@ -69,7 +69,7 @@ jobs:
with:
packages: xdot rolldice
version: ${{ github.run_id }}-${{ github.run_attempt }}-list_versions
debug: "false"
debug: false
- name: Verify
if: steps.execute.outputs.cache-hit != 'false' || steps.execute.outputs.package-version-list != 'rolldice=1.16-1build3,xdot=1.3-1'
run: |
@@ -91,7 +91,7 @@ jobs:
with:
packages: xdot rolldice
version: ${{ github.run_id }}-${{ github.run_attempt }}-standard_workflow
debug: "false"
debug: false
- name: Verify
if: steps.execute.outputs.cache-hit != 'false'
run: |
@@ -111,7 +111,7 @@ jobs:
with:
packages: xdot rolldice
version: ${{ github.run_id }}-${{ github.run_attempt }}-standard_workflow_install_with_new_version
debug: "false"
debug: false
- name: Verify
if: steps.execute.outputs.cache-hit != 'false'
run: |

3
.gitignore vendored
View file

@@ -5,4 +5,7 @@
# Don't ignore the main .env file
!.env
# Local Go binaries from build command
cache_apt_pkgs*
scripts/sandbox.sh

View file

@@ -1,10 +1,8 @@
version: "2"
formatters:
enable:
- gofumpt # formats Go code
- goimports # formats imports and does everything that gofmt does
linters:
enable:
- asasalint # checks for pass []any as any in variadic func(...any)

View file

@@ -1,54 +1,41 @@
# Enable all rules by default
default: true
# Markdown linting configuration with all rules enabled
extends: markdownlint/style/prettier
# MD003 heading-style - Header style
MD003:
style: atx # Use # style headers
# MD004 ul-style - Unordered list style
MD004:
style: consistent # Be consistent with the first list style used
# MD012 no-multiple-blanks - No multiple consecutive blank lines
MD012:
maximum: 1
# MD013 line-length - Line length
MD013:
line_length: 100
code_blocks: false
tables: false
# MD024 no-duplicate-header - No duplicate headers
MD024:
siblings_only: true # Allow duplicates if they're not siblings
# MD026 no-trailing-punctuation - No trailing punctuation in header
MD026:
punctuation: .,;:!。,;:!
# MD029 ol-prefix - Ordered list item prefix
MD029:
style: one_or_ordered
# MD033 no-inline-html - No inline HTML
MD033:
allowed_elements: []
# MD034 no-bare-urls - No bare URLs
MD034: true
# MD035 hr-style - Horizontal rule style
MD035:
style: ---
# MD041 first-line-heading - First line should be a top-level header
MD041:
level: 1
# MD046 code-block-style - Code block style
MD046:
style: fenced

View file

@@ -2,3 +2,7 @@ plugins:
remark-preset-lint-consistent: true
remark-preset-lint-recommended: true
remark-lint-list-item-indent: true
# Allow ATX-style headings (using #). Previously Trunk/remark was expecting
# setext-style for certain heading levels which caused the "Unexpected ATX
# heading, expected setext" errors. Setting this rule to 'atx' relaxes that.
remark-lint-heading-style: [true, 'atx']

View file

@@ -3,3 +3,9 @@ markdoc = md
[*.md]
BasedOnStyles = Vale
[*]
vocab = Project
# Disable spelling checks for technical terms
Vale.Spelling = NO

View file

@@ -0,0 +1,6 @@
Goroutine
goroutines
Mutex
mutexes
heredoc
Profiler

View file

@@ -1,4 +0,0 @@
version: "0.2"
# Suggestions can sometimes take longer on CI machines,
# leading to inconsistent results.
suggestionsTimeout: 5000 # ms

View file

@@ -19,6 +19,7 @@ runtimes:
lint:
disabled:
- cspell
- codespell
enabled:
- yamlfmt@0.17.2
- vale@3.12.0
@@ -39,7 +40,6 @@ lint:
- gitleaks@8.28.0
- deno@2.5.0
- biome@2.2.4
- codespell@2.4.1
- kube-linter@0.7.2
- golines@0.13.0
- semgrep@1.136.0
@@ -59,6 +59,9 @@ lint:
- trivy@0.66.0
- trufflehog@3.90.6
- yamllint@1.37.1
ignore:
- linters: [markdownlint]
paths: [".github/ISSUE_TEMPLATE/**"]
actions:
enabled:
- trunk-announce

View file

@@ -1,6 +1,6 @@
{
"goVersion": "1.24",
"toolchainVersion": "",
"syspkgVersion": "v0.1.5",
"exportDate": "2025-09-07 11:53:25"
"goVersion": "1.24",
"toolchainVersion": "",
"syspkgVersion": "v0.1.5",
"exportDate": "2025-09-07 11:53:25"
}

View file

@@ -2,7 +2,9 @@
"editor.tabSize": 2,
"editor.insertSpaces": true,
"editor.detectIndentation": false,
"editor.rulers": [100],
"editor.rulers": [
100
],
"editor.formatOnSave": true,
"editor.formatOnPaste": true,
"editor.formatOnType": true,
@@ -38,4 +40,4 @@
"files.readonlyInclude": {},
"workbench.editor.defaultBinaryEditor": "default",
"workbench.editor.enablePreviewFromCodeNavigation": false
}
}

343
CLAUDE.md
View file

@@ -3,63 +3,64 @@
<!-- START doctoc generated TOC please keep comment here to allow auto update -->
<!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE -->
- [Code Improvements by Claude](#code-improvements-by-claude)
- [General Code Organization Principles](#general-code-organization-principles)
- [1. Package Structure](#1-package-structure)
- [2. Code Style and Formatting](#2-code-style-and-formatting)
- [3. Error Handling](#3-error-handling)
- [4. API Design](#4-api-design)
- [5. Documentation Practices](#5-documentation-practices)
- [Go Code Documentation Standards](#go-code-documentation-standards)
- [Code Documentation](#code-documentation)
- [Project Documentation](#project-documentation)
- [6. Testing Strategy](#6-testing-strategy)
- [Types of Tests](#types-of-tests)
- [Test Coverage Strategy](#test-coverage-strategy)
- [7. Security Best Practices](#7-security-best-practices)
- [Input Validation](#input-validation)
- [Secure Coding](#secure-coding)
- [Secrets Management](#secrets-management)
- [8. Performance Considerations](#8-performance-considerations)
- [9. Profiling and Benchmarking](#9-profiling-and-benchmarking)
- [CPU Profiling](#cpu-profiling)
- [Memory Profiling](#memory-profiling)
- [Benchmarking](#benchmarking)
- [Trace Profiling](#trace-profiling)
- [Common Profiling Tasks](#common-profiling-tasks)
- [pprof Web Interface](#pprof-web-interface)
- [Key Metrics to Watch](#key-metrics-to-watch)
- [10. Concurrency Patterns](#10-concurrency-patterns)
- [11. Configuration Management](#11-configuration-management)
- [12. Logging and Observability](#12-logging-and-observability)
- [Non-Go Files](#non-go-files)
- [GitHub Actions](#github-actions)
- [Action File Formatting](#action-file-formatting)
- [Release Management](#release-management)
- [Create a README File](#create-a-readme-file)
- [Testing and Automation](#testing-and-automation)
- [Community Engagement](#community-engagement)
- [Further Guidance](#further-guidance)
- [Bash Scripts](#bash-scripts)
- [Script Testing](#script-testing)
- [Test Framework Architecture Pattern](#test-framework-architecture-pattern)
- [Script Argument Parsing Pattern](#script-argument-parsing-pattern)
- [Centralized Configuration Management](#centralized-configuration-management)
- [Implementation Status](#implementation-status)
- [Testing Principles](#testing-principles)
- [1. Test Organization Strategy](#1-test-organization-strategy)
- [2. Code Structure](#2-code-structure)
- [Constants and Variables](#constants-and-variables)
- [Helper Functions](#helper-functions)
- [3. Test Case Patterns](#3-test-case-patterns)
- [Table-Driven Tests (for simple cases)](#table-driven-tests-for-simple-cases)
- [Individual Tests (for complex cases)](#individual-tests-for-complex-cases)
- [4. Best Practices Applied](#4-best-practices-applied)
- [5. Examples of Improvements](#5-examples-of-improvements)
- [Before](#before)
- [After](#after)
- [Key Benefits](#key-benefits)
- [Conclusion](#conclusion)
**Table of Contents**
- [General Code Organization Principles](#general-code-organization-principles)
- [1. Package Structure](#1-package-structure)
- [2. Code Style and Formatting](#2-code-style-and-formatting)
- [3. Error Handling](#3-error-handling)
- [4. API Design](#4-api-design)
- [5. Documentation Practices](#5-documentation-practices)
- [Go Code Documentation Standards](#go-code-documentation-standards)
- [Code Documentation](#code-documentation)
- [Project Documentation](#project-documentation)
- [6. Testing Strategy](#6-testing-strategy)
- [Types of Tests](#types-of-tests)
- [Test Coverage Strategy](#test-coverage-strategy)
- [7. Security Best Practices](#7-security-best-practices)
- [Input Validation](#input-validation)
- [Secure Coding](#secure-coding)
- [Secrets Management](#secrets-management)
- [8. Performance Considerations](#8-performance-considerations)
- [9. Profiling and Benchmarking](#9-profiling-and-benchmarking)
- [CPU Profiling](#cpu-profiling)
- [Memory Profiling](#memory-profiling)
- [Benchmarking](#benchmarking)
- [Trace Profiling](#trace-profiling)
- [Common Profiling Tasks](#common-profiling-tasks)
- [`pprof` Web Interface](#pprof-web-interface)
- [Key Metrics to Watch](#key-metrics-to-watch)
- [10. Concurrency Patterns](#10-concurrency-patterns)
- [11. Configuration Management](#11-configuration-management)
- [12. Logging and Observability](#12-logging-and-observability)
- [Non-Go Files](#non-go-files)
- [GitHub Actions](#github-actions)
- [Action File Formatting](#action-file-formatting)
- [YAML Formatting](#yaml-formatting)
- [Quoting Guidelines](#quoting-guidelines)
- [Formatting Standards](#formatting-standards)
- [Bash Scripts](#bash-scripts)
- [Script Testing](#script-testing)
- [YAML Files](#yaml-files)
- [Quoting Guidelines](#quoting-guidelines-1)
- [Examples](#examples)
- [Formatting Guidelines](#formatting-guidelines)
- [Multi-line Strings](#multi-line-strings)
- [GitHub Actions Specific](#github-actions-specific)
- [Testing Principles](#testing-principles)
- [1. Test Organization Strategy](#1-test-organization-strategy)
- [2. Code Structure](#2-code-structure)
- [Constants and Variables](#constants-and-variables)
- [Helper Functions](#helper-functions)
- [3. Test Case Patterns](#3-test-case-patterns)
- [Table-Driven Tests (for simple cases)](#table-driven-tests-for-simple-cases)
- [Individual Tests (for complex cases)](#individual-tests-for-complex-cases)
- [4. Best Practices Applied](#4-best-practices-applied)
- [5. Examples of Improvements](#5-examples-of-improvements)
- [Before](#before)
- [After](#after)
- [Key Benefits](#key-benefits)
- [Conclusion](#conclusion)
<!-- END doctoc generated TOC please keep comment here to allow auto update -->
@@ -250,7 +251,7 @@ package cache
- Validate all external input
- Use strong types over strings
- Implement proper sanitization
- Implement proper input validation and cleaning
- Assert array bounds
- Validate file paths
@@ -266,7 +267,7 @@ package cache
- Never commit secrets
- Use environment variables
- Implement secure config loading
- Implement secure configuration loading
- Rotate credentials regularly
- Log access to sensitive operations
@@ -379,7 +380,7 @@ go tool trace trace.out
go tool pprof -alloc_objects mem.prof
```
3. **Goroutine Blocking**
3. **Goroutine Block Profiling**
```bash
# Track goroutine blocks
@@ -395,7 +396,7 @@ go tool trace trace.out
go tool pprof mutex.prof
```
#### pprof Web Interface
#### `pprof` Web Interface
For visual analysis:
@@ -435,7 +436,7 @@ go tool pprof -http=:8080 cpu.prof
- Keep critical sections small
- Document concurrency safety
- Use context for cancellation
- Consider rate limiting and backpressure
- Consider rate limiting and load shedding
### 11. Configuration Management
@@ -496,12 +497,112 @@ For more details, visit:
- <https://docs.github.com/en/actions/how-tos/create-and-publish-actions/manage-custom-actions>
- <https://docs.github.com/en/actions/how-tos/create-and-publish-actions/release-and-maintain-actions>
### YAML Formatting
#### Quoting Guidelines
Follow these rules for consistent YAML formatting:
**DO quote when required:**
```yaml
# Strings with special characters or spaces
version: "test version with spaces"
name: "app-v1.2.3"
message: "Value contains: colons, commas, quotes"
# Empty strings
packages: ""
input: ""
# Values that could be interpreted as other types
id: "123" # Prevents interpretation as number
flag: "true" # Prevents interpretation as boolean
version: "1.0" # Prevents interpretation as number
# YAML special values that should be strings
value: "null" # String "null", not null value
enable: "false" # String "false", not boolean false
```
**DO NOT quote simple values:**
```yaml
# Booleans
debug: false
enabled: true
# Numbers
count: 42
version: 1.2
# Simple strings without special characters
name: ubuntu-latest
step: checkout
action: setup-node
# GitHub Actions expressions (never quote these)
if: github.event_name == 'push'
with: ${{ secrets.TOKEN }}
```
**GitHub Actions specific guidelines:**
```yaml
# Action references - never quote
uses: actions/checkout@v4
uses: ./path/to/local/action
# Boolean inputs - don't quote
debug: false
cache: true
# Version strings with special chars - quote if needed
version: "v1.2.3-beta"
# Expressions - never quote
if: ${{ github.ref == 'refs/heads/main' }}
run: echo "${{ github.actor }}"
```
#### Formatting Standards
- Use 2 spaces for indentation
- Use `-` for list items with proper indentation
- Keep consistent spacing around colons
- Use block scalar `|` for multiline strings
- Use folded scalar `>` for wrapped text
Example of well-formatted YAML:
```yaml
name: CI Pipeline
on:
push:
branches: [main, develop]
pull_request:
branches: [main]
jobs:
test:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Run tests
run: |
npm install
npm test
env:
NODE_ENV: test
DEBUG: false
```
### Bash Scripts
Project scripts should follow these guidelines:
- Follow formatting rules in
[Shellcheck](https://github.com/koalaman/shellcheck/wiki)
[ShellCheck](https://github.com/koalaman/shellcheck/wiki)
- Follow style guide rules in
[Google Bash Style Guide](https://google.github.io/styleguide/shellguide)
- Include proper error handling and exit codes
@@ -831,6 +932,116 @@ remaining_args=$(parse_common_args "$@")
main_menu
```
### YAML Files
YAML files in the project (GitHub Actions workflows, configuration files, etc.)
should follow these best practices:
#### Quoting Guidelines
- **Avoid unnecessary quotes** - YAML values don't need quotes unless they
contain special characters
- **Use quotes when required**:
- Values containing spaces: `version: "test version with spaces"`
- Empty strings: `packages: ""`
- Values starting with special characters: `value: "@special"`
- Boolean-like strings that should be treated as strings: `value: "true"` (if
you want the string "true", not boolean)
- Numeric-like strings: `version: "1.0"` (if you want string "1.0", not number
1.0)
#### Examples
**Good - No unnecessary quotes**:
```yaml
name: Test Action
on: workflow_dispatch
jobs:
test:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: ./
with:
packages: curl wget
version: test-1.0
debug: true
```
**Avoid - Unnecessary quotes**:
```yaml
name: "Test Action"
on: "workflow_dispatch"
jobs:
test:
runs-on: "ubuntu-latest"
steps:
- uses: "actions/checkout@v4"
- uses: "./"
with:
packages: "curl wget"
version: "test-1.0"
debug: "true"
```
**Good - Quotes when needed**:
```yaml
# Quotes required for values with spaces
version: "test version with spaces"
# Quotes required for empty strings
packages: ""
# No quotes needed for simple values
debug: true
timeout: 300
name: test-job
```
#### Formatting Guidelines
- Use 2-space indentation consistently
- Keep lines under 120 characters when possible
- Use `|` for multi-line strings that need line breaks preserved
- Use `>` for multi-line strings that should be folded
- Align nested items consistently
- Use meaningful names for job IDs and step IDs (use kebab-case)
#### Multi-line Strings
```yaml
# For scripts that need line breaks preserved
run: |
echo "Line 1"
echo "Line 2"
if [[ condition ]]; then
echo "Line 3"
fi
# For long descriptions that should be folded
description: >
This is a very long description that
will be folded into a single line
when parsed by YAML.
# For package lists (GitHub Actions input)
packages: |
curl
wget
jq
```
#### GitHub Actions Specific
- Use unquoted boolean values: `required: true`, `debug: false`
- Use unquoted numeric values: `timeout-minutes: 30`
- Quote version strings that might be interpreted as numbers: `version: "1.0"`
- Use kebab-case for input/output names: `cache-hit`, `package-version-list`
- Use meaningful step IDs: `test-basic-install`, `verify-cache-hit`
## Testing Principles
### 1. Test Organization Strategy
@@ -858,8 +1069,8 @@ var (
)
```
- Define constants for fixed values where the prescence and format is only
needed and the value content itself does not effect the behavior under test
- Define constants for fixed values where the presence and format is only needed
and the value content itself does not affect the behavior under test
- Use variables for reusable test data
- Group related constants and variables together
- Do not prefix constants or variables with `test`
@@ -1247,4 +1458,4 @@ These improvements make the test code:
- More efficient to extend
The patterns and principles can be applied across different types of tests to
create a consistent and effective testing strategy.
create a consistent and effective testing strategy.

View file

@@ -1,6 +1,7 @@
# 🤝 Contributing to cache-apt-pkgs-action
Thank you for your interest in contributing to cache-apt-pkgs-action! This document provides guidelines and instructions for contributing to the project.
Thank you for your interest in contributing to cache-apt-pkgs-action! This
document provides guidelines and instructions for contributing to the project.
[![CI](https://github.com/awalsh128/cache-apt-pkgs-action/actions/workflows/ci.yml/badge.svg?branch=dev-v2.0)](https://github.com/awalsh128/cache-apt-pkgs-action/actions/workflows/ci.yml?query=branch%3Adev-v2.0)
[![Go Report Card](https://goreportcard.com/badge/github.com/awalsh128/cache-apt-pkgs-action)](https://goreportcard.com/report/github.com/awalsh128/cache-apt-pkgs-action)
@@ -8,13 +9,17 @@ Thank you for your interest in contributing to cache-apt-pkgs-action! This docum
[![License](https://img.shields.io/github/license/awalsh128/cache-apt-pkgs-action)](https://github.com/awalsh128/cache-apt-pkgs-action/blob/dev-v2.0/LICENSE)
[![Release](https://img.shields.io/github/v/release/awalsh128/cache-apt-pkgs-action)](https://github.com/awalsh128/cache-apt-pkgs-action/releases)
⚠️ **IMPORTANT**: This is a very unstable branch and will be introduced as version 2.0 once in beta.
⚠️ **IMPORTANT**: This is a very unstable branch and will be introduced as
version 2.0 once in beta.
## 🔗 Useful Links
- 📖 [GitHub Action Documentation](https://github.com/awalsh128/cache-apt-pkgs-action#readme)
- 📦 [Go Package Documentation](https://pkg.go.dev/github.com/awalsh128/cache-apt-pkgs-action)
- 🔄 [GitHub Actions Workflow Status](https://github.com/awalsh128/cache-apt-pkgs-action/actions)
- 📖
[GitHub Action Documentation](https://github.com/awalsh128/cache-apt-pkgs-action#readme)
- 📦
[Go Package Documentation](https://pkg.go.dev/github.com/awalsh128/cache-apt-pkgs-action)
- 🔄
[GitHub Actions Workflow Status](https://github.com/awalsh128/cache-apt-pkgs-action/actions)
- 🐛 [Issues](https://github.com/awalsh128/cache-apt-pkgs-action/issues)
- 🛠️ [Pull Requests](https://github.com/awalsh128/cache-apt-pkgs-action/pulls)
@@ -93,12 +98,15 @@ There are two ways to test the GitHub Action workflows:
1. ☁️ **Using GitHub Actions**:
- Push your changes to a branch
- Create a PR to trigger the [test workflow](https://github.com/awalsh128/cache-apt-pkgs-action/blob/dev-v2.0/.github/workflows/test-action.yml)
- Or manually trigger the workflow from the [Actions tab](https://github.com/awalsh128/cache-apt-pkgs-action/actions/workflows/test-action.yml)
- Create a PR to trigger the
[test workflow](https://github.com/awalsh128/cache-apt-pkgs-action/blob/dev-v2.0/.github/workflows/test-action.yml)
- Or manually trigger the workflow from the
[Actions tab](https://github.com/awalsh128/cache-apt-pkgs-action/actions/workflows/test-action.yml)
2. 🐳 **Running Tests Locally** (requires Docker):
- Install Docker
- 🪟 WSL users install [Docker Desktop](https://www.docker.com/products/docker-desktop/)
- 🪟 WSL users install
[Docker Desktop](https://www.docker.com/products/docker-desktop/)
- 🐧 Non-WSL users (native Linux)
```bash
@@ -108,7 +116,8 @@ There are two ways to test the GitHub Action workflows:
sudo systemctl start docker
```
- 🎭 Install [`act`](https://github.com/nektos/act) for local GitHub Actions testing:
- 🎭 Install [`act`](https://github.com/nektos/act) for local GitHub Actions
testing:
- ▶️ Run `act` on any action test in the following ways:
```bash
@@ -148,12 +157,15 @@ There are two ways to test the GitHub Action workflows:
1. **Using GitHub Actions**:
- Push your changes to a branch
- Create a PR to trigger the [test workflow](https://github.com/awalsh128/cache-apt-pkgs-action/blob/dev-v2.0/.github/workflows/test-action.yml)
- Or manually trigger the workflow from the [Actions tab](https://github.com/awalsh128/cache-apt-pkgs-action/actions/workflows/test-action.yml)
- Create a PR to trigger the
[test workflow](https://github.com/awalsh128/cache-apt-pkgs-action/blob/dev-v2.0/.github/workflows/test-action.yml)
- Or manually trigger the workflow from the
[Actions tab](https://github.com/awalsh128/cache-apt-pkgs-action/actions/workflows/test-action.yml)
2. **Running Tests Locally** (requires Docker):
- Install Docker
- WSL users install [Docker Desktop](https://www.docker.com/products/docker-desktop/)
- WSL users install
[Docker Desktop](https://www.docker.com/products/docker-desktop/)
- Non-WSL users (native Linux)
```bash
@@ -163,7 +175,8 @@ There are two ways to test the GitHub Action workflows:
sudo systemctl start docker
```
- Install [`act`](https://github.com/nektos/act) for local GitHub Actions testing:
- Install [`act`](https://github.com/nektos/act) for local GitHub Actions
testing:
- Run `act` on any action test in the following ways:
```bash
@@ -182,11 +195,14 @@ There are two ways to test the GitHub Action workflows:
```
2. ✏️ Make your changes, following these guidelines:
- 📚 Follow Go coding [standards and conventions](https://go.dev/doc/effective_go)
- 📚 Follow Go coding
[standards and conventions](https://go.dev/doc/effective_go)
- ✅ Add tests for new features
- 🎯 Test behaviors on the public interface not implementation
- 🔍 Keep tests for each behavior separate
- 🏭 Use constants and factory functions to keep testing arrangement and asserts clear. Not a lot of boilerplate not directly relevant to the test.
- 🏭 Use constants and factory functions to keep testing arrangement and
asserts clear. Not a lot of boilerplate not directly relevant to the
test.
- 📖 Update documentation as needed
- 🎯 Keep commits focused and atomic
- 📝 Write clear commit messages
@@ -212,20 +228,25 @@ There are two ways to test the GitHub Action workflows:
## 💻 Code Style Guidelines
- 📏 Follow [standard Go formatting](https://golang.org/doc/effective_go#formatting) (use `gofmt`)
- 📖 Follow [Go Code Review Comments](https://github.com/golang/go/wiki/CodeReviewComments)
- 📏 Follow
[standard Go formatting](https://golang.org/doc/effective_go#formatting) (use
`gofmt`)
- 📖 Follow
[Go Code Review Comments](https://github.com/golang/go/wiki/CodeReviewComments)
- 🔍 Write clear, self-documenting code
- 📚 Add [godoc](https://blog.golang.org/godoc) comments for complex logic
- 📚 Add [GoDoc](https://blog.golang.org/godoc) comments for complex logic
- 🏷️ Use meaningful variable and function names
- ✨ Keep functions focused and manageable in size
- 🔒 Prefer immutability vs state changing
- 📏 Aim for lines less than 50
- 🎯 Observe [single responsibility principle](https://en.wikipedia.org/wiki/Single-responsibility_principle)
- 🎯 Observe
[single responsibility principle](https://en.wikipedia.org/wiki/Single-responsibility_principle)
📚 For more details on Go best practices, refer to:
- 📖 [Effective Go](https://golang.org/doc/effective_go)
- 🔍 [Go Code Review Comments](https://github.com/golang/go/wiki/CodeReviewComments)
- 🔍
[Go Code Review Comments](https://github.com/golang/go/wiki/CodeReviewComments)
## Documentation
@@ -242,15 +263,22 @@ There are two ways to test the GitHub Action workflows:
## Questions or Problems?
- Open an [issue](https://github.com/awalsh128/cache-apt-pkgs-action/issues/new) for bugs or feature requests
- Use [discussions](https://github.com/awalsh128/cache-apt-pkgs-action/discussions) for questions or ideas
- Reference the [GitHub Action documentation](https://github.com/awalsh128/cache-apt-pkgs-action#readme)
- Check existing [issues](https://github.com/awalsh128/cache-apt-pkgs-action/issues) and [pull requests](https://github.com/awalsh128/cache-apt-pkgs-action/pulls)
- Open an [issue](https://github.com/awalsh128/cache-apt-pkgs-action/issues/new)
for bugs or feature requests
- Use
[discussions](https://github.com/awalsh128/cache-apt-pkgs-action/discussions)
for questions or ideas
- Reference the
[GitHub Action documentation](https://github.com/awalsh128/cache-apt-pkgs-action#readme)
- Check existing
[issues](https://github.com/awalsh128/cache-apt-pkgs-action/issues) and
[pull requests](https://github.com/awalsh128/cache-apt-pkgs-action/pulls)
- Tag maintainers for urgent issues
## License
By contributing to this project, you agree that your contributions will be licensed under the same license as the project.
By contributing to this project, you agree that your contributions will be
licensed under the same license as the project.
## 📦 Publishing to pkg.go.dev
@@ -266,7 +294,8 @@ To make the library available on [pkg.go.dev](https://pkg.go.dev):
```
2. 🔄 Trigger pkg.go.dev to fetch your module:
- Visit [pkg.go.dev for this module](https://pkg.go.dev/github.com/awalsh128/cache-apt-pkgs-action@v2.0.0)
- Visit
[pkg.go.dev for this module](https://pkg.go.dev/github.com/awalsh128/cache-apt-pkgs-action@v2.0.0)
- Or fetch via command line:
```bash
@@ -274,7 +303,7 @@ To make the library available on [pkg.go.dev](https://pkg.go.dev):
```
3. 📝 Best practices for publishing:
- Add comprehensive godoc comments
- Add comprehensive `godoc` comments
- Include examples in your documentation
- Use semantic versioning for tags
- Keep the module path consistent

142
README.md
View file

@@ -30,7 +30,6 @@
- [Cache Misses](#cache-misses)
- [🤝 Contributing](#-contributing)
- [📜 License](#-license)
- [🔄 Updates and Maintenance](#-updates-and-maintenance)
- [🌟 Acknowledgements](#-acknowledgements)
- [Getting Started](#getting-started)
- [Workflow Setup](#workflow-setup)
@@ -39,7 +38,7 @@
- [Output Values](#output-values)
- [Cache scopes](#cache-scopes)
- [Example workflows](#example-workflows)
- [Build and Deploy Doxygen Documentation](#build-and-deploy-doxygen-documentation)
- [Build and Deploy `Doxygen` Documentation](#build-and-deploy-doxygen-documentation)
- [Simple Package Installation](#simple-package-installation)
- [Caveats](#caveats)
- [Edge Cases](#edge-cases)
@@ -48,10 +47,14 @@
<!-- END doctoc generated TOC please keep comment here to allow auto update -->
Speed up your GitHub Actions workflows by caching APT package dependencies. This action integrates with [actions/cache](https://github.com/actions/cache/) to provide efficient package caching, significantly reducing workflow execution time by avoiding repeated package installations.
Speed up your GitHub Actions workflows by caching APT package dependencies. This
action integrates with [actions/cache](https://github.com/actions/cache/) to
provide efficient package caching, significantly reducing workflow execution
time by avoiding repeated package installations.
> [!IMPORTANT]
> We're looking for co-maintainers to help review changes and investigate issues. If you're interested in contributing to this project, please reach out.
> **Important:** We're looking for co-maintainers to help review changes and
> investigate issues. If you're interested in contributing to this project,
> please reach out.
## 🚀 Quick Start
@@ -100,12 +103,21 @@ steps:
### Version Selection
Choose the appropriate version tag:
> ⚠️ Starting with this release, the action enforces immutable references.
> Workflows must pin `awalsh128/cache-apt-pkgs-action` to a release tag or
> commit SHA. Referencing a branch (for example `@main`) will now fail during
> the `setup` step. For more information on blocking and SHA pinning actions,
> see the
> [announcement on the GitHub changelog](https://github.blog/changelog/2025-08-15-github-actions-policy-now-supports-blocking-and-sha-pinning-actions).
- `@latest` - Latest stable release
- `@v2` - Latest v2.x.x release
- `@master` - Latest tested code (potentially unstable)
- `@dev` - Experimental features
Recommended options:
- `@v2` or any other published release tag.
- A full commit SHA such as `@4f5c863ba5ce9f1784c8ad7d8f63a9cfd3f1ab2c`.
Avoid floating references such as `@latest`, `@master`, or `@dev`. The action
will refuse to run when a branch reference is detected to protect consumers from
involuntary updates.
### Basic Example
@@ -157,6 +169,25 @@ jobs:
echo "Installed packages: ${{ steps.apt-cache.outputs.package-version-list }}"
```
### Binary Integrity Verification
Every published release bundles precompiled binaries under
`distribute/<runner arch>/cache_apt_pkgs`. Starting with this release the action
verifies the binary against a co-located `.sha256` manifest before execution. If
the checksum does not match the expected value the `setup` step exits with an
error to prevent tampering or incomplete releases.
When preparing a new release:
1. Run `scripts/distribute.sh push` to build architecture-specific binaries.
2. The script now emits a matching `cache-apt-pkgs-linux-<arch>.sha256` file for
each binary.
3. Copy the binaries and checksum files into `distribute/<arch>/` before
creating the release artifact.
Workflows do not need to perform any additional setup—the checksum enforcement
is automatic as long as the bundled `.sha256` files accompany the binaries.
## 🔍 Cache Details
### Cache Scoping
@ -206,44 +237,52 @@ permissions:
## 🤝 Contributing
We welcome contributions! Please see our [Contributing Guide](CONTRIBUTING.md) for details.
We welcome contributions! Please see our [Contributing Guide](CONTRIBUTING.md)
for details.
## 📜 License
This project is licensed under the Apache License 2.0 - see the [LICENSE](LICENSE) file for details.
## 🔄 Updates and Maintenance
Stay updated:
- Watch this repository for releases
- Check the [CHANGELOG](CHANGELOG.md)
- Follow the [security policy](SECURITY.md)
This project is licensed under the Apache License 2.0 - see the
[LICENSE](LICENSE) file for details.
## 🌟 Acknowledgements
- [actions/cache](https://github.com/actions/cache/) team
- All our [contributors](https://github.com/awalsh128/cache-apt-pkgs-action/graphs/contributors)
- All our
[contributors](https://github.com/awalsh128/cache-apt-pkgs-action/graphs/contributors)
### Getting Started
#### Workflow Setup
Create a workflow `.yml` file in your repositories `.github/workflows` directory. [Example workflows](#example-workflows) are available below. For more information, reference the GitHub Help Documentation for [Creating a workflow file](https://help.github.com/en/articles/configuring-a-workflow#creating-a-workflow-file).
Create a workflow `.yml` file in your repository's `.github/workflows`
directory. [Example workflows](#example-workflows) are available below. For more
information, reference the GitHub Help Documentation for
[Creating a workflow file](https://help.github.com/en/articles/configuring-a-workflow#creating-a-workflow-file).
#### Detailed Configuration
##### Input Parameters
- `packages` - Space delimited list of packages to install.
- `version` - Version of cache to load. Each version will have its own cache. Note, all characters except spaces are allowed.
- `execute_install_scripts` - Execute Debian package pre and post install script upon restore. See [Caveats / Non-file Dependencies](#non-file-dependencies) for more information.
- `version` - Version of cache to load. Each version will have its own cache.
Note, all characters except spaces are allowed.
- `execute_install_scripts` - Execute Debian package 'preinst' and 'postinst'
install scripts upon restore. See
[Caveats / Non-file Dependencies](#non-file-dependencies) for more
information.
##### Output Values
- `cache-hit` - A boolean value to indicate a cache was found for the packages requested.
- `package-version-list` - The main requested packages and versions that are installed. Represented as a comma delimited list with equals delimit on the package version (i.e. \<package1>=<version1\>,\<package2>=\<version2>,...).
- `all-package-version-list` - All the pulled in packages and versions, including dependencies, that are installed. Represented as a comma delimited list with equals delimit on the package version (i.e. \<package1>=<version1\>,\<package2>=\<version2>,...).
- `cache-hit` - A `true` or `false` value to indicate a cache was found for the
packages requested.
- `package-version-list` - The main requested packages and versions that are
installed. Represented as a comma delimited list with equals delimit on the
package version (i.e. \<package1>=<version1\>,\<package2>=\<version2>,...).
- `all-package-version-list` - All the pulled in packages and versions,
including dependencies, that are installed. Represented as a comma delimited
list with equals delimit on the package version (i.e.
\<package1>=<version1\>,\<package2>=\<version2>,...).
### Cache scopes
@ -257,9 +296,10 @@ The cache is scoped to:
Below are some example workflows showing how to use this action.
#### Build and Deploy Doxygen Documentation
#### Build and Deploy `Doxygen` Documentation
This example shows how to cache dependencies for building and deploying Doxygen documentation:
This example shows how to cache dependencies for building and deploying
`Doxygen` documentation:
```yaml
name: Create Documentation
@ -289,7 +329,8 @@ jobs:
#### Simple Package Installation
This example shows the minimal configuration needed to cache and install packages:
This example shows the minimal configuration needed to cache and install
packages:
```yaml
name: Install Dependencies
@ -308,19 +349,27 @@ jobs:
### Edge Cases
This action is able to speed up installs by skipping the number of steps that `apt` uses.
This action is able to speed up installs by skipping a number of the steps
that `apt` normally performs.
- This means there will be certain cases that it may not be able to handle like state management of other file configurations outside the package scope.
- In cases that can't be immediately addressed or run counter to the approach of this action, the packages affected should go into their own action `step` and using the normal `apt` utility.
- This means there will be certain cases that it may not be able to handle like
state management of other file configurations outside the package scope.
- In cases that can't be immediately addressed or run counter to the approach of
this action, the packages affected should go into their own action `step` and
using the normal `apt` utility.
### Non-file Dependencies
This action is based on the principle that most packages can be cached as a fileset. There are situations though where this is not enough.
This action is based on the principle that most packages can be cached as a set
of files. There are situations though where this is not enough.
- Pre and post installation scripts needs to be ran from `/var/lib/dpkg/info/{package name}.[preinst, postinst]`.
- The Debian package database needs to be queried for scripts above (i.e. `dpkg-query`).
- Pre and post installation scripts need to be run from
`/var/lib/dpkg/info/{package name}.[preinst, postinst]`.
- The Debian package database needs to be queried for scripts above (i.e.
`dpkg-query`).
The `execute_install_scripts` argument can be used to attempt to execute the install scripts but they are no guaranteed to resolve the issue.
The `execute_install_scripts` argument can be used to attempt to execute the
install scripts but they are not guaranteed to resolve the issue.
```yaml
- uses: awalsh128/cache-apt-pkgs-action@latest
@ -330,17 +379,28 @@ The `execute_install_scripts` argument can be used to attempt to execute the ins
execute_install_scripts: true
```
If this does not solve your issue, you will need to run `apt-get install` as a separate step for that particular package unfortunately.
If this does not solve your issue, you will need to run `apt-get install` as a
separate step for that particular package unfortunately.
```yaml
run: apt-get install mypackage
shell: bash
```
Please reach out if you have found a workaround for your scenario and it can be generalized. There is only so much this action can do and can't get into the area of reverse engineering Debian package manager. It would be beyond the scope of this action and may result in a lot of extended support and brittleness. Also, it would be better to contribute to Debian packager instead at that point.
Please reach out if you have found a workaround for your scenario and it can be
generalized. There is only so much this action can do and it can't get into the
area of reverse engineering the Debian package manager. That would be beyond
the scope of this action and may result in a lot of extended support and
brittleness. Also, it would be better to contribute to the Debian packaging
tools instead at that point.
For more context and information see [issue #57](https://github.com/awalsh128/cache-apt-pkgs-action/issues/57#issuecomment-1321024283) which contains the investigation and conclusion.
For more context and information see
[issue #57](https://github.com/awalsh128/cache-apt-pkgs-action/issues/57#issuecomment-1321024283)
which contains the investigation and conclusion.
### Cache Limits
A repository can have up to 5GB of caches. Once the 5GB limit is reached, older caches will be evicted based on when the cache was last accessed. Caches that are not accessed within the last week will also be evicted. To get more information on how to access and manage your actions's caches, see [GitHub Actions / Using workflows / Cache dependencies](https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#viewing-cache-entries).
A repository can have up to 5GB of caches. Once the 5GB limit is reached, older
caches will be evicted based on when the cache was last accessed. Caches that
are not accessed within the last week will also be evicted. To get more
information on how to access and manage your action's caches, see
[GitHub Actions / Using workflows / Cache dependencies](https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#viewing-cache-entries).

View file

@ -1,9 +1,11 @@
name: Cache APT Packages
description: Install APT based packages and cache them for future runs.
author: awalsh128
branding:
icon: hard-drive
color: green
inputs:
packages:
description: Space delimited list of packages to install. Version can be specified optionally using APT command syntax of <name>=<version> (e.g. xdot=1.2-2).
@ -24,6 +26,7 @@ inputs:
description: Enable debugging when there are issues with action. Minor performance penalty.
required: false
default: "false"
outputs:
cache-hit:
description: A boolean value to indicate a cache was found for the packages requested.
@ -32,91 +35,65 @@ outputs:
value: ${{ steps.load-cache.outputs.cache-hit || false }}
package-version-list:
description: The main requested packages and versions that are installed. Represented as a comma delimited list with equals delimit on the package version (i.e. <package>:<version,<package>:<version>).
value: ${{ steps.post-cache.outputs.package-version-list }}
value: ${{ steps.install-pkgs.outputs.package-version-list || steps.restore-pkgs.outputs.package-version-list }}
all-package-version-list:
description: All the pulled in packages and versions, including dependencies, that are installed. Represented as a comma delimited list with equals delimit on the package version (i.e. <package>:<version,<package>:<version>).
value: ${{ steps.post-cache.outputs.all-package-version-list }}
value: ${{ steps.install-pkgs.outputs.all-package-version-list || steps.restore-pkgs.outputs.all-package-version-list }}
runs:
using: composite
steps:
- id: set-shared-env
- id: setup
shell: bash
run: |
echo "ARCH=${{ runner.arch }}" >> "${GITHUB_ENV}"
echo "BINARY_PATH=${BINARY_PATH}" >> "${GITHUB_ENV}"
echo "CACHE_DIR=~/cache-apt-pkgs" >> "${GITHUB_ENV}"
echo "DEBUG=${{ inputs.debug }}" >> "${GITHUB_ENV}"
echo "GLOBAL_VERSION=20250910" >> "${GITHUB_ENV}"
echo "PACKAGES=${{ inputs.packages }}" >> "${GITHUB_ENV}"
echo "VERSION=${{ inputs.version }}" >> "${GITHUB_ENV}"
env:
BINARY_PATH: ${{ github.action_path }}/scripts/distribute.sh getbinpath ${{ runner.arch }}
- id: install-aptfast
shell: bash
run: |
if ! apt-fast --version > /dev/null 2>&1; then
echo "Installing apt-fast for optimized installs and updates" &&
/bin/bash -c "$(curl -sL https://raw.githubusercontent.com/ilikenwf/apt-fast/master/quick-install.sh)"
fi
- id: setup-binary
shell: bash
run: |
if [[ ! -f "${BINARY_PATH}" ]]; then
echo "Error: Binary not found at ${BINARY_PATH}"
echo "Please ensure the action has been properly built and binaries are included in the distribute directory"
exit 1
fi
${{ github.action_path }}/distribute/${{ runner.arch }}/cache_apt_pkgs setup \
--binary-path ${{ github.action_path }}/distribute/${{ runner.arch }}/cache_apt_pkgs
--checksum-file ${{ github.action_path }}/distribute/${{ runner.arch }}/cache_apt_pkgs.sha256
- id: create-cache-key
shell: bash
run: |
${BINARY_PATH} createkey \
-os-arch "${ARCH}" \
-plaintext-path "${CACHE_DIR}/cache_key.txt" \
-ciphertext-path "${CACHE_DIR}/cache_key.md5" \
-version "${VERSION}" \
-global-version "${GLOBAL_VERSION}" \
${PACKAGES}
echo "cache-key=$(cat ${CACHE_DIR}/cache_key.md5)" >> "${GITHUB_OUTPUT}"
${{ github.action_path }}/distribute/${{ runner.arch }}/cache_apt_pkgs createkey \
-os-arch ${{ runner.arch }} \
-cache-dir ~/cache-apt-pkgs \
-version "${{ inputs.version }}" \
-global-version "20250910" \
${{ inputs.packages }}
- id: load-cache
uses: actions/cache/restore@v4
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: ${{ env.CACHE_DIR }}
path: ~/cache-apt-pkgs
key: cache-apt-pkgs_${{ steps.create-cache-key.outputs.cache-key }}
- id: post-load-cache
# TODO get this implemented
# -exec-install-scripts ${{ inputs.execute_install_scripts }} \
- id: restore-pkgs
if: ${{ steps.load-cache.outputs.cache-hit == 'true' }}
shell: bash
run: |
if [ "${CACHE_HIT}" == "true" ]; then
${BINARY_PATH} restore \
-cache-dir "${CACHE_DIR}" \
-restore-root "/" \
"${PACKAGES}"
else
${BINARY_PATH} install \
-cache-dir "${CACHE_DIR}" \
-version "${VERSION}" \
-global-version "${GLOBAL_VERSION}" \
"${PACKAGES}"
fi
echo "package-version-list=\"$(cat "${CACHE_DIR}/pkgs_args.txt")\"" >> "${GITHUB_OUTPUT}"
echo "all-package-version-list=\"$(cat "${CACHE_DIR}/pkgs_installed.txt")\"" >> "${GITHUB_OUTPUT}"
env:
CACHE_HIT: ${{ steps.load-cache.outputs.cache-hit }}
EXEC_INSTALL_SCRIPTS: ${{ inputs.execute_install_scripts }}
${{ github.action_path }}/distribute/${{ runner.arch }}/cache_apt_pkgs restore \
--cache-dir ~/cache-apt-pkgs \
--restore-root "/" \
${{ inputs.packages }}
- id: install-pkgs
if: ${{ steps.load-cache.outputs.cache-hit != 'true' }}
shell: bash
run: |
${{ github.action_path }}/distribute/${{ runner.arch }}/cache_apt_pkgs install \
--cache-dir ~/cache-apt-pkgs \
--version "${{ inputs.version }}" \
--global-version "20250910" \
${{ inputs.packages }}
- id: upload-artifacts
if: ${{ inputs.debug == 'true' }}
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: cache-apt-pkgs-logs_${{ env.CACHE_KEY }}
name: cache-apt-pkgs-logs_${{ steps.create-cache-key.outputs.cache-key }}
path: ~/cache-apt-pkgs/*.log
- id: save-cache
if: ${{ ! steps.load-cache.outputs.cache-hit }}
uses: actions/cache/save@v4
with:
path: ~/cache-apt-pkgs
path: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
key: ${{ steps.load-cache.outputs.cache-primary-key }}
- id: clean-cache
run: |
rm -rf ~/cache-apt-pkgs
shell: bash
run: |
${{ github.action_path }}/distribute/${{ runner.arch }}/cache_apt_pkgs cleanup \
--cache-dir ~/cache-apt-pkgs

View file

@ -1,6 +1,7 @@
package main
import (
"encoding/hex"
"fmt"
"path/filepath"
"runtime"
@ -8,6 +9,7 @@ import (
"awalsh128.com/cache-apt-pkgs-action/internal/cache"
"awalsh128.com/cache-apt-pkgs-action/internal/logging"
"awalsh128.com/cache-apt-pkgs-action/internal/pkgs"
"github.com/sethvargo/go-githubactions"
)
func createKey(cmd *Cmd, pkgArgs pkgs.Packages) error {
@ -19,7 +21,9 @@ func createKey(cmd *Cmd, pkgArgs pkgs.Packages) error {
if err != nil {
return fmt.Errorf("failed to create cache key: %w", err)
}
logging.Info("Created cache key: %s (%x)", key.String(), key.Hash())
hashHex := hex.EncodeToString(key.Hash())
logging.Info("Created cache key: %s (%s)", key.String(), hashHex)
cacheDir := cmd.StringFlag("cache-dir")
@ -32,6 +36,14 @@ func createKey(cmd *Cmd, pkgArgs pkgs.Packages) error {
}
logging.Info("Wrote cache key files:\n %s\n %s", plaintextPath, ciphertextPath)
// Output the cache key hash to GitHub Actions
if isGitHubActions() {
githubactions.SetOutput("cache-key", hashHex)
} else {
// In test/development environments, print to stdout
fmt.Printf("cache-key=%s\n", hashHex)
}
return nil
}

View file

@ -61,6 +61,11 @@ func install(cmd *Cmd, pkgArgs pkgs.Packages) error {
return fmt.Errorf("error writing manifest to %s: %v", manifestPath, err)
}
logging.Info("Wrote manifest to %s.", manifestPath)
// Set GitHub Actions outputs
SetPackageVersionList(pkgArgs)
SetAllPackageVersionList(installedPkgs)
logging.Info("Completed package installation.")
return nil
}

View file

@ -11,6 +11,8 @@ func main() {
GetCreateKeyCmd(),
GetInstallCmd(),
GetRestoreCmd(),
GetSetupCmd(),
GetCleanupCmd(),
GetValidateCmd(),
)
cmd, pkgArgs := commands.Parse()

View file

@ -10,6 +10,8 @@ func TestMain_CommandStructure(t *testing.T) {
GetCreateKeyCmd(),
GetInstallCmd(),
GetRestoreCmd(),
GetSetupCmd(),
GetCleanupCmd(),
GetValidateCmd(),
)
@ -18,7 +20,7 @@ func TestMain_CommandStructure(t *testing.T) {
}
// Check that all expected commands exist
expectedCommands := []string{"createkey", "install", "restore", "validate"}
expectedCommands := []string{"createkey", "install", "restore", "setup", "cleanup", "validate"}
for _, cmdName := range expectedCommands {
if _, ok := commands.Get(cmdName); !ok {
t.Errorf("Expected command '%s' to be available", cmdName)
@ -31,6 +33,8 @@ func TestMain_AllCommandsHaveRequiredFields(t *testing.T) {
GetCreateKeyCmd(),
GetInstallCmd(),
GetRestoreCmd(),
GetSetupCmd(),
GetCleanupCmd(),
GetValidateCmd(),
)

View file

@ -3,12 +3,35 @@ package main
import (
"flag"
"fmt"
"path/filepath"
"awalsh128.com/cache-apt-pkgs-action/internal/cache"
"awalsh128.com/cache-apt-pkgs-action/internal/logging"
"awalsh128.com/cache-apt-pkgs-action/internal/pkgs"
)
func restore(cmd *Cmd, pkgArgs pkgs.Packages) error {
return fmt.Errorf("restorePackages not implemented")
manifestPath := filepath.Join(cmd.StringFlag("cache-dir"), "manifest.json")
logging.Info("Reading manifest from %s.", manifestPath)
manifest, err := cache.Read(manifestPath)
if err != nil {
return fmt.Errorf("error reading manifest from %s: %v", manifestPath, err)
}
// Extract all installed packages from the manifest
installedPkgList := make([]pkgs.Package, 0, len(manifest.InstalledPackages))
for _, manifestPkg := range manifest.InstalledPackages {
installedPkgList = append(installedPkgList, manifestPkg.Package)
}
installedPkgs := pkgs.NewPackages(installedPkgList...)
// Set GitHub Actions outputs
SetPackageVersionList(pkgArgs)
SetAllPackageVersionList(installedPkgs)
logging.Info("Completed package restoration.")
return nil
}
func GetRestoreCmd() *Cmd {

View file

@ -1,6 +1,8 @@
package main
import (
"os"
"strings"
"testing"
"awalsh128.com/cache-apt-pkgs-action/internal/pkgs"
@ -62,27 +64,54 @@ func TestGetRestoreCmd(t *testing.T) {
func TestRestore_NotImplemented(t *testing.T) {
cmd := GetRestoreCmd()
// Create a temporary directory for testing
tmpDir, err := os.MkdirTemp("", "restore_test")
if err != nil {
t.Fatalf("Failed to create temp dir: %v", err)
}
defer os.RemoveAll(tmpDir)
// Set up command flags
cmd.Flags.Set("cache-dir", tmpDir)
packages := pkgs.NewPackages(pkgs.Package{Name: "test-package"})
// The restore function is not implemented and should return an error
err := restore(cmd, packages)
// The restore function should now fail because there's no manifest file
err = restore(cmd, packages)
if err == nil {
t.Error("Expected error from unimplemented restore function")
t.Error("Expected error when manifest file doesn't exist")
}
expectedMsg := "restorePackages not implemented"
if err.Error() != expectedMsg {
t.Errorf("Expected error message '%s', got '%s'", expectedMsg, err.Error())
// Check that the error is about reading the manifest
if !strings.Contains(err.Error(), "error reading manifest") {
t.Errorf("Expected error about reading manifest, got '%s'", err.Error())
}
}
func TestRestore_EmptyPackages(t *testing.T) {
cmd := GetRestoreCmd()
// Create a temporary directory for testing
tmpDir, err := os.MkdirTemp("", "restore_test")
if err != nil {
t.Fatalf("Failed to create temp dir: %v", err)
}
defer os.RemoveAll(tmpDir)
// Set up command flags
cmd.Flags.Set("cache-dir", tmpDir)
packages := pkgs.NewPackages()
// Even with empty packages, restore should return not implemented error
err := restore(cmd, packages)
// The restore function should fail because there's no manifest file
err = restore(cmd, packages)
if err == nil {
t.Error("Expected error from unimplemented restore function")
t.Error("Expected error when manifest file doesn't exist")
}
// Check that the error is about reading the manifest
if !strings.Contains(err.Error(), "error reading manifest") {
t.Errorf("Expected error about reading manifest, got '%s'", err.Error())
}
}

View file

@ -1,6 +1,7 @@
package main
import (
"strings"
"testing"
"awalsh128.com/cache-apt-pkgs-action/internal/pkgs"
@ -41,8 +42,10 @@ func TestValidate_EmptyPackages(t *testing.T) {
packages := pkgs.NewPackages()
// With no packages, validation should succeed (no packages to validate)
err := validate(cmd, packages)
if err != nil {
if err := validate(cmd, packages); err != nil {
if strings.Contains(err.Error(), "no supported package manager") {
t.Skip("APT is not available in the test environment")
}
t.Errorf("validate with empty packages should succeed, got error: %v", err)
}
}

View file

@ -1,163 +0,0 @@
#!/bin/bash
####################################################################################################
#
# Name: Cache APT Packages
# Description: Install APT based packages and cache them for future runs.
# Author: awalsh128
#
# Branding:
# Icon: hard-drive
# Color: green
#
# Inputs:
# Packages:
# Description: Space delimited list of packages to install. Version can be specified optionally using APT command syntax of <name>=<version> (e.g. xdot=1.2-2).
# Required: true
# Default:
# Version:
# Description: Version of cache to load. Each version will have its own cache. Note, all characters except spaces are allowed.
# Required: false
# Default:
# Execute Install Scripts:
# Description: Execute Debian package pre and post install script upon restore. See README.md caveats for more information.
# Required: false
# Default: false
# Refresh:
# Description: OBSOLETE: Refresh is not used by the action, use version instead.
# Required: false
# Default:
# Deprecation Message: Refresh is not used by the action, use version instead.
# Debug:
# Description: Enable debugging when there are issues with action. Minor performance penalty.
# Required: false
# Default: false
#
#
# Outputs:
# Cache Hit:
# Description: A boolean value to indicate a cache was found for the packages requested.
# Value: ${{ steps.load-cache.outputs.cache-hit || false }}
# Package Version List:
# Description: The main requested packages and versions that are installed. Represented as a comma delimited list with equals delimit on the package version (i.e. <package>:<version,<package>:<version>).
# Value: ${{ steps.post-cache.outputs.package-version-list }}
# All Package Version List:
# Description: All the pulled in packages and versions, including dependencies, that are installed. Represented as a comma delimited list with equals delimit on the package version (i.e. <package>:<version,<package>:<version>).
# Value: ${{ steps.post-cache.outputs.all-package-version-list }}
#
####################################################################################################
set -e

# NOTE(review): This appears to be generated output that emulates the
# composite steps of action.yml as a single bash script. Several variables
# referenced below (BINARY_PATH, ARCH, CACHE_DIR, VERSION, GLOBAL_VERSION,
# PACKAGES) are never assigned in this file -- only their GH_ENV_* copies
# are -- so the script is not runnable as-is. TODO confirm intent.

# Fixed stand-in values for the action inputs and runner context.
INPUTS_EXECUTE_INSTALL_SCRIPTS="false"
INPUTS_REFRESH="false"
INPUTS_DEBUG="false"
RUNNER_ARCH="X86_64"
GITHUB_ACTION_PATH="../../"
INPUTS_PACKAGES="xdot,rolldice"
INPUTS_VERSION="0"

#===================================================================================================
# Step ID: set-shared-env
#===================================================================================================
# Emulates the step's `echo "name=value" >> $GITHUB_ENV` writes as plain
# shell assignments.
STEP_SET_SHARED_ENV_ENV_BINARY_PATH="${GITHUB_ACTION_PATH}/scripts/distribute.sh getbinpath ${RUNNER_ARCH}"
GH_ENV_ARCH="${RUNNER_ARCH}"
# NOTE(review): BINARY_PATH is unset at this point, so this expands empty.
GH_ENV_BINARY_PATH="${BINARY_PATH}"
GH_ENV_CACHE_DIR="~/cache-apt-pkgs"
GH_ENV_DEBUG="${INPUTS_DEBUG}"
GH_ENV_GLOBAL_VERSION="20250910"
GH_ENV_PACKAGES="${INPUTS_PACKAGES}"
GH_ENV_VERSION="${INPUTS_VERSION}"

#===================================================================================================
# Step ID: install-aptfast
#===================================================================================================
# Installs apt-fast from its upstream quick-install script if missing.
if ! apt-fast --version > /dev/null 2>&1; then
  echo "Installing apt-fast for optimized installs and updates" &&
    /bin/bash -c "$(curl -sL https://raw.githubusercontent.com/ilikenwf/apt-fast/master/quick-install.sh)"
fi

#===================================================================================================
# Step ID: setup-binary
#===================================================================================================
if [[ ! -f "${BINARY_PATH}" ]]; then
  echo "Error: Binary not found at ${BINARY_PATH}"
  echo "Please ensure the action has been properly built and binaries are included in the distribute directory"
  exit 1
fi

#===================================================================================================
# Step ID: create-cache-key
#===================================================================================================
${BINARY_PATH} createkey \
  -os-arch "${ARCH}" \
  -plaintext-path "${CACHE_DIR}/cache_key.txt" \
  -ciphertext-path "${CACHE_DIR}/cache_key.md5" \
  -version "${VERSION}" \
  -global-version "${GLOBAL_VERSION}" \
  ${PACKAGES}
GH_OUTPUT_CREATE_CACHE_KEY_CACHE_KEY="$(cat ${CACHE_DIR}/cache_key.md5)"

#===================================================================================================
# Step ID: load-cache
#===================================================================================================
# NOTE(review): the ${{ ... }} GitHub expressions below were never
# converted by the generator and are invalid bash syntax as written.
STEP_LOAD_CACHE_WITH_PATH="${{ env.CACHE_DIR }}"
STEP_LOAD_CACHE_WITH_KEY="cache-apt-pkgs_${{ steps.create-cache-key.outputs.cache-key }}"
if [[ -d "${cache-apt-pkgs_${{ steps.create-cache-key.outputs.cache-key }}}" ]]; then
  OUTPUT_CACHE_HIT=true
else
  OUTPUT_CACHE_HIT=false
  mkdir "${cache-apt-pkgs_${{ steps.create-cache-key.outputs.cache-key }}}"
fi
# NO HANDLER FOUND for actions/cache/restore@v4

#===================================================================================================
# Step ID: post-load-cache
#===================================================================================================
STEP_POST_LOAD_CACHE_ENV_CACHE_HIT="${{ steps.load-cache.outputs.cache-hit }}"
STEP_POST_LOAD_CACHE_ENV_EXEC_INSTALL_SCRIPTS="${INPUTS_EXECUTE_INSTALL_SCRIPTS}"
# Restore from cache on hit, otherwise do a fresh install into the cache dir.
if [ "${CACHE_HIT}" == "true" ]; then
  ${BINARY_PATH} restore \
    -cache-dir "${CACHE_DIR}" \
    -restore-root "/" \
    "${PACKAGES}"
else
  ${BINARY_PATH} install \
    -cache-dir "${CACHE_DIR}" \
    -version "${VERSION}" \
    -global-version "${GLOBAL_VERSION}" \
    "${PACKAGES}"
fi
GH_OUTPUT_POST_LOAD_CACHE_PACKAGE_VERSION_LIST="\"$(cat "${CACHE_DIR}/pkgs_args.txt")\""
GH_OUTPUT_POST_LOAD_CACHE_ALL_PACKAGE_VERSION_LIST="\"$(cat "${CACHE_DIR}/pkgs_installed.txt")\""

#===================================================================================================
# Step ID: upload-artifacts
#===================================================================================================
# NO HANDLER FOUND for actions/upload-artifact@v4

#===================================================================================================
# Step ID: save-cache
#===================================================================================================
# NO HANDLER FOUND for actions/cache/save@v4

#===================================================================================================
# Step ID: clean-cache
#===================================================================================================
rm -rf ~/cache-apt-pkgs

View file

@ -1,99 +0,0 @@
package main
import (
"fmt"
"strings"
"mvdan.cc/sh/v3/syntax"
)
// ParseBashToAST parses a bash script string into an AST using the
// mvdan.cc/sh syntax package, with comments preserved.
func ParseBashToAST(script string) (*syntax.File, error) {
	// Bash dialect, keeping comments so they survive round-trips.
	p := syntax.NewParser(syntax.KeepComments(true), syntax.Variant(syntax.LangBash))

	ast, err := p.Parse(strings.NewReader(script), "")
	if err != nil {
		return nil, fmt.Errorf("failed to parse bash script: %v", err)
	}
	return ast, nil
}
// AnalyzeBashScript parses a bash script and reports the variable names
// assigned, the functions declared, and the commands invoked, keyed as
// "variables", "functions" and "commands" in the returned map.
func AnalyzeBashScript(script string) (map[string]interface{}, error) {
	ast, err := ParseBashToAST(script)
	if err != nil {
		return nil, err
	}

	seenVars := make(map[string]struct{})
	funcNames := make([]string, 0)
	cmdNames := make([]string, 0)

	// Single pass over the AST collecting the three categories.
	syntax.Walk(ast, func(node syntax.Node) bool {
		switch n := node.(type) {
		case *syntax.Assign:
			// Variable assignment (deduplicated via the set).
			if n.Name != nil {
				seenVars[n.Name.Value] = struct{}{}
			}
		case *syntax.FuncDecl:
			// Function declaration.
			if n.Name != nil {
				funcNames = append(funcNames, n.Name.Value)
			}
		case *syntax.CallExpr:
			// Command invocation: join the literal parts of the first word.
			if len(n.Args) > 0 {
				var word strings.Builder
				for _, part := range n.Args[0].Parts {
					if lit, ok := part.(*syntax.Lit); ok {
						word.WriteString(lit.Value)
					}
				}
				if word.Len() > 0 {
					cmdNames = append(cmdNames, word.String())
				}
			}
		}
		return true
	})

	// Flatten the variable set into a slice for better JSON output.
	varNames := make([]string, 0, len(seenVars))
	for name := range seenVars {
		varNames = append(varNames, name)
	}

	return map[string]interface{}{
		"variables": varNames,
		"functions": funcNames,
		"commands":  cmdNames,
	}, nil
}
// ParseAndGetAst converts the action to a bash script and prepends a
// comment header summarizing the script's variables, functions, and
// commands. On analysis failure the raw script is returned along with the
// error.
func ParseAndGetAst(action Action) (string, error) {
	script := NewBashConverter(action).Convert()

	analysis, err := AnalyzeBashScript(script)
	if err != nil {
		return script, fmt.Errorf("script analysis error: %v", err)
	}

	var out strings.Builder
	out.WriteString("#!/bin/bash\n\n")
	out.WriteString("# Script Analysis:\n")
	fmt.Fprintf(&out, "# Variables: %v\n", analysis["variables"])
	fmt.Fprintf(&out, "# Functions: %v\n", analysis["functions"])
	fmt.Fprintf(&out, "# Commands: %v\n\n", analysis["commands"])
	out.WriteString(script)
	return out.String(), nil
}

View file

@ -1,251 +0,0 @@
package main
import (
"bufio"
"fmt"
"regexp"
"strings"
)
// dedent removes the common leading indentation shared by all non-empty
// lines. CRLF line endings are normalized to LF and a single leading
// newline is stripped first, so raw-string literals can start on the line
// after the opening backquote.
func dedent(s string) string {
	s = strings.TrimPrefix(strings.ReplaceAll(s, "\r\n", "\n"), "\n")
	lines := strings.Split(s, "\n")

	// Smallest indent across non-blank lines; -1 means "none seen yet".
	common := -1
	for _, line := range lines {
		if strings.TrimSpace(line) == "" {
			continue
		}
		indent := len(line) - len(strings.TrimLeft(line, " \t"))
		if common == -1 || indent < common {
			common = indent
		}
	}
	if common <= 0 {
		return s
	}

	for i, line := range lines {
		if len(line) >= common {
			lines[i] = line[common:]
		} else {
			// Shorter lines are whitespace-only; drop their indent entirely.
			lines[i] = strings.TrimLeft(line, " \t")
		}
	}
	return strings.Join(lines, "\n")
}

// ScriptBuilder accumulates the text of a generated shell script.
type ScriptBuilder struct {
	textBuilder strings.Builder
}

// WriteComment writes the formatted text as one '# '-prefixed comment line
// per input line.
func (s *ScriptBuilder) WriteComment(format string, a ...any) {
	scanner := bufio.NewScanner(strings.NewReader(fmt.Sprintf(format, a...)))
	for scanner.Scan() {
		s.textBuilder.WriteString("# ")
		s.textBuilder.WriteString(scanner.Text())
		s.textBuilder.WriteByte('\n')
	}
}

// WriteCommentSection writes a comment framed by '#====' rule lines,
// preceded by a blank separator.
func (s *ScriptBuilder) WriteCommentSection(format string, a ...any) {
	rule := "#" + strings.Repeat("=", 99)
	s.WriteBlock("\n\n" + rule)
	s.WriteComment(format, a...)
	s.WriteBlock(rule + "\n")
}

// WriteBlock dedents the format string, applies the arguments, and writes
// the result followed by a newline.
func (s *ScriptBuilder) WriteBlock(format string, a ...any) {
	body := fmt.Sprintf(dedent(format), a...)
	s.textBuilder.WriteString(body)
	s.textBuilder.WriteByte('\n')
}

// String returns everything written so far.
func (s *ScriptBuilder) String() string {
	return s.textBuilder.String()
}
// BashConverter renders a GitHub Action definition into a standalone bash
// script, substituting GitHub expression variables with fixed stand-in
// values (see NewBashConverter).
type BashConverter struct {
	action        Action        // parsed action definition to convert
	scriptBuilder ScriptBuilder // accumulates the generated script text
	githubVars    githubVars    // GitHub expression name -> stand-in value
}
// NewBashConverter builds a converter pre-seeded with fixed stand-in
// values for the GitHub expression variables the action may reference
// (runner context and action inputs).
func NewBashConverter(action Action) *BashConverter {
	defaults := []githubVar{
		newGithubVar("runner.arch", "X86_64"),
		newGithubVar("github.action_path", "../../"),
		newGithubVar("inputs.packages", "xdot,rolldice"),
		newGithubVar("inputs.version", "0"),
		newGithubVar("inputs.global_version", ""),
		newGithubVar("inputs.execute_install_scripts", "false"),
		newGithubVar("inputs.refresh", "false"),
		newGithubVar("inputs.debug", "false"),
	}

	vars := make(map[string]githubVar, len(defaults))
	for _, v := range defaults {
		vars[v.name] = v
	}

	return &BashConverter{
		action:        action,
		scriptBuilder: ScriptBuilder{},
		githubVars:    vars,
	}
}
// Convert renders the action as a bash script and returns its text.
func (b *BashConverter) Convert() string {
	b.handleAction()
	return b.scriptBuilder.String()
}
// convertShellLines applies convertShellLine to every line of a
// multi-line run block and rejoins the results with newlines.
func (b *BashConverter) convertShellLines(step Step, lines string) string {
	var out []string
	sc := bufio.NewScanner(strings.NewReader(lines))
	for sc.Scan() {
		out = append(out, b.convertShellLine(step, sc.Text()))
	}
	return strings.Join(out, "\n")
}
// Patterns recognizing writes to the GitHub environment/output files,
// e.g. `echo "name=value" >> "$GITHUB_ENV"`. Compiled once at package
// init instead of on every convertShellLine call.
var (
	githubEnvRe    = regexp.MustCompile(`^\s*echo\s+"([\w\-_]+)=(.*)"\s*>>\s*.*GITHUB_ENV.*`)
	githubOutputRe = regexp.MustCompile(`^\s*echo\s+"([\w\-_]+)=(.*)"\s*>>\s*.*GITHUB_OUTPUT.*`)
)

// convertShellLine rewrites a single line of a step's run block:
// ${{ ... }} context references become shell variables, and writes to
// $GITHUB_ENV / $GITHUB_OUTPUT become plain shell assignments
// (GH_ENV_<NAME> and GH_OUTPUT_<STEP>_<NAME> respectively). Any other
// line is returned with only the variable substitution applied.
func (b *BashConverter) convertShellLine(step Step, line string) string {
	line = b.githubVars.convert(line)
	if m := githubEnvRe.FindStringSubmatch(line); m != nil {
		return fmt.Sprintf(`GH_ENV_%s="%s"`, convertToShellVar(m[1]), b.githubVars.convert(m[2]))
	}
	if m := githubOutputRe.FindStringSubmatch(line); m != nil {
		return fmt.Sprintf(
			`GH_OUTPUT_%s_%s="%s"`,
			convertToShellVar(step.ID),
			convertToShellVar(m[1]),
			b.githubVars.convert(m[2]),
		)
	}
	return line
}
// handleExternalAction emulates a third-party `uses:` step with inline
// bash. Only actions with a registered handler are supported; anything
// else is flagged with a comment in the generated script.
func (b *BashConverter) handleExternalAction(step Step) {
	handlers := map[string]func(){
		"actions/cache/restore@v4": func() {
			path := b.convertShellLine(step, step.With["path"])
			key := b.convertShellLine(step, step.With["key"])
			shellVarPrefix := "STEP_" + convertToShellVar(step.ID) + "_WITH"
			pathVar := shellVarPrefix + "_PATH"
			keyVar := shellVarPrefix + "_KEY"
			// BUG FIX: the cache-hit test and mkdir previously expanded
			// the key VALUE (`${%s}` received `key`), producing nonsense
			// like `[[ -d "${v1-abc}" ]]`. They must reference the path
			// variable name.
			b.scriptBuilder.WriteBlock(`
				%s="%s"
				%s="%s"
				if [[ -d "${%s}" ]]; then
				  OUTPUT_CACHE_HIT=true
				else
				  OUTPUT_CACHE_HIT=false
				  mkdir "${%s}"
				fi
			`, pathVar, path, keyVar, key, pathVar, pathVar)
		},
	}
	handler, ok := handlers[step.Uses]
	if !ok {
		// BUG FIX: this comment used to be emitted unconditionally,
		// even when a handler had just run.
		b.scriptBuilder.WriteComment("NO HANDLER FOUND for %s", step.Uses)
		return
	}
	handler()
}
// convertToShellVar maps a GitHub-style identifier (dots, dashes) to an
// uppercase shell variable name, e.g. "runner.arch" -> "RUNNER_ARCH".
func convertToShellVar(name string) string {
	replacer := strings.NewReplacer(".", "_", "-", "_")
	return strings.ToUpper(replacer.Replace(name))
}
// githubVar maps one GitHub Actions context expression (e.g.
// "inputs.packages") to the shell variable that stands in for it in the
// generated script.
type githubVar struct {
	name string // GitHub context name, e.g. "runner.arch"
	shellName string // derived shell variable name, e.g. "RUNNER_ARCH"
	shellVal string // placeholder value assigned in the script
}
// newGithubVar derives the shell-facing name for a GitHub context
// variable and pairs it with its placeholder value.
func newGithubVar(name, shellVal string) githubVar {
	return githubVar{
		name: name,
		shellName: convertToShellVar(name),
		shellVal: shellVal,
	}
}
// githubVars indexes the known GitHub context variables by context name.
type githubVars map[string]githubVar
// convert replaces every known ${{ name }} expression in line with a
// reference to its corresponding shell variable (${SHELL_NAME}).
// Expressions naming unknown variables are left untouched.
func (v *githubVars) convert(line string) string {
	// Build an alternation of all known, regex-escaped variable names.
	quoted := make([]string, 0, len(*v))
	for name := range *v {
		quoted = append(quoted, regexp.QuoteMeta(name))
	}
	re := regexp.MustCompile(
		fmt.Sprintf(`\${{[[:space:]]*(%s)[[:space:]]*}}`, strings.Join(quoted, "|")),
	)
	return re.ReplaceAllStringFunc(line, func(match string) string {
		// Pull the variable name back out from between ${{ and }}.
		name := re.FindStringSubmatch(match)[1]
		gvar, ok := (*v)[name]
		if !ok {
			// Defensive: the pattern only matches known names.
			return match
		}
		return fmt.Sprintf("${%s}", gvar.shellName)
	})
}
// insertionSort orders keys ascending in place. Used instead of the
// sort package to keep this file's imports unchanged; the slices sorted
// here are tiny (variable and env-key names).
func insertionSort(keys []string) {
	for i := 1; i < len(keys); i++ {
		for j := i; j > 0 && keys[j] < keys[j-1]; j-- {
			keys[j], keys[j-1] = keys[j-1], keys[j]
		}
	}
}

// handleAction emits the full bash script: shebang, banner with the
// action summary, placeholder GitHub variable assignments, then one
// commented section per step.
func (b *BashConverter) handleAction() {
	b.scriptBuilder.WriteBlock("#!/bin/bash\n")
	b.scriptBuilder.WriteBlock(strings.Repeat("#", 100) + "\n#")
	b.scriptBuilder.WriteComment("%s", b.action.ShortString())
	b.scriptBuilder.WriteBlock(strings.Repeat("#", 100) + "\n")
	b.scriptBuilder.WriteBlock("set -e\n")

	// BUG FIX: map iteration order is randomized in Go, which made the
	// generated script differ from run to run. Emit the variable
	// assignments in sorted name order.
	varNames := make([]string, 0, len(b.githubVars))
	for name := range b.githubVars {
		varNames = append(varNames, name)
	}
	insertionSort(varNames)
	for _, name := range varNames {
		v := b.githubVars[name]
		if v.shellVal != "" {
			b.scriptBuilder.WriteBlock(`%s="%s"`, v.shellName, v.shellVal)
		}
	}

	for _, step := range b.action.Runs.Steps {
		if step.ID != "" {
			b.scriptBuilder.WriteCommentSection("Step ID: %s", step.ID)
		} else {
			b.scriptBuilder.WriteCommentSection("Step ID: n/a")
		}
		if step.Uses != "" {
			b.handleExternalAction(step)
		}
		// BUG FIX: step.Env is also a map — sort its keys too so the
		// emitted assignments are deterministic.
		envKeys := make([]string, 0, len(step.Env))
		for k := range step.Env {
			envKeys = append(envKeys, k)
		}
		insertionSort(envKeys)
		for _, k := range envKeys {
			b.scriptBuilder.WriteBlock(
				`STEP_%s_ENV_%s="%s"`,
				convertToShellVar(step.ID),
				convertToShellVar(k),
				b.githubVars.convert(step.Env[k]),
			)
		}
		if step.Shell != "" && step.Shell != "bash" {
			b.scriptBuilder.WriteComment(
				"Note: Original shell was %q, but this script uses bash.",
				step.Shell,
			)
		}
		if step.Run != "" {
			b.scriptBuilder.WriteBlock("%s\n", b.convertShellLines(step, step.Run))
		}
	}
}

View file

@ -1,30 +0,0 @@
package main
import (
"fmt"
"log"
"os"
)
// Foo converts the parsed action into a bash script, prints it to
// stdout, and writes it to action.sh with executable permissions.
// It exits the process with status 1 if the file cannot be written.
func Foo(action Action) {
	converter := NewBashConverter(action)
	bashScript := converter.Convert()
	fmt.Println(bashScript)

	const out = "action.sh"
	if err := os.WriteFile(out, []byte(bashScript), 0o755); err != nil {
		// BUG FIX: report failures on stderr, not stdout.
		fmt.Fprintln(os.Stderr, "write error:", err)
		os.Exit(1)
	}
	fmt.Printf("Wrote script to %s\n", out)
}
// main parses the action definition and prints its AST representation.
func main() {
	action, err := Parse("../../action.yml")
	if err != nil {
		log.Fatal(err)
	}
	// BUG FIX: the error from ParseAndGetAst was previously assigned
	// but never checked, silently printing an empty result on failure.
	txt, err := ParseAndGetAst(action)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(txt)
}

View file

@ -1,236 +0,0 @@
package main
import (
"fmt"
"os"
"strings"
"gopkg.in/yaml.v3"
)
// indentSize is the number of spaces added per indentation level by indent().
const indentSize = 2

// Action represents the GitHub Action configuration structure
// (the top-level document of action.yml).
type Action struct {
	Name string `yaml:"name"`
	Description string `yaml:"description"`
	Author string `yaml:"author"`
	Branding Branding `yaml:"branding"`
	Inputs Inputs `yaml:"inputs"`
	Outputs Outputs `yaml:"outputs"`
	Runs Runs `yaml:"runs"`
}

// Branding represents the action's branding configuration
// (marketplace icon and color).
type Branding struct {
	Icon string `yaml:"icon"`
	Color string `yaml:"color"`
}

// Inputs represents all input parameters for the action.
type Inputs struct {
	Packages Input `yaml:"packages"`
	Version Input `yaml:"version"`
	ExecuteInstallScripts Input `yaml:"execute_install_scripts"`
	Refresh Input `yaml:"refresh"`
	Debug Input `yaml:"debug"`
}

// Input represents a single input parameter configuration.
type Input struct {
	Description string `yaml:"description"`
	Required bool `yaml:"required"`
	Default string `yaml:"default"`
	DeprecationMessage string `yaml:"deprecationMessage,omitempty"` // empty when the input is not deprecated
}

// Outputs represents all output parameters from the action.
type Outputs struct {
	CacheHit Output `yaml:"cache-hit"`
	PackageVersionList Output `yaml:"package-version-list"`
	AllPackageVersionList Output `yaml:"all-package-version-list"`
}

// Output represents a single output parameter configuration.
type Output struct {
	Description string `yaml:"description"`
	Value string `yaml:"value"`
}

// Runs represents the action's execution configuration.
type Runs struct {
	Using string `yaml:"using"`
	Env map[string]string `yaml:"env"`
	Steps []Step `yaml:"steps"`
}

// Step represents a single step in the action's execution.
type Step struct {
	ID string `yaml:"id"`
	Uses string `yaml:"uses"` // external action reference, e.g. "actions/cache/restore@v4"
	With map[string]string `yaml:"with"`
	Shell string `yaml:"shell"`
	Run string `yaml:"run"`
	Env map[string]string `yaml:"env"`
}
// String implements fmt.Stringer for Action, rendering the short
// summary followed by the indented Runs section.
func (a Action) String() string {
	return a.ShortString() + "\nRuns:\n" + indent(a.Runs.String(), 1)
}
// ShortString renders the action like String but with the Runs section
// trimmed out: header fields plus Branding, Inputs, and Outputs.
func (a Action) ShortString() string {
	var sb strings.Builder
	fmt.Fprintf(&sb, "Name: %s\n", a.Name)
	fmt.Fprintf(&sb, "Description: %s\n", a.Description)
	fmt.Fprintf(&sb, "Author: %s\n", a.Author)
	for _, section := range []struct {
		title string
		body  fmt.Stringer
	}{
		{"Branding", a.Branding},
		{"Inputs", a.Inputs},
		{"Outputs", a.Outputs},
	} {
		fmt.Fprintf(&sb, "\n%s:\n", section.title)
		sb.WriteString(indent(section.body.String(), 1))
	}
	return sb.String()
}
// String implements fmt.Stringer for Branding.
func (b Branding) String() string {
	return "Icon: " + b.Icon + "\nColor: " + b.Color
}
// String implements fmt.Stringer for Inputs, rendering each input under
// its display label in declaration order.
func (i Inputs) String() string {
	var sb strings.Builder
	for _, entry := range []struct {
		label string
		input Input
	}{
		{"Packages", i.Packages},
		{"Version", i.Version},
		{"Execute Install Scripts", i.ExecuteInstallScripts},
		{"Refresh", i.Refresh},
		{"Debug", i.Debug},
	} {
		sb.WriteString(entry.label + ":\n")
		sb.WriteString(indent(entry.input.String(), 1))
	}
	return sb.String()
}
// String implements fmt.Stringer for Input. The deprecation message
// line is appended only when one is set.
func (i Input) String() string {
	s := fmt.Sprintf("Description: %s\nRequired: %v\nDefault: %s",
		i.Description, i.Required, i.Default)
	if i.DeprecationMessage != "" {
		s += "\nDeprecation Message: " + i.DeprecationMessage
	}
	return s
}
// String implements fmt.Stringer for Outputs, rendering each output
// under its display label in declaration order.
func (o Outputs) String() string {
	var sb strings.Builder
	for _, entry := range []struct {
		label  string
		output Output
	}{
		{"Cache Hit", o.CacheHit},
		{"Package Version List", o.PackageVersionList},
		{"All Package Version List", o.AllPackageVersionList},
	} {
		sb.WriteString(entry.label + ":\n")
		sb.WriteString(indent(entry.output.String(), 1))
	}
	return sb.String()
}
// String implements fmt.Stringer for Output.
func (o Output) String() string {
	return "Description: " + o.Description + "\nValue: " + o.Value
}
// String implements fmt.Stringer for Runs.
// NOTE(review): ranging over r.Env prints entries in random map order;
// sort the keys first if deterministic output matters.
func (r Runs) String() string {
	var b strings.Builder
	b.WriteString(fmt.Sprintf("Using: %s\n", r.Using))
	b.WriteString("Environment:\n")
	for k, v := range r.Env {
		b.WriteString(indent(fmt.Sprintf("%s: %s\n", k, v), 1))
	}
	b.WriteString("Steps:\n")
	for _, step := range r.Steps {
		// Each step is rendered as its own indented block.
		b.WriteString(indent(step.String()+"\n", 1))
	}
	return b.String()
}
// String implements fmt.Stringer for Step. Empty fields are omitted.
// NOTE(review): the Uses and Env fields are not rendered here — confirm
// that omission is intentional. The With map also iterates in random
// order and its entries are not indented under the "With:" header.
func (s Step) String() string {
	var b strings.Builder
	if s.ID != "" {
		b.WriteString(fmt.Sprintf("ID: %s\n", s.ID))
	}
	if len(s.With) > 0 {
		b.WriteString("With:\n")
		for k, v := range s.With {
			b.WriteString(fmt.Sprintf("%s: %s\n", k, v))
		}
	}
	if s.Shell != "" {
		b.WriteString(fmt.Sprintf("Shell: %s\n", s.Shell))
	}
	if s.Run != "" {
		b.WriteString(fmt.Sprintf("Run:\n%s", indent(s.Run, 1)))
	}
	// Trim the trailing newline so callers control final spacing.
	return strings.TrimSuffix(b.String(), "\n")
}
// indent prefixes each non-empty line of s with level indentation
// levels (indentSize spaces each) and guarantees a trailing newline on
// the result. Empty input is returned unchanged.
func indent(s string, level int) string {
	if s == "" {
		return s
	}
	prefix := strings.Repeat(" ", level*indentSize)
	lines := strings.Split(s, "\n")
	for i := range lines {
		if lines[i] != "" {
			lines[i] = prefix + lines[i]
		}
	}
	return strings.Join(lines, "\n") + "\n"
}
// Parse reads an action.yml file and unmarshals it into an Action.
// It returns a wrapped error if the file cannot be read or the YAML
// cannot be parsed.
func Parse(yamlFilePath string) (Action, error) {
	data, err := os.ReadFile(yamlFilePath)
	if err != nil {
		// BUG FIX: error strings were capitalized and used %v; Go
		// convention is lowercase, and %w preserves the error chain
		// for errors.Is/As.
		return Action{}, fmt.Errorf("reading %s: %w", yamlFilePath, err)
	}
	var action Action
	if err := yaml.Unmarshal(data, &action); err != nil {
		return Action{}, fmt.Errorf("parsing YAML from %s: %w", yamlFilePath, err)
	}
	return action, nil
}

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

3
go.mod
View file

@ -4,11 +4,10 @@ go 1.24
require (
github.com/awalsh128/syspkg v0.1.5
github.com/sethvargo/go-githubactions v1.3.1
github.com/stretchr/testify v1.11.0
)
require mvdan.cc/sh/v3 v3.12.0 // indirect
require (
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect

4
go.sum
View file

@ -4,11 +4,11 @@ github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/sethvargo/go-githubactions v1.3.1 h1:rlwwLRUaunWLQ1aN2o5Y+3s0xhaTC30YObCnilRx448=
github.com/sethvargo/go-githubactions v1.3.1/go.mod h1:7/4WeHgYfSz9U5vwuToCK9KPnELVHAhGtRwLREOQV80=
github.com/stretchr/testify v1.11.0 h1:ib4sjIrwZKxE5u/Japgo/7SJV3PvgjGiRNAvTVGqQl8=
github.com/stretchr/testify v1.11.0/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
mvdan.cc/sh/v3 v3.12.0 h1:ejKUR7ONP5bb+UGHGEG/k9V5+pRVIyD+LsZz7o8KHrI=
mvdan.cc/sh/v3 v3.12.0/go.mod h1:Se6Cj17eYSn+sNooLZiEUnNNmNxg0imoYlTu4CyaGyg=

18
internal/cache/doc.go vendored Normal file
View file

@ -0,0 +1,18 @@
// Package cache provides functionality for managing APT package cache keys.
// It handles the creation, serialization, and validation of cache keys that uniquely
// identify sets of packages for caching in GitHub Actions.
//
// Example usage:
//
// // Create a new cache key
// key := cache.NewKey(packages, "v1.0", "v2", "amd64")
//
// // Write the key to files
// err := key.Write("key.txt", "key.md5")
// if err != nil {
// log.Fatal(err)
// }
//
// // Read and validate a key
// plaintext, hash, err := cache.ReadKey("key.txt", "key.md5")
package cache

31
internal/cache/key.go vendored
View file

@ -1,24 +1,7 @@
// Package cache provides functionality for managing APT package cache keys.
// It handles the creation, serialization, and validation of cache keys that uniquely
// identify sets of packages for caching in GitHub Actions.
//
// Example usage:
//
// // Create a new cache key
// key := cache.NewKey(packages, "v1.0", "v2", "amd64")
//
// // Write the key to files
// err := key.Write("key.txt", "key.md5")
// if err != nil {
// log.Fatal(err)
// }
//
// // Read and validate a key
// plaintext, hash, err := cache.ReadKey("key.txt", "key.md5")
package cache
import (
"crypto/md5"
"crypto/sha256"
"encoding/json"
"fmt"
"os"
@ -93,13 +76,13 @@ func (k Key) String() string {
k.packages.String(), k.version, k.globalVersion, k.osArch)
}
// Hash generates a deterministic MD5 hash of the key's contents.
// Hash generates a deterministic SHA256 hash of the key's contents.
// This hash is used as the actual cache key for storage and lookup.
//
// Note: MD5 is used here for speed and determinism, not cryptographic security.
// Note: SHA256 is used here for better collision resistance and security.
// The hash is based on the string representation to ensure consistency.
func (k Key) Hash() []byte {
hash := md5.Sum([]byte(k.String()))
hash := sha256.Sum256([]byte(k.String()))
return hash[:]
}
@ -214,13 +197,13 @@ func ReadKey(plaintextPath, hashPath string) (plaintext string, hash []byte, err
}
// Validate hash length
if len(storedHash) != md5.Size {
if len(storedHash) != sha256.Size {
return "", nil, fmt.Errorf("invalid hash length in %s: got %d bytes, want %d",
hashPath, len(storedHash), md5.Size)
hashPath, len(storedHash), sha256.Size)
}
// Verify hash matches plaintext
computedHash := md5.Sum(plaintextBytes)
computedHash := sha256.Sum256(plaintextBytes)
if string(computedHash[:]) != string(storedHash) {
return "", nil, fmt.Errorf("hash mismatch: stored hash does not match plaintext content")
}

3
internal/cio/doc.go Normal file
View file

@ -0,0 +1,3 @@
// Package cio provides common I/O operations for the application,
// including JSON serialization, console stream capturing, and file handling.
package cio

View file

@ -1,5 +1,3 @@
// Package cio provides common I/O operations for the application,
// including JSON serialization, console stream capturing, and file handling.
package cio
import (

5
internal/logging/doc.go Normal file
View file

@ -0,0 +1,5 @@
// Package logging provides enhanced logging functionality for the application.
// It wraps the standard log package with additional features like debug logging,
// file output, and concurrent-safe operations. The package maintains a global
// logger instance with configurable output destinations.
package logging

View file

@ -1,7 +1,3 @@
// Package logging provides enhanced logging functionality for the application.
// It wraps the standard log package with additional features like debug logging,
// file output, and concurrent-safe operations. The package maintains a global
// logger instance with configurable output destinations.
package logging
import (
@ -22,8 +18,10 @@ type loggerWrapper struct {
// When true, Debug() calls will output messages; when false, they are ignored.
var DebugEnabled = false
var loggerMu sync.Mutex // Protects logger operations
var logger = createDefault()
var (
loggerMu sync.Mutex // Protects logger operations
logger = createDefault()
)
// create instantiates a new logger with the specified output writers.
// Multiple writers can be provided to output logs to multiple destinations.

4
internal/pkgs/doc.go Normal file
View file

@ -0,0 +1,4 @@
// Package pkgs provides package management functionality using APT.
// It handles APT package operations including installation, querying,
// and dependency resolution for caching workflows.
package pkgs

View file

@ -1,4 +1,3 @@
// Package pkgs provides package management functionality using APT.
package pkgs
import (

View file

@ -1,4 +1,3 @@
// Package testing provides utilities for testing, including capturing standard output and error.
package testing
import (

2
internal/testing/doc.go Normal file
View file

@ -0,0 +1,2 @@
// Package testing provides utilities for testing, including capturing standard output and error.
package testing

View file

@ -1,109 +0,0 @@
#!/bin/bash
#==============================================================================
# distribute.sh
#==============================================================================
#
# DESCRIPTION:
# Manages distribution of compiled binaries for different architectures.
# Handles building, pushing, and retrieving binary paths for GitHub Actions.
#
# USAGE:
# ./scripts/distribute.sh [OPTIONS] <command> [architecture]
#
# COMMANDS:
# push - Build and push all architecture binaries to dist directory
# getbinpath [ARCH] - Get binary path for specified architecture
#
# ARCHITECTURES:
# X86, X64, ARM, ARM64 - GitHub runner architectures
#
# OPTIONS:
# -v, --verbose Enable verbose output
# -h, --help Show this help message
#==============================================================================
# Load shared helpers (fail, parse_common_args, PROJECT_ROOT, ...).
source "$(git rev-parse --show-toplevel)/scripts/lib.sh"
parse_common_args "$@" >/dev/null # prevent return value from echoing

# Positional arguments: subcommand and (optionally) the runner arch.
# NOTE(review): assumes parse_common_args leaves positionals in $1/$2 —
# confirm it shifts any consumed options.
CMD="$1"
RUNNER_ARCH="$2"
BUILD_DIR="${PROJECT_ROOT}/dist"

# GitHub runner.arch values to GOARCH values
# https://github.com/github/docs/blob/main/data/reusables/actions/runner-arch-description.md
# https://github.com/golang/go/blob/master/src/internal/syslist/syslist.go
declare -A rarch_to_goarch=(
  ["X86"]="386"
  ["X64"]="amd64"
  ["ARM"]="arm"
  ["ARM64"]="arm64"
)
#######################################
# Build the cache-apt-pkgs binary for every supported runner
# architecture into BUILD_DIR (wiping any previous contents).
# Globals:   BUILD_DIR (read), PROJECT_ROOT (read), rarch_to_goarch (read)
# Outputs:   build progress to stdout
# Returns:   non-zero if a build fails (assuming errexit from lib.sh)
#######################################
function push() {
  rm -fr "${BUILD_DIR}"
  mkdir -p "${BUILD_DIR}"

  # Package name
  local -r package_name="cache-apt-pkgs"

  # Print the build plan
  echo "Building for these architectures:"
  local arch
  for arch in "${!rarch_to_goarch[@]}"; do
    echo "  - Linux/${arch} (GOARCH=${rarch_to_goarch[${arch}]})"
  done
  echo

  # BUG FIX: go_arch and runner_arch previously leaked into the global
  # scope; all loop variables are local now, and GOARCH is quoted.
  local runner_arch go_arch binary_name
  for runner_arch in "${!rarch_to_goarch[@]}"; do
    go_arch="${rarch_to_goarch[${runner_arch}]}"
    binary_name="${BUILD_DIR}/${package_name}-linux-${go_arch}"
    echo "Building ${binary_name} for Linux/${runner_arch} (GOARCH=${go_arch})..."
    # Build the binary
    GOOS=linux GOARCH="${go_arch}" go build -v \
      -o "${binary_name}" \
      "${PROJECT_ROOT}/cmd/cache_apt_pkgs"
    echo "✓ Build ${package_name}-linux-${go_arch}"
  done
  echo "All builds completed!"
}
#######################################
# Print the path of the prebuilt binary for a GitHub runner arch.
# Globals:   BUILD_DIR (read), rarch_to_goarch (read)
# Arguments: $1 - runner architecture (X86, X64, ARM, ARM64)
# Outputs:   binary path on stdout
# Returns:   exits via fail() on bad input or a missing binary
#######################################
function getbinpath() {
  local -r runner_arch="$1"
  [[ -n "${runner_arch}" ]] || fail "runner architecture not provided"

  local -r go_arch="${rarch_to_goarch[${runner_arch}]}"
  [[ -n "${go_arch}" ]] || fail "invalid runner architecture: ${runner_arch}"

  local -r binary_name="${BUILD_DIR}/cache-apt-pkgs-linux-${go_arch}"
  [[ -f "${binary_name}" ]] || fail "binary not found: ${binary_name} (did you run 'push' first?)"

  echo "${binary_name}"
}
# Dispatch to the requested subcommand; unknown or missing commands
# abort via fail() from lib.sh.
case ${CMD} in
  push)
    push
    ;;
  getbinpath)
    getbinpath "${RUNNER_ARCH}"
    ;;
  "")
    fail "command not provided"
    ;;
  *)
    fail "invalid command: ${CMD}"
    ;;
esac

View file

@ -1,106 +0,0 @@
#!/bin/bash
#==============================================================================
# distribute_test.sh
#==============================================================================
#
# DESCRIPTION:
# Test suite for distribute.sh. Validates command handling, binary creation,
# architecture-specific output, and error conditions for the distribution
# script.
#
# USAGE:
# distribute_test.sh [OPTIONS]
#
# OPTIONS:
# -v, --verbose Enable verbose test output
# --stop-on-failure Stop on first test failure
# -h, --help Show this help message
#
#==============================================================================
# Source the test framework, exports SCRIPT_PATH
source "$(git rev-parse --show-toplevel)/scripts/tests/test_lib.sh"

# Directory where distribute.sh places built binaries.
DIST_DIR="$(get_project_root)/dist"
# Define test functions
#######################################
# Runs the full distribute.sh suite: command validation, getbinpath
# argument handling, push builds, per-architecture binary checks, and a
# cleanup/rebuild cycle. Order matters: later cases depend on the state
# left by earlier ones (built vs. removed dist directory).
# Globals:   DIST_DIR (read); test_lib helpers (test_case, test_section,
#            test_file_exists) drive SCRIPT_PATH under the hood.
#######################################
run_tests() {
  # Disable exit-on-error during test execution to prevent early exit
  set +e

  test_section "command validation"
  test_case "no command" \
    "" \
    "command not provided" \
    false
  test_case "invalid command" \
    "invalid_cmd" \
    "invalid command" \
    false

  test_section "getbinpath"
  test_case "getbinpath no arch" \
    "getbinpath" \
    "runner architecture not provided" \
    false
  test_case "getbinpath invalid arch" \
    "getbinpath INVALID" \
    "invalid runner architecture" \
    false

  test_section "push and binary creation"
  test_case "push command" \
    "push" \
    "All builds completed!" \
    true # Ensure test doesn't cause script exit

  # Test binary existence using direct shell commands instead of test_case
  # because the distribute script doesn't have a 'test' command
  for arch in "X86:386" "X64:amd64" "ARM:arm" "ARM64:arm64"; do
    go_arch=${arch#*:}
    test_file_exists "file exists for ${go_arch}" "${DIST_DIR}/cache-apt-pkgs-linux-${go_arch}"
  done

  # Test getbinpath for each architecture
  for arch in "X86:386" "X64:amd64" "ARM:arm" "ARM64:arm64"; do
    runner_arch=${arch%:*}
    go_arch=${arch#*:}
    test_case "getbinpath for ${runner_arch}" \
      "getbinpath ${runner_arch}" \
      "${DIST_DIR}/cache-apt-pkgs-linux-${go_arch}" \
      true
  done

  test_section "cleanup and rebuild"
  # Direct cleanup
  rm -rf "${DIST_DIR}" 2>/dev/null
  test_case "getbinpath after cleanup" \
    "getbinpath X64" \
    "binary not found" \
    false
  test_case "rebuild after cleanup" \
    "push" \
    "All builds completed!" \
    true
  test_case "getbinpath after rebuild" \
    "getbinpath X64" \
    "${DIST_DIR}/cache-apt-pkgs-linux-amd64" \
    true

  # Re-enable exit-on-error
  set -e
}

# Start the test framework and run tests
start_tests "$@"
run_tests