- Added CLAUDE.md guidance with preferences.

- Refactored README.md
- Added workflows for version export and management.
- Removed src directory, following Go best practices
- Added COMMANDS.md documentation

Saving the AI semi-slop for now with broken states to get a snapshot.
Too lazy to set up another chained repo.
This commit is contained in:
awalsh128 2025-08-29 17:30:25 -07:00
parent aeeea6da9b
commit 07366a6d1e
58 changed files with 4646 additions and 1007 deletions

3
.actrc Normal file
View file

@@ -0,0 +1,3 @@
-P ubuntu-latest=catthehacker/ubuntu:act-latest
--env-file=.env.local
-s GITHUB_TOKEN

1
.env Normal file
View file

@@ -0,0 +1 @@
GO111MODULE=auto

View file

@@ -21,14 +21,14 @@ on:
- cmd/** # Only when action code changes
- internal/** # Only when action code changes
- action.yml
- .github/workflows/test-action.yml
- .github/workflows/action_tests.yml
pull_request:
branches: [dev-v2.0] # Test on PRs to dev branch
paths:
- cmd/** # Only when action code changes
- internal/** # Only when action code changes
- action.yml
- .github/workflows/test-action.yml
- .github/workflows/action_tests.yml
# Environment configuration
env:

View file

@@ -3,6 +3,7 @@ name: CI
on:
push:
branches: [dev-v2.0]
tags: ['v*'] # Trigger on version tags
pull_request:
branches: [dev-v2.0]
schedule:
@@ -26,19 +27,6 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Install golangci-lint
uses: golangci/golangci-lint-action@v4
with:
version: latest
- name: Golang Lint
run: golangci-lint run
- name: trunk.io Lint
uses: trunk-io/trunk-action@v1
with:
arguments: check
- name: Install Go
uses: actions/setup-go@v5
with:
@@ -50,6 +38,24 @@ jobs:
- name: Build
run: go build -v ./...
- name: Check file encodings
run: |
./scripts/check_utf8.sh
- name: Check file encoding changes
id: git-check
run: |
if [[ -n "$(git status --porcelain)" ]]; then
echo "::error::Some files are not in UTF-8 encoding. Please run ./scripts/check_utf8.sh locally and commit the changes."
git status --porcelain
exit 1
fi
- name: trunk.io Lint
uses: trunk-io/trunk-action@v1
with:
arguments: check
- name: Test with coverage
run: go test -v -race -coverprofile=coverage.txt -covermode=atomic ./...
@@ -60,3 +66,53 @@ jobs:
files: ./coverage.txt
fail_ci_if_error: true
validate-scripts:
runs-on: ubuntu-latest
needs: build
steps:
- uses: actions/checkout@v4
- name: Check script syntax
run: |
for script in scripts/*.sh; do
echo "Checking syntax for $script"
bash -n "$script"
done
- name: Check scripts are executable
run: |
for script in scripts/*.sh; do
if [[ ! -x "$script" ]]; then
echo "::error::Script $script is not executable. Run 'chmod +x $script' and commit the changes."
exit 1
fi
done
- name: Check menu integration
run: |
echo "Checking if all scripts are integrated in menu.sh..."
for script in scripts/*.sh; do
# Skip menu.sh itself
if [[ "$(basename "$script")" == "menu.sh" ]]; then
continue
fi
# Look for the script path in menu.sh
if ! grep -q "\".*$(basename "$script")\"" scripts/menu.sh; then
echo "::error::Script $(basename "$script") is not integrated in menu.sh"
exit 1
fi
done
- name: Run script tests
run: |
if [[ -d "scripts/tests" ]]; then
for test in scripts/tests/*_test.sh; do
if [[ -f "$test" ]]; then
echo "Running test: $test"
bash "$test"
fi
done
fi

23
.github/workflows/release.yml vendored Normal file
View file

@@ -0,0 +1,23 @@
name: Release Version
on:
push:
tags: ['v2.*'] # Trigger on version tags >= 2.0.0
workflow_dispatch:
inputs:
version:
description: 'Version tag to update on pkg.go.dev (e.g. v2.0.0)'
required: true
type: string
jobs:
update-pkg-go-dev:
runs-on: ubuntu-latest
steps:
- name: Update pkg.go.dev
run: |
VERSION=${{ github.ref_name || github.event.inputs.version }}
echo "Updating pkg.go.dev for version $VERSION"
curl -i https://proxy.golang.org/github.com/awalsh128/cache-apt-pkgs-action/@v/$VERSION.info
# Trigger a package load
GOPROXY=https://proxy.golang.org GO111MODULE=on go get github.com/awalsh128/cache-apt-pkgs-action@$VERSION

9
.gitignore vendored
View file

@@ -1 +1,8 @@
src/cmd/apt_query/apt_query*
src/cmd/apt_query/apt_query*
# Environment files
.env.local
.env.*.local
# Don't ignore the main .env file
!.env

View file

@@ -1,462 +0,0 @@
# This file is licensed under the terms of the MIT license https://opensource.org/license/mit
# Copyright (c) 2021-2025 Marat Reymers
## Golden config for golangci-lint v2.4.0
#
# This is the best config for golangci-lint based on my experience and opinion.
# It is very strict, but not extremely strict.
# Feel free to adapt it to suit your needs.
# If this config helps you, please consider keeping a link to this file (see the next comment).
# Based on https://gist.github.com/maratori/47a4d00457a92aa426dbd48a18776322
version: "2"
issues:
# Maximum count of issues with the same text.
# Set to 0 to disable.
# Default: 3
max-same-issues: 50
formatters:
enable:
- goimports # checks if the code and import statements are formatted according to the 'goimports' command
- golines # checks if code is formatted, and fixes long lines
- gci # checks if code and import statements are formatted, with additional rules
- gofmt # checks if the code is formatted according to 'gofmt' command
- gofumpt # enforces a stricter format than 'gofmt', while being backwards compatible
- swaggo # formats swaggo comments
# All settings can be found here https://github.com/golangci/golangci-lint/blob/HEAD/.golangci.reference.yml
settings:
goimports:
# A list of prefixes, which, if set, checks import paths
# with the given prefixes are grouped after 3rd-party packages.
# Default: []
local-prefixes:
- github.com/awalsh128/cache-apt-pkgs
golines:
# Target maximum line length.
# Default: 100
max-len: 100
linters:
enable:
- asasalint # checks for pass []any as any in variadic func(...any)
- asciicheck # checks that your code does not contain non-ASCII identifiers
- bidichk # checks for dangerous unicode character sequences
- bodyclose # checks whether HTTP response body is closed successfully
- canonicalheader # checks whether net/http.Header uses canonical header
- govet # examines Go source code and reports suspicious constructs
- staticcheck # comprehensive checks for Go programs
- errcheck # checks for unchecked errors
- ineffassign # detects ineffective assignments
- gocritic # provides deep code analysis
- copyloopvar # detects places where loop variables are copied (Go 1.22+)
- cyclop # checks function and package cyclomatic complexity
- depguard # checks if package imports are in a list of acceptable packages
- dupl # tool for code clone detection
- durationcheck # checks for two durations multiplied together
- embeddedstructfieldcheck # checks embedded types in structs
- errcheck # checking for unchecked errors, these unchecked errors can be critical bugs in some cases
- errname # checks that sentinel errors are prefixed with the Err and error types are suffixed with the Error
- errorlint # finds code that will cause problems with the error wrapping scheme introduced in Go 1.13
- exhaustive # checks exhaustiveness of enum switch statements
- exptostd # detects functions from golang.org/x/exp/ that can be replaced by std functions
- fatcontext # detects nested contexts in loops
- forbidigo # forbids identifiers
- funcorder # checks the order of functions, methods, and constructors
- funlen # tool for detection of long functions
- gocheckcompilerdirectives # validates go compiler directive comments (//go:)
- gochecknoglobals # checks that no global variables exist
- gochecknoinits # checks that no init functions are present in Go code
- gochecksumtype # checks exhaustiveness on Go "sum types"
- gocognit # computes and checks the cognitive complexity of functions
- goconst # finds repeated strings that could be replaced by a constant
- gocritic # provides diagnostics that check for bugs, performance and style issues
- gocyclo # computes and checks the cyclomatic complexity of functions
- godot # checks if comments end in a period
- gomoddirectives # manages the use of 'replace', 'retract', and 'excludes' directives in go.mod
- goprintffuncname # checks that printf-like functions are named with f at the end
- gosec # inspects source code for security problems
- govet # reports suspicious constructs, such as Printf calls whose arguments do not align with the format string
- iface # checks the incorrect use of interfaces, helping developers avoid interface pollution
- ineffassign # detects when assignments to existing variables are not used
- intrange # finds places where for loops could make use of an integer range
- loggercheck # checks key value pairs for common logger libraries (kitlog,klog,logr,zap)
- makezero # finds slice declarations with non-zero initial length
- mirror # reports wrong mirror patterns of bytes/strings usage
- mnd # detects magic numbers
- musttag # enforces field tags in (un)marshaled structs
- nakedret # finds naked returns in functions greater than a specified function length
- nestif # reports deeply nested if statements
- nilerr # finds the code that returns nil even if it checks that the error is not nil
- nilnesserr # reports that it checks for err != nil, but it returns a different nil value error (powered by nilness and nilerr)
- nilnil # checks that there is no simultaneous return of nil error and an invalid value
- noctx # finds sending http request without context.Context
- nolintlint # reports ill-formed or insufficient nolint directives
- nonamedreturns # reports all named returns
- nosprintfhostport # checks for misuse of Sprintf to construct a host with port in a URL
- perfsprint # checks that fmt.Sprintf can be replaced with a faster alternative
- predeclared # finds code that shadows one of Go's predeclared identifiers
- promlinter # checks Prometheus metrics naming via promlint
- protogetter # reports direct reads from proto message fields when getters should be used
- reassign # checks that package variables are not reassigned
- recvcheck # checks for receiver type consistency
- revive # fast, configurable, extensible, flexible, and beautiful linter for Go, drop-in replacement of golint
- rowserrcheck # checks whether Err of rows is checked successfully
- sloglint # ensure consistent code style when using log/slog
- spancheck # checks for mistakes with OpenTelemetry/Census spans
- sqlclosecheck # checks that sql.Rows and sql.Stmt are closed
- staticcheck # is a go vet on steroids, applying a ton of static analysis checks
- testableexamples # checks if examples are testable (have an expected output)
- testifylint # checks usage of github.com/stretchr/testify
- testpackage # makes you use a separate _test package
- tparallel # detects inappropriate usage of t.Parallel() method in your Go test codes
- unconvert # removes unnecessary type conversions
- unparam # reports unused function parameters
- unused # checks for unused constants, variables, functions and types
- usestdlibvars # detects the possibility to use variables/constants from the Go standard library
- usetesting # reports uses of functions with replacement inside the testing package
- wastedassign # finds wasted assignment statements
- whitespace # detects leading and trailing whitespace
## you may want to enable
#- arangolint # opinionated best practices for arangodb client
#- decorder # checks declaration order and count of types, constants, variables and functions
#- exhaustruct # [highly recommend to enable] checks if all structure fields are initialized
#- ginkgolinter # [if you use ginkgo/gomega] enforces standards of using ginkgo and gomega
#- godox # detects usage of FIXME, TODO and other keywords inside comments
#- goheader # checks if the file header matches a pattern
#- inamedparam # [great idea, but too strict, need to ignore a lot of cases by default] reports interfaces with unnamed method parameters
#- interfacebloat # checks the number of methods inside an interface
#- ireturn # accept interfaces, return concrete types
#- noinlineerr # disallows inline error handling `if err := ...; err != nil {`
#- prealloc # [premature optimization, but can be used in some cases] finds slice declarations that could potentially be preallocated
#- tagalign # checks that struct tags are well aligned
#- varnamelen # [great idea, but too many false positives] checks that the length of a variable's name matches its scope
#- wrapcheck # checks that errors returned from external packages are wrapped
#- zerologlint # detects the wrong usage of zerolog that a user forgets to dispatch zerolog.Event
## disabled
#- containedctx # detects struct contained context.Context field
#- contextcheck # [too many false positives] checks the function whether use a non-inherited context
#- dogsled # checks assignments with too many blank identifiers (e.g. x, _, _, _, := f())
#- dupword # [useless without config] checks for duplicate words in the source code
#- err113 # [too strict] checks the errors handling expressions
#- errchkjson # [don't see profit + I'm against omitting errors like in the first example https://github.com/breml/errchkjson] checks types passed to the json encoding functions. Reports unsupported types and optionally reports occasions, where the check for the returned error can be omitted
#- forcetypeassert # [replaced by errcheck] finds forced type assertions
#- gomodguard # [use more powerful depguard] allow and block lists linter for direct Go module dependencies
#- gosmopolitan # reports certain i18n/l10n anti-patterns in your Go codebase
#- grouper # analyzes expression groups
#- importas # enforces consistent import aliases
#- lll # [replaced by golines] reports long lines
#- maintidx # measures the maintainability index of each function
#- misspell # [useless] finds commonly misspelled English words in comments
#- nlreturn # [too strict and mostly code is not more readable] checks for a new line before return and branch statements to increase code clarity
#- paralleltest # [too many false positives] detects missing usage of t.Parallel() method in your Go test
#- tagliatelle # checks the struct tags
#- thelper # detects golang test helpers without t.Helper() call and checks the consistency of test helpers
#- wsl # [too strict and mostly code is not more readable] whitespace linter forces you to use empty lines
#- wsl_v5 # [too strict and mostly code is not more readable] add or remove empty lines
# All settings can be found here https://github.com/golangci/golangci-lint/blob/HEAD/.golangci.reference.yml
settings:
cyclop:
# The maximal code complexity to report.
# Default: 10
max-complexity: 30
# The maximal average package complexity.
# If it's higher than 0.0 (float) the check is enabled.
# Default: 0.0
package-average: 10.0
depguard:
# Rules to apply.
#
# Variables:
# - File Variables
# Use an exclamation mark `!` to negate a variable.
# Example: `!$test` matches any file that is not a go test file.
#
# `$all` - matches all go files
# `$test` - matches all go test files
#
# - Package Variables
#
# `$gostd` - matches all of go's standard library (Pulled from `GOROOT`)
#
# Default (applies if no custom rules are defined): Only allow $gostd in all files.
rules:
"deprecated":
# List of file globs that will match this list of settings to compare against.
# By default, if a path is relative, it is relative to the directory where the golangci-lint command is executed.
# The placeholder '${base-path}' is substituted with a path relative to the mode defined with `run.relative-path-mode`.
# The placeholder '${config-path}' is substituted with a path relative to the configuration file.
# Default: $all
files:
- $all
# List of packages that are not allowed.
# Entries can be a variable (starting with $), a string prefix, or an exact match (if ending with $).
# Default: []
deny:
- pkg: github.com/golang/protobuf
desc: Use google.golang.org/protobuf instead, see https://developers.google.com/protocol-buffers/docs/reference/go/faq#modules
- pkg: github.com/satori/go.uuid
desc: Use github.com/google/uuid instead, satori's package is not maintained
- pkg: github.com/gofrs/uuid$
desc: Use github.com/gofrs/uuid/v5 or later, it was not a go module before v5
"non-test files":
files:
- "!$test"
deny:
- pkg: math/rand$
desc: Use math/rand/v2 instead, see https://go.dev/blog/randv2
"non-main files":
files:
- "!**/main.go"
deny:
- pkg: log$
desc: Use log/slog instead, see https://go.dev/blog/slog
embeddedstructfieldcheck:
# Checks that sync.Mutex and sync.RWMutex are not used as embedded fields.
# Default: false
forbid-mutex: true
errcheck:
# Report about not checking of errors in type assertions: `a := b.(MyStruct)`.
# Such cases aren't reported by default.
# Default: false
check-type-assertions: true
exhaustive:
# Program elements to check for exhaustiveness.
# Default: [ switch ]
check:
- switch
- map
exhaustruct:
# List of regular expressions to match type names that should be excluded from processing.
# Anonymous structs can be matched by '<anonymous>' alias.
# Has precedence over `include`.
# Each regular expression must match the full type name, including package path.
# For example, to match type `net/http.Cookie` regular expression should be `.*/http\.Cookie`,
# but not `http\.Cookie`.
# Default: []
exclude:
# std libs
- ^net/http.Client$
- ^net/http.Cookie$
- ^net/http.Request$
- ^net/http.Response$
- ^net/http.Server$
- ^net/http.Transport$
- ^net/url.URL$
- ^os/exec.Cmd$
- ^reflect.StructField$
# public libs
- ^github.com/Shopify/sarama.Config$
- ^github.com/Shopify/sarama.ProducerMessage$
- ^github.com/mitchellh/mapstructure.DecoderConfig$
- ^github.com/prometheus/client_golang/.+Opts$
- ^github.com/spf13/cobra.Command$
- ^github.com/spf13/cobra.CompletionOptions$
- ^github.com/stretchr/testify/mock.Mock$
- ^github.com/testcontainers/testcontainers-go.+Request$
- ^github.com/testcontainers/testcontainers-go.FromDockerfile$
- ^golang.org/x/tools/go/analysis.Analyzer$
- ^google.golang.org/protobuf/.+Options$
- ^gopkg.in/yaml.v3.Node$
# Allows empty structures in return statements.
# Default: false
allow-empty-returns: true
funcorder:
# Checks if the exported methods of a structure are placed before the non-exported ones.
# Default: true
struct-method: false
funlen:
# Checks the number of lines in a function.
# If lower than 0, disable the check.
# Default: 60
lines: 100
# Checks the number of statements in a function.
# If lower than 0, disable the check.
# Default: 40
statements: 50
gochecksumtype:
# Presence of `default` case in switch statements satisfies exhaustiveness, if all members are not listed.
# Default: true
default-signifies-exhaustive: false
gocognit:
# Minimal code complexity to report.
# Default: 30 (but we recommend 10-20)
min-complexity: 20
gocritic:
# Settings passed to gocritic.
# The settings key is the name of a supported gocritic checker.
# The list of supported checkers can be found at https://go-critic.com/overview.
settings:
captLocal:
# Whether to restrict checker to params only.
# Default: true
paramsOnly: false
underef:
# Whether to skip (*x).method() calls where x is a pointer receiver.
# Default: true
skipRecvDeref: false
govet:
# Enable all analyzers.
# Default: false
enable-all: true
# Disable analyzers by name.
# Run `GL_DEBUG=govet golangci-lint run --enable=govet` to see default, all available analyzers, and enabled analyzers.
# Default: []
disable:
- fieldalignment # too strict
# Settings per analyzer.
settings:
shadow:
# Whether to be strict about shadowing; can be noisy.
# Default: false
strict: true
inamedparam:
# Skips check for interface methods with only a single parameter.
# Default: false
skip-single-param: true
mnd:
# List of function patterns to exclude from analysis.
# Values always ignored: `time.Date`,
# `strconv.FormatInt`, `strconv.FormatUint`, `strconv.FormatFloat`,
# `strconv.ParseInt`, `strconv.ParseUint`, `strconv.ParseFloat`.
# Default: []
ignored-functions:
- args.Error
- flag.Arg
- flag.Duration.*
- flag.Float.*
- flag.Int.*
- flag.Uint.*
- os.Chmod
- os.Mkdir.*
- os.OpenFile
- os.WriteFile
- prometheus.ExponentialBuckets.*
- prometheus.LinearBuckets
nakedret:
# Make an issue if func has more lines of code than this setting, and it has naked returns.
# Default: 30
max-func-lines: 0
nolintlint:
# Exclude following linters from requiring an explanation.
# Default: []
allow-no-explanation: [funlen, gocognit, golines]
# Enable to require an explanation of nonzero length after each nolint directive.
# Default: false
require-explanation: true
# Enable to require nolint directives to mention the specific linter being suppressed.
# Default: false
require-specific: true
perfsprint:
# Optimizes into strings concatenation.
# Default: true
strconcat: false
reassign:
# Patterns for global variable names that are checked for reassignment.
# See https://github.com/curioswitch/go-reassign#usage
# Default: ["EOF", "Err.*"]
patterns:
- ".*"
rowserrcheck:
# database/sql is always checked.
# Default: []
packages:
- github.com/jmoiron/sqlx
sloglint:
# Enforce not using global loggers.
# Values:
# - "": disabled
# - "all": report all global loggers
# - "default": report only the default slog logger
# https://github.com/go-simpler/sloglint?tab=readme-ov-file#no-global
# Default: ""
no-global: all
# Enforce using methods that accept a context.
# Values:
# - "": disabled
# - "all": report all contextless calls
# - "scope": report only if a context exists in the scope of the outermost function
# https://github.com/go-simpler/sloglint?tab=readme-ov-file#context-only
# Default: ""
context: scope
staticcheck:
# SAxxxx checks in https://staticcheck.dev/docs/configuration/options/#checks
# Example (to disable some checks): [ "all", "-SA1000", "-SA1001"]
# Default: ["all", "-ST1000", "-ST1003", "-ST1016", "-ST1020", "-ST1021", "-ST1022"]
checks:
- all
# Incorrect or missing package comment.
# https://staticcheck.dev/docs/checks/#ST1000
- -ST1000
# Use consistent method receiver names.
# https://staticcheck.dev/docs/checks/#ST1016
- -ST1016
# Omit embedded fields from selector expression.
# https://staticcheck.dev/docs/checks/#QF1008
- -QF1008
usetesting:
# Enable/disable `os.TempDir()` detections.
# Default: false
os-temp-dir: true
exclusions:
# Log a warning if an exclusion rule is unused.
# Default: false
warn-unused: true
# Predefined exclusion rules.
# Default: []
presets:
- std-error-handling
- common-false-positives
# Excluding configuration per-path, per-linter, per-text and per-source.
rules:
- source: "TODO"
linters: [godot]
- text: "should have a package comment"
linters: [revive]
- text: 'exported \S+ \S+ should have comment( \(or a comment on this block\))? or be unexported'
linters: [revive]
- text: 'package comment should be of the form ".+"'
source: "// ?(nolint|TODO)"
linters: [revive]
- text: 'comment on exported \S+ \S+ should be of the form ".+"'
source: "// ?(nolint|TODO)"
linters: [revive, staticcheck]
- path: '_test\.go'
linters:
- bodyclose
- dupl
- errcheck
- funlen
- goconst
- gosec
- noctx
- wrapcheck

View file

@@ -1,4 +1 @@
vendor/**/*
dist/**/*
bin/**/*
*.pb.go

View file

@@ -0,0 +1,87 @@
version: "2"
linters:
enable:
- asasalint # checks for pass []any as any in variadic func(...any)
- asciicheck # checks that your code does not contain non-ASCII identifiers
- bidichk # checks for dangerous unicode character sequences
- bodyclose # checks whether HTTP response body is closed successfully
- containedctx # detects struct contained context.Context field
- contextcheck # checks the function whether use a non-inherited context
- cyclop # checks function and package cyclomatic complexity
- decorder # checks declaration order and count of types, constants, variables and functions
- dogsled # checks assignments with too many blank identifiers
- dupl # checks code clone duplication
- durationcheck # checks for two durations multiplied together
- errcheck # checks unchecked errors
- errchkjson # checks types passed to encoding/json functions
- errname # checks that sentinel errors are prefixed with the Err and error types are suffixed with the Error
- errorlint # finds code that will cause problems with the error wrapping scheme
- execinquery # checks query strings passed to the Query function, reading your Go source files and warning about issues it finds
- exhaustive # checks exhaustiveness of enum switch statements
- exportloopref # checks for pointers to enclosing loop variables
- forcetypeassert # finds forced type assertions
- funlen # checks for long functions
- gocheckcompilerdirectives # validates go compiler directive comments
- gochecknoglobals # checks that no global variables exist
- gochecknoinits # checks that no init functions are present
- gocognit # computes and checks the cognitive complexity
- goconst # finds repeated strings that could be replaced by a constant
- gocritic # provides diagnostics that check for bugs, performance and style issues
- gocyclo # checks cyclomatic complexity
- godot # checks if comments end in a period
- godox # detects FIXME, TODO and other comment keywords
- goerr113 # checks the errors handling expressions
- gofmt # checks whether code was gofmt-ed
- gofumpt # checks whether code was gofumpt-ed
- goheader # checks if the file header matches a pattern
- goimports # does everything that gofmt does + formats imports
- gomnd # detects magic numbers
- gomoddirectives # manages the use of 'replace', 'retract', and 'excludes' directives in go.mod
- gomodguard # allows to specify a list of forbidden modules
- goprintffuncname # checks that printf-like functions are named with f at the end
- gosec # inspects source code for security problems
- gosimple # specializes in simplifying code
- govet # reports suspicious constructs
- grouper # analyzes expression groups
- importas # enforces consistent import aliases
- ineffassign # detects when assignments to existing variables are not used
- interfacebloat # checks the number of methods inside an interface
- ireturn # accept interfaces, return concrete types
- lll # reports long lines
- loggercheck # checks key value pairs for common logger libraries
- maintidx # measures the maintainability index of each function
- makezero # finds slice declarations with non-zero initial length
- misspell # finds commonly misspelled English words
- nakedret # finds naked returns
- nestif # reports deeply nested if statements
- nilerr # finds the code that returns nil even if it checks that error is not nil
- nilnil # checks that there is no simultaneous return of nil error and an invalid value
- nlreturn # checks for a new line before return and branch statements
- noctx # finds sending http request without context.Context
- nolintlint # reports ill-formed or insufficient nolint directives
- nonamedreturns # reports all named returns
- nosprintfhostport # checks for misuse of Sprintf to construct a host with port in a URL
- paralleltest # detects missing usage of t.Parallel() method in your Go test
- prealloc # finds slice declarations that could potentially be pre-allocated
- predeclared # finds code that shadows one of Go's predeclared identifiers
- promlinter # checks Prometheus metrics naming via promlint
- reassign # checks that package variables are not reassigned
- revive # fast, configurable, extensible, flexible, and beautiful linter for Go
- rowserrcheck # checks whether Err of rows is checked successfully
- sqlclosecheck # checks that sql.Rows and sql.Stmt are closed
- staticcheck # comprehensive checks for bugs and inefficiencies
- stylecheck # replacement for golint
- tenv # detects using os.Setenv instead of t.Setenv since Go1.17
- testableexamples # checks if examples are testable
- testpackage # makes you use a separate _test package
- thelper # detects golang test helpers without t.Helper()
- tparallel # detects inappropriate usage of t.Parallel()
- typecheck # like the front-end of a Go compiler
- unconvert # removes unnecessary type conversions
- unparam # reports unused function parameters
- unused # checks for unused constants, variables, functions and types
- usestdlibvars # detects the possibility to use variables/constants from the Go standard library
- varnamelen # checks that the length of a variable's name matches its scope
- wastedassign # finds wasted assignment statements
- whitespace # detects leading and trailing whitespace

View file

@@ -12,9 +12,9 @@ runtimes:
enabled:
- go@1.21.0
lint:
disabled:
- cspell
enabled:
- goimports@0.9.1
- golines@0.13.0
- markdownlint@0.45.0
- actionlint@1.7.7
- gofmt@1.20.4
@@ -22,24 +22,11 @@ lint:
- shellcheck@0.10.0
- shfmt@3.6.0
- yamllint@1.37.1
- cspell@0.27.1
definitions:
- name: golangci-lint
files: [go]
commands:
- name: golangci-lint
output: regex
parse_regex: "^(?P<path>.*?):(?P<line>\\d+):(?P<column>\\d+): (?P<message>.*)$"
run: golangci-lint run ${target}
success_codes: [0]
in_place: false
- name: shell
files: [shell]
commands:
- name: shellcheck
output: regex
parse_regex: "^(?P<path>.*?):(?P<line>\\d+):(?P<column>\\d+): (?P<level>.*?) (?P<message>.*)$"
run: shellcheck -f gcc ${target}
success_codes: [0]
- name: cspell
files: [".md$"]
actions:
enabled:
- trunk-upgrade-available

View file

@@ -1,5 +1,9 @@
{
"cSpell.enabled": false,
"editor.rulers": [
100
],
"editor.wordWrapColumn": 100,
"[go]": {
"editor.defaultFormatter": "trunk.io",
"editor.formatOnSave": true

1043
CLAUDE.md Normal file

File diff suppressed because it is too large Load diff

215
COMMANDS.md Normal file
View file

@@ -0,0 +1,215 @@
# Command Line Usage Guide
This document provides information about using the `cache-apt-pkgs` command line tool.
## Basic Usage
The basic syntax for the command is:
```bash
cache-apt-pkgs <command> [flags] [packages]
```
## Available Commands
### 1. Install Command
Install and cache APT packages:
```bash
cache-apt-pkgs install [flags] [packages]
```
#### Flags for Install
- `--version`: Cache version identifier (optional)
- `--execute-scripts`: Execute package install scripts (optional, default: false)
#### Install Examples
```bash
# Install specific versions
cache-apt-pkgs install python3-dev=3.9.5-3 cmake=3.18.4-2
# Install latest versions
cache-apt-pkgs install python3-dev cmake
# Install with custom cache version
cache-apt-pkgs install --version=1.0 python3-dev cmake
# Install with script execution
cache-apt-pkgs install --execute-scripts=true python3-dev cmake
```
### 2. Create Key Command
Create a cache key for packages:
```bash
cache-apt-pkgs create-key [flags] [packages]
```
#### Flags for Create Key
- `--version`: Cache version identifier (optional)
#### Create Key Examples
```bash
# Create key with default version
cache-apt-pkgs create-key python3-dev cmake
# Create key with custom version
cache-apt-pkgs create-key --version=1.0 python3-dev cmake
```
### 3. Restore Command
Restore packages from cache:
```bash
cache-apt-pkgs restore [flags] [packages]
```
#### Flags for Restore
- `--version`: Cache version to restore from (optional)
- `--execute-scripts`: Execute package install scripts (optional, default: false)
#### Restore Examples
```bash
# Restore with specific version
cache-apt-pkgs restore --version=1.0 python3-dev cmake
# Restore with script execution
cache-apt-pkgs restore --execute-scripts=true python3-dev cmake
```
### 4. Validate Command
Validate package names and versions:
```bash
cache-apt-pkgs validate [packages]
```
#### Examples
```bash
# Validate package names and versions
cache-apt-pkgs validate python3-dev=3.9.5-3 cmake=3.18.4-2
# Validate package names only
cache-apt-pkgs validate python3-dev cmake
```
## Package Specification
Packages can be specified in two formats:
1. Name only: `package-name`
2. Name with version: `package-name=version`
Examples:
- `python3-dev`
- `python3-dev=3.9.5-3`
- `cmake=3.18.4-2`
## Environment Variables
The following environment variables can be used to configure the tool:
- `RUNNER_DEBUG`: Set to `1` to enable debug logging
- `RUNNER_TEMP`: Directory for temporary files (default: system temp dir)
## Common Tasks
### Installing Multiple Packages
```bash
cache-apt-pkgs install \
python3-dev \
cmake \
build-essential \
libssl-dev
```
### Creating Custom Cache Keys
```bash
cache-apt-pkgs create-key \
--version="$(date +%Y%m%d)" \
python3-dev \
cmake
```
### Restoring Specific Versions
```bash
cache-apt-pkgs restore \
--version=1.0 \
python3-dev=3.9.5-3 \
cmake=3.18.4-2
```
## Best Practices
1. Version Management
- Use specific versions for reproducible builds
- Use version-less package names for latest versions
- Use timestamp-based cache versions for forced updates
2. Cache Optimization
- Group related packages in the same cache
- Use consistent version strings across workflows
- Clean up old caches periodically
3. Error Handling
- Validate packages before installation
- Check for missing dependencies
- Use debug logging for troubleshooting
## Troubleshooting
Common issues and solutions:
1. Package Not Found
```bash
# Validate package name and availability
cache-apt-pkgs validate package-name
```
2. Cache Miss
```bash
# Check if package versions match exactly
cache-apt-pkgs restore --version=1.0 package-name=exact-version
```
3. Installation Errors
```bash
# Try with script execution
cache-apt-pkgs install --execute-scripts=true package-name
```
## Advanced Usage
### Using with GitHub Actions
```yaml
steps:
- name: Cache APT Packages
uses: awalsh128/cache-apt-pkgs-action@v2
with:
packages: python3-dev cmake
version: ${{ github.sha }}
execute_install_scripts: true
```
For more information, refer to:
- [GitHub Action Documentation](README.md)
- [Source Code](cmd/cache_apt_pkgs/)

284
CONTRIBUTING.md Normal file
View file

@ -0,0 +1,284 @@
# 🤝 Contributing to cache-apt-pkgs-action
Thank you for your interest in contributing to cache-apt-pkgs-action! This document provides guidelines and instructions for contributing to the project.
[![CI](https://github.com/awalsh128/cache-apt-pkgs-action/actions/workflows/ci.yml/badge.svg?branch=dev-v2.0)](https://github.com/awalsh128/cache-apt-pkgs-action/actions/workflows/ci.yml?query=branch%3Adev-v2.0)
[![Go Report Card](https://goreportcard.com/badge/github.com/awalsh128/cache-apt-pkgs-action)](https://goreportcard.com/report/github.com/awalsh128/cache-apt-pkgs-action)
[![Go Reference](https://pkg.go.dev/badge/github.com/awalsh128/cache-apt-pkgs-action.svg)](https://pkg.go.dev/github.com/awalsh128/cache-apt-pkgs-action)
[![License](https://img.shields.io/github/license/awalsh128/cache-apt-pkgs-action)](https://github.com/awalsh128/cache-apt-pkgs-action/blob/dev-v2.0/LICENSE)
[![Release](https://img.shields.io/github/v/release/awalsh128/cache-apt-pkgs-action)](https://github.com/awalsh128/cache-apt-pkgs-action/releases)
⚠️ **IMPORTANT**: This branch is highly unstable and will be released as version 2.0 once it reaches beta.
## 🔗 Useful Links
- 📖 [GitHub Action Documentation](https://github.com/awalsh128/cache-apt-pkgs-action#readme)
- 📦 [Go Package Documentation](https://pkg.go.dev/github.com/awalsh128/cache-apt-pkgs-action)
- 🔄 [GitHub Actions Workflow Status](https://github.com/awalsh128/cache-apt-pkgs-action/actions)
- 🐛 [Issues](https://github.com/awalsh128/cache-apt-pkgs-action/issues)
- 🛠️ [Pull Requests](https://github.com/awalsh128/cache-apt-pkgs-action/pulls)
## 🚀 Development Setup
### 📋 Prerequisites
1. 🔵 [Go 1.23 or later](https://golang.org/dl/)
2. 💻 [Visual Studio Code](https://code.visualstudio.com/) (recommended)
3. 📂 [Git](https://git-scm.com/downloads)
### 🛠️ Setting Up Your Development Environment
1. 📥 Clone the repository:
```bash
git clone https://github.com/awalsh128/cache-apt-pkgs-action.git
cd cache-apt-pkgs-action
```
2. 🔧 Use the provided development scripts:
```bash
# Interactive menu for all development tasks
./scripts/menu.sh
# Or use individual scripts directly:
./scripts/setup_dev.sh # Set up development environment
./scripts/update_md_tocs.sh # Update table of contents in markdown files
```
### 📜 Available Development Scripts
The project includes several utility scripts to help with development:
- 🎯 `menu.sh`: Interactive menu system for all development tasks
- Environment setup
- Testing and coverage
- Documentation updates
- Code formatting
- Build and release tasks
- 🛠️ Individual Scripts:
- `setup_dev.sh`: Sets up the development environment
- `update_md_tocs.sh`: Updates table of contents in markdown files
- `check_utf8.sh`: Validates file encodings
- `distribute_test.sh`: Runs distribution tests
To access the menu system, run:
```bash
./scripts/menu.sh
```
This will present an interactive menu with all available development tasks.
## 🧪 Testing
### 🏃 Running Tests Locally
1. 🔬 Run unit tests:
```bash
go test ./...
```
2. 📊 Run tests with coverage:
```bash
go test -v -race -coverprofile=coverage.txt -covermode=atomic ./...
```
### 🔄 Testing GitHub Action Workflows
There are two ways to test the GitHub Action workflows:
1. ☁️ **Using GitHub Actions**:
- Push your changes to a branch
   - Create a PR to trigger the [test workflow](https://github.com/awalsh128/cache-apt-pkgs-action/blob/dev-v2.0/.github/workflows/action_tests.yml)
   - Or manually trigger the workflow from the [Actions tab](https://github.com/awalsh128/cache-apt-pkgs-action/actions/workflows/action_tests.yml)
2. 🐳 **Running Tests Locally** (requires Docker):
- Install Docker
- 🪟 WSL users install [Docker Desktop](https://www.docker.com/products/docker-desktop/)
- 🐧 Non-WSL users (native Linux)
```bash
curl -fsSL https://get.docker.com -o get-docker.sh &&
sudo sh get-docker.sh &&
sudo usermod -aG docker $USER &&
sudo systemctl start docker
```
- 🎭 Install [`act`](https://github.com/nektos/act) for local GitHub Actions testing:
- ▶️ Run `act` on any action test in the following ways:
```bash
act -j list_versions # Get all the available tests
act push # Run push event workflows
act pull_request # Run PR workflows
act workflow_dispatch -i ref=dev-v2.0 -i debug=true # Manual trigger workflow
```
## 📝 Making Changes
1. 🌿 Create a new branch for your changes:
```bash
git checkout -b feature/your-feature-name
```
## Testing
### Running Tests Locally
1. Run unit tests:
```bash
go test ./...
```
2. Run tests with coverage:
```bash
go test -v -race -coverprofile=coverage.txt -covermode=atomic ./...
```
### Testing GitHub Action Workflows
There are two ways to test the GitHub Action workflows:
1. **Using GitHub Actions**:
- Push your changes to a branch
   - Create a PR to trigger the [test workflow](https://github.com/awalsh128/cache-apt-pkgs-action/blob/dev-v2.0/.github/workflows/action_tests.yml)
   - Or manually trigger the workflow from the [Actions tab](https://github.com/awalsh128/cache-apt-pkgs-action/actions/workflows/action_tests.yml)
2. **Running Tests Locally** (requires Docker):
- Install Docker
- WSL users install [Docker Desktop](https://www.docker.com/products/docker-desktop/)
- Non-WSL users (native Linux)
```bash
curl -fsSL https://get.docker.com -o get-docker.sh && \
sudo sh get-docker.sh && \
sudo usermod -aG docker $USER && \
sudo systemctl start docker
```
- Install [`act`](https://github.com/nektos/act) for local GitHub Actions testing:
- Run `act` on any action test in the following ways:
```bash
act -j list_versions # Get all the available tests
act push # Run push event workflows
act pull_request # Run PR workflows
act workflow_dispatch -i ref=dev-v2.0 -i debug=true # Manual trigger workflow
```
## Making Changes
1. Create a new branch for your changes:
```bash
git checkout -b feature/your-feature-name
```
2. ✏️ Make your changes, following these guidelines:
- 📚 Follow Go coding [standards and conventions](https://go.dev/doc/effective_go)
- ✅ Add tests for new features
- 🎯 Test behaviors on the public interface not implementation
- 🔍 Keep tests for each behavior separate
   - 🏭 Use constants and factory functions to keep test arrangement and assertions clear; avoid boilerplate that is not directly relevant to the test.
- 📖 Update documentation as needed
- 🎯 Keep commits focused and atomic
- 📝 Write clear commit messages
3. 🧪 Test your changes locally before submitting
## 🔄 Pull Request Process
1. 📚 Update the README.md with details of significant changes if applicable
2. ✅ Verify that all tests pass:
- 🧪 Unit tests
- 🔄 Integration tests
- 🚀 GitHub Action workflow tests
3. 📥 Create a Pull Request:
- 🎯 Target the `dev-v2.0` branch
- 📝 Provide a clear description of the changes
- 🔗 Reference any related issues
- 📊 Include test results and any relevant screenshots
4. 👀 Address review feedback and make requested changes
## 💻 Code Style Guidelines
- 📏 Follow [standard Go formatting](https://golang.org/doc/effective_go#formatting) (use `gofmt`)
- 📖 Follow [Go Code Review Comments](https://github.com/golang/go/wiki/CodeReviewComments)
- 🔍 Write clear, self-documenting code
- 📚 Add [godoc](https://blog.golang.org/godoc) comments for complex logic
- 🏷️ Use meaningful variable and function names
- ✨ Keep functions focused and manageable in size
- 🔒 Prefer immutability vs state changing
- 📏 Aim for functions shorter than 50 lines
- 🎯 Observe [single responsibility principle](https://en.wikipedia.org/wiki/Single-responsibility_principle)
📚 For more details on Go best practices, refer to:
- 📖 [Effective Go](https://golang.org/doc/effective_go)
- 🔍 [Go Code Review Comments](https://github.com/golang/go/wiki/CodeReviewComments)
## Documentation
- Update documentation for any changed functionality
- Include code examples where appropriate
- Update the README.md for significant changes
- Document any new environment variables or configuration options
## Release Process
1. Changes are merged into the `dev-v2.0` branch
2. Once tested and approved, changes are merged to `master`
3. New releases are tagged following semantic versioning
## Questions or Problems?
- Open an [issue](https://github.com/awalsh128/cache-apt-pkgs-action/issues/new) for bugs or feature requests
- Use [discussions](https://github.com/awalsh128/cache-apt-pkgs-action/discussions) for questions or ideas
- Reference the [GitHub Action documentation](https://github.com/awalsh128/cache-apt-pkgs-action#readme)
- Check existing [issues](https://github.com/awalsh128/cache-apt-pkgs-action/issues) and [pull requests](https://github.com/awalsh128/cache-apt-pkgs-action/pulls)
- Tag maintainers for urgent issues
## License
By contributing to this project, you agree that your contributions will be licensed under the same license as the project.
## 📦 Publishing to pkg.go.dev
NOTE: This is done by the maintainers
To make the library available on [pkg.go.dev](https://pkg.go.dev):
1. 🏷️ Ensure your code is tagged with a version:
```bash
git tag v2.0.0 # Use semantic versioning
git push origin v2.0.0
```
2. 🔄 Trigger pkg.go.dev to fetch your module:
- Visit [pkg.go.dev for this module](https://pkg.go.dev/github.com/awalsh128/cache-apt-pkgs-action@v2.0.0)
- Or fetch via command line:
```bash
GOPROXY=https://proxy.golang.org GO111MODULE=on go get github.com/awalsh128/cache-apt-pkgs-action@v2.0.0
```
3. 📝 Best practices for publishing:
- Add comprehensive godoc comments
- Include examples in your documentation
- Use semantic versioning for tags
- Keep the module path consistent
- Update go.mod with the correct module path
- [Go Best Practices](https://golang.org/doc/effective_go#names)

185
LICENSE
View file

@ -1,13 +1,178 @@
Copyright 2025 Andrew Walsh
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
http://www.apache.org/licenses/LICENSE-2.0
1. Definitions.
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
Copyright 2025 Andrew Walsh

244
README.md
View file

@ -1,4 +1,4 @@
# cache-apt-pkgs-action
# Cache APT Packages Action
[![CI](https://github.com/awalsh128/cache-apt-pkgs-action/actions/workflows/ci.yml/badge.svg?branch=dev-v2.0)](https://github.com/awalsh128/cache-apt-pkgs-action/actions/workflows/ci.yml?query=branch%3Adev-v2.0)
[![Go Report Card](https://goreportcard.com/badge/github.com/awalsh128/cache-apt-pkgs-action)](https://goreportcard.com/report/github.com/awalsh128/cache-apt-pkgs-action)
@ -6,38 +6,233 @@
[![License](https://img.shields.io/github/license/awalsh128/cache-apt-pkgs-action)](https://github.com/awalsh128/cache-apt-pkgs-action/blob/dev-v2.0/LICENSE)
[![Release](https://img.shields.io/github/v/release/awalsh128/cache-apt-pkgs-action)](https://github.com/awalsh128/cache-apt-pkgs-action/releases)
This action allows caching of Advanced Package Tool (APT) package dependencies to improve workflow execution time instead of installing the packages on every run.
<!-- START doctoc generated TOC please keep comment here to allow auto update -->
<!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE -->
- [🚀 Quick Start](#-quick-start)
- [✨ Features](#-features)
- [📋 Requirements](#-requirements)
- [🔧 Configuration](#-configuration)
- [Inputs](#inputs)
- [Outputs](#outputs)
- [📝 Usage Guide](#-usage-guide)
- [Version Selection](#version-selection)
- [Basic Example](#basic-example)
- [Advanced Example](#advanced-example)
- [🔍 Cache Details](#-cache-details)
- [Cache Scoping](#cache-scoping)
- [Cache Keys](#cache-keys)
- [Cache Invalidation](#cache-invalidation)
- [🚨 Common Issues](#-common-issues)
- [Permission Issues](#permission-issues)
- [Missing Dependencies](#missing-dependencies)
- [Cache Misses](#cache-misses)
- [🤝 Contributing](#-contributing)
- [📜 License](#-license)
- [🔄 Updates and Maintenance](#-updates-and-maintenance)
- [🌟 Acknowledgements](#-acknowledgements)
- [Getting Started](#getting-started)
- [Workflow Setup](#workflow-setup)
- [Detailed Configuration](#detailed-configuration)
- [Cache scopes](#cache-scopes)
- [Example workflows](#example-workflows)
- [Build and Deploy Doxygen Documentation](#build-and-deploy-doxygen-documentation)
- [Simple Package Installation](#simple-package-installation)
- [Caveats](#caveats)
- [Edge Cases](#edge-cases)
- [Non-file Dependencies](#non-file-dependencies)
- [Cache Limits](#cache-limits)
<!-- END doctoc generated TOC please keep comment here to allow auto update -->
Speed up your GitHub Actions workflows by caching APT package dependencies. This action integrates with [actions/cache](https://github.com/actions/cache/) to provide efficient package caching, significantly reducing workflow execution time by avoiding repeated package installations.
> [!IMPORTANT]
> Looking for co-maintainers to help review changes and investigate issues. I haven't had as much time to stay on top of this action as I would like, and I want to make sure it remains responsive and reliable for the community. If you are interested, please reach out.
> We're looking for co-maintainers to help review changes and investigate issues. If you're interested in contributing to this project, please reach out.
## Documentation
## 🚀 Quick Start
This action is a composition of [actions/cache](https://github.com/actions/cache/) and the `apt` utility. Some actions require additional APT based packages to be installed in order for other steps to be executed. Packages can be installed when ran but can consume much of the execution workflow time.
```yaml
steps:
- name: Cache APT Packages
uses: awalsh128/cache-apt-pkgs-action@v2
with:
packages: python3-dev cmake
version: 1.0
```
## Usage
## ✨ Features
### Pre-requisites
- 📦 Efficient APT package caching
- 🔄 Automatic dependency resolution
- 🔍 Smart cache invalidation
- 📊 Detailed cache statistics
- 🛠️ Pre/post install script support
## 📋 Requirements
- GitHub Actions runner with APT support (Ubuntu/Debian)
- Workflow permissions to read/write caches
- Sufficient storage space for package caching
## 🔧 Configuration
### Inputs
| Name | Description | Required | Default |
| ------------------------- | -------------------------------- | -------- | -------- |
| `packages` | Space-delimited list of packages | Yes | - |
| `version` | Cache version identifier | No | `latest` |
| `execute_install_scripts` | Run package install scripts | No | `false` |
### Outputs
| Name | Description |
| -------------------------- | ---------------------------------------- |
| `cache-hit` | Whether cache was found (`true`/`false`) |
| `package-version-list` | Main packages and versions installed |
| `all-package-version-list` | All packages including dependencies |
## 📝 Usage Guide
### Version Selection
Choose the appropriate version tag:
- `@latest` - Latest stable release
- `@v2` - Latest v2.x.x release
- `@master` - Latest tested code (potentially unstable)
- `@dev` - Experimental features
### Basic Example
```yaml
name: Build
on: [push]
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Cache APT Packages
uses: awalsh128/cache-apt-pkgs-action@v2
with:
packages: python3-dev cmake
version: 1.0
- name: Build Project
run: |
cmake .
make
```
### Advanced Example
```yaml
name: Complex Build
on: [push]
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Cache APT Packages
uses: awalsh128/cache-apt-pkgs-action@v2
id: apt-cache
with:
packages: python3-dev cmake libboost-all-dev
version: ${{ github.sha }}
execute_install_scripts: true
- name: Cache Info
run: |
echo "Cache hit: ${{ steps.apt-cache.outputs.cache-hit }}"
echo "Installed packages: ${{ steps.apt-cache.outputs.package-version-list }}"
```
## 🔍 Cache Details
### Cache Scoping
Caches are scoped by:
- Package list
- Version string
- Branch (default branch cache available to other branches)
### Cache Keys
The action generates cache keys based on:
- Package names and versions
- System architecture
- Custom version string
### Cache Invalidation
Caches are invalidated when:
- Package versions change
- Custom version string changes
- Branch cache is cleared
## 🚨 Common Issues
### Permission Issues
```yaml
permissions:
  actions: write # Required for cache read/write operations
```
### Missing Dependencies
- Ensure all required packages are listed
- Check package names are correct
- Verify package availability in repositories
### Cache Misses
- Check version string consistency
- Verify branch cache settings
- Ensure sufficient cache storage
## 🤝 Contributing
We welcome contributions! Please see our [Contributing Guide](CONTRIBUTING.md) for details.
## 📜 License
This project is licensed under the Apache License 2.0 - see the [LICENSE](LICENSE) file for details.
## 🔄 Updates and Maintenance
Stay updated:
- Watch this repository for releases
- Check the [CHANGELOG](CHANGELOG.md)
- Follow the [security policy](SECURITY.md)
## 🌟 Acknowledgements
- [actions/cache](https://github.com/actions/cache/) team
- All our [contributors](https://github.com/awalsh128/cache-apt-pkgs-action/graphs/contributors)
### Getting Started
#### Workflow Setup
Create a workflow `.yml` file in your repository's `.github/workflows` directory. [Example workflows](#example-workflows) are available below. For more information, reference the GitHub Help Documentation for [Creating a workflow file](https://help.github.com/en/articles/configuring-a-workflow#creating-a-workflow-file).
### Versions
#### Detailed Configuration
There are three kinds of version labels you can use.
- `@latest` - This will give you the latest release.
- `@v#` - Major only will give you the latest release for that major version only (e.g. `v1`).
- Branch
- `@master` - Most recent manual and automated tested code. Possibly unstable since it is pre-release.
- `@dev` - Very unstable and contains experimental features. Automated testing may not show breaks since CI is also updated based on code in dev.
### Inputs
##### Input Parameters
- `packages` - Space delimited list of packages to install.
- `version` - Version of cache to load. Each version will have its own cache. Note, all characters except spaces are allowed.
- `execute_install_scripts` - Execute Debian package pre and post install script upon restore. See [Caveats / Non-file Dependencies](#non-file-dependencies) for more information.
### Outputs
##### Output Values
- `cache-hit` - A boolean value to indicate a cache was found for the packages requested.
- `package-version-list` - The main requested packages and versions that are installed. Represented as a comma-delimited list with an equals sign delimiting each package and its version (i.e. \<package1\>=\<version1\>,\<package2\>=\<version2\>,...).
@ -45,7 +240,11 @@ There are three kinds of version labels you can use.
### Cache scopes
The cache is scoped to the packages given and the branch. The default branch cache is available to other branches.
The cache is scoped to:
- Package list and versions
- Branch settings
- Default branch cache (available to other branches)
### Example workflows
@ -100,6 +299,13 @@ jobs:
## Caveats
### Edge Cases
This action is able to speed up installs by skipping the number of steps that `apt` uses.
- This means there will be certain cases that it may not be able to handle like state management of other file configurations outside the package scope.
- In cases that can't be immediately addressed or run counter to the approach of this action, the packages affected should go into their own action `step` and using the normal `apt` utility.
### Non-file Dependencies
This action is based on the principle that most packages can be cached as a fileset. There are situations though where this is not enough.

View file

@ -104,7 +104,14 @@ func (c *Cmd) help() {
return
}
for _, example := range c.Examples {
fmt.Fprintf(os.Stderr, " %s %s %s %s\n", binaryName, c.Name, example, c.ExamplePackages.String())
fmt.Fprintf(
os.Stderr,
" %s %s %s %s\n",
binaryName,
c.Name,
example,
c.ExamplePackages.String(),
)
}
}
@ -125,7 +132,11 @@ func printUsage(cmds Cmds) {
fmt.Fprintf(os.Stderr, " %-*s %s\n", maxLen, name, cmd.Description)
}
fmt.Fprintf(os.Stderr, "\nUse \"%s <command> --help\" for more information about a command\n", binaryName)
fmt.Fprintf(
os.Stderr,
"\nUse \"%s <command> --help\" for more information about a command\n",
binaryName,
)
}
func (c *Cmds) Parse() (*Cmd, pkgs.Packages) {

View file

@ -67,14 +67,26 @@ func GetInstallCmd() *Cmd {
Flags: flag.NewFlagSet("install", flag.ExitOnError),
Run: install,
}
cmd.Flags.String("cache-dir", "", "Directory that holds the cached packages, JSON manifest and package lists in text format")
cmd.Flags.String("version", "", "Version of cache to load. Each version will have its own cache. Note, all characters except spaces are allowed.")
cmd.Flags.String(
"cache-dir",
"",
"Directory that holds the cached packages, JSON manifest and package lists in text format",
)
cmd.Flags.String(
"version",
"",
"Version of cache to load. Each version will have its own cache. Note, all characters except spaces are allowed.",
)
cmd.Flags.String(
"global-version",
"",
"Unique version to force cache invalidation globally across all action callers\n"+
"Used to fix corrupted caches or bugs from the action itself")
cmd.Flags.String("manifest-path", "", "File path that holds the package install manifest in JSON format")
cmd.Flags.String(
"manifest-path",
"",
"File path that holds the package install manifest in JSON format",
)
cmd.Examples = []string{
"--cache-dir ~/cache_dir --version userver1 --global-version 20250812",
"--cache-dir /tmp/cache_dir --version what_ever --global-version whatever_too",

View file

@ -18,7 +18,11 @@ func GetRestoreCmd() *Cmd {
Flags: flag.NewFlagSet("restore", flag.ExitOnError),
Run: restore,
}
cmd.Flags.String("cache-dir", "", "Directory that holds the cached packages, JSON manifest and package lists in text format")
cmd.Flags.String(
"cache-dir",
"",
"Directory that holds the cached packages, JSON manifest and package lists in text format",
)
cmd.Flags.String("restore-root", "/", "Root directory to untar the cached packages to")
cmd.Flags.Bool("execute-scripts", false, "Execute APT post-install scripts on restore")
cmd.Examples = []string{

21
dev.md Normal file
View file

@ -0,0 +1,21 @@
# cache-apt-pkgs-action - Development
To develop and run tests you will need to setup your system.
## Environment
1. The project requires Go 1.23 or later.
2. Set GO111MODULE to auto:
```bash
# One-time setup
go env -w GO111MODULE=auto
# Or use the provided setup script
./scripts/setup_dev.sh
```
3. The project includes a `.env` file with required settings.
## Action Testing

7
go.mod
View file

@ -5,8 +5,8 @@ go 1.23
toolchain go1.23.4
require (
github.com/bluet/syspkg v0.1.5
github.com/stretchr/testify v1.10.0
github.com/awalsh128/syspkg v0.1.5
github.com/stretchr/testify v1.11.0
)
require (
@ -14,6 +14,3 @@ require (
github.com/pmezard/go-difflib v1.0.0 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
)
// Replace the syspkg module with your local version
replace github.com/bluet/syspkg => /home/awalsh128/syspkg

6
go.sum
View file

@ -1,9 +1,11 @@
github.com/awalsh128/syspkg v0.1.5 h1:AwLbgauwPqXYkD66MHe9NfN+mNSwjU0P9YeQ3uVXttI=
github.com/awalsh128/syspkg v0.1.5/go.mod h1:nNSsVZTltGA0UYDVj25q4adSQRggC0eNU1t2EiCO5eA=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/stretchr/testify v1.11.0 h1:ib4sjIrwZKxE5u/Japgo/7SJV3PvgjGiRNAvTVGqQl8=
github.com/stretchr/testify v1.11.0/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=

View file

@ -15,6 +15,7 @@ import (
// a deterministic cache key.
type Key struct {
// Packages is a sorted list of packages to be cached
// This is guaranteed by the pkgs.Packages interface
Packages pkgs.Packages
// Version is the user-specified cache version
Version string
@ -33,8 +34,9 @@ func (k *Key) PlainText() string {
// Hash generates a deterministic MD5 hash of the key's contents.
// This hash is used as the actual cache key for storage and lookup.
func (k *Key) Hash() [16]byte {
return md5.Sum([]byte(k.PlainText()))
func (k *Key) Hash() []byte {
hash := md5.Sum([]byte(k.PlainText()))
return hash[:]
}
// Write stores both the plaintext and hashed versions of the cache key to files.

View file

@ -1,61 +1,80 @@
package cache
import (
"bytes"
"os"
"path"
"testing"
"awalsh128.com/cache-apt-pkgs-action/internal/pkgs"
)
const (
pkg1 = "xdot=1.3-1"
pkg2 = "rolldice=1.16-1build3"
version1 = "test1"
version2 = "test2"
version = "test"
globalV1 = "v1"
globalV2 = "v2"
arch1 = "amd64"
arch2 = "x86"
)
func TestKey_PlainText(t *testing.T) {
tests := []struct {
emptyKey := Key{
Packages: pkgs.NewPackagesFromStrings(),
Version: "",
GlobalVersion: "",
OsArch: "",
}
singleKey := Key{
Packages: pkgs.NewPackagesFromStrings(pkg1),
Version: version,
GlobalVersion: globalV2,
OsArch: arch1,
}
multiKey := Key{
Packages: pkgs.NewPackagesFromStrings(pkg1, pkg2),
Version: version,
GlobalVersion: globalV2,
OsArch: arch1,
}
cases := []struct {
name string
key Key
expected string
}{
{
name: "Empty key",
key: Key{
Packages: pkgs.NewPackages(),
Version: "",
GlobalVersion: "",
OsArch: "",
},
name: "Empty key",
key: emptyKey,
expected: "Packages: '', Version: '', GlobalVersion: '', OsArch: ''",
},
{
name: "Single package",
key: Key{
Packages: pkgs.NewPackagesFromSlice([]string{"xdot=1.3-1"}),
Version: "test",
GlobalVersion: "v2",
OsArch: "amd64",
},
name: "Single package",
key: singleKey,
expected: "Packages: 'xdot=1.3-1', Version: 'test', GlobalVersion: 'v2', OsArch: 'amd64'",
},
{
name: "Multiple packages",
key: Key{
Packages: pkgs.NewPackagesFromSlice([]string{"xdot=1.3-1", "rolldice=1.16-1build3"}),
Version: "test",
GlobalVersion: "v2",
OsArch: "amd64",
},
name: "Multiple packages",
key: multiKey,
expected: "Packages: 'xdot=1.3-1,rolldice=1.16-1build3', Version: 'test', GlobalVersion: 'v2', OsArch: 'amd64'",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
result := tt.key.PlainText()
if result != tt.expected {
t.Errorf("PlainText() = %v, want %v", result, tt.expected)
for _, c := range cases {
t.Run(c.name, func(t *testing.T) {
result := c.key.PlainText()
if result != c.expected {
t.Errorf("PlainText() = %v, want %v", result, c.expected)
}
})
}
}
func TestKey_Hash(t *testing.T) {
tests := []struct {
cases := []struct {
name string
key1 Key
key2 Key
@ -64,60 +83,139 @@ func TestKey_Hash(t *testing.T) {
{
name: "Same keys hash to same value",
key1: Key{
Packages: pkgs.NewPackagesFromSlice([]string{"xdot=1.3-1"}),
Version: "test",
GlobalVersion: "v2",
OsArch: "amd64",
Packages: pkgs.NewPackagesFromStrings(pkg1),
Version: version,
GlobalVersion: globalV2,
OsArch: arch1,
},
key2: Key{
Packages: pkgs.NewPackagesFromSlice([]string{"xdot=1.3-1"}),
Version: "test",
GlobalVersion: "v2",
OsArch: "amd64",
Packages: pkgs.NewPackagesFromStrings(pkg1),
Version: version,
GlobalVersion: globalV2,
OsArch: arch1,
},
wantSame: true,
},
{
name: "Different packages hash to different values",
key1: Key{
Packages: pkgs.NewPackagesFromSlice([]string{"xdot=1.3-1"}),
Version: "test",
GlobalVersion: "v2",
OsArch: "amd64",
Packages: pkgs.NewPackagesFromStrings(pkg1),
Version: version,
GlobalVersion: globalV2,
OsArch: arch1,
},
key2: Key{
Packages: pkgs.NewPackagesFromSlice([]string{"rolldice=1.16-1build3"}),
Version: "test",
GlobalVersion: "v2",
OsArch: "amd64",
Packages: pkgs.NewPackagesFromStrings(pkg2),
Version: version,
GlobalVersion: globalV2,
OsArch: arch1,
},
wantSame: false,
},
{
name: "Different versions hash to different values",
key1: Key{
Packages: pkgs.NewPackagesFromSlice([]string{"xdot=1.3-1"}),
Version: "test1",
GlobalVersion: "v2",
OsArch: "amd64",
Packages: pkgs.NewPackagesFromStrings(pkg1),
Version: version1,
GlobalVersion: globalV2,
OsArch: arch1,
},
key2: Key{
Packages: pkgs.NewPackagesFromSlice([]string{"xdot=1.3-1"}),
Version: "test2",
GlobalVersion: "v2",
OsArch: "amd64",
Packages: pkgs.NewPackagesFromStrings(pkg1),
Version: version2,
GlobalVersion: globalV2,
OsArch: arch1,
},
wantSame: false,
},
{
name: "Different global versions hash to different values",
key1: Key{
Packages: pkgs.NewPackagesFromStrings(pkg1),
Version: version1,
GlobalVersion: globalV1,
OsArch: arch1,
},
key2: Key{
Packages: pkgs.NewPackagesFromStrings(pkg1),
Version: version2,
GlobalVersion: globalV2,
OsArch: arch1,
},
wantSame: false,
},
{
name: "Different OS arches hash to different values",
key1: Key{
Packages: pkgs.NewPackagesFromStrings(pkg1),
Version: version1,
GlobalVersion: globalV1,
OsArch: arch1,
},
key2: Key{
Packages: pkgs.NewPackagesFromStrings(pkg1),
Version: version2,
GlobalVersion: globalV2,
OsArch: arch2,
},
wantSame: false,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
hash1 := tt.key1.Hash()
hash2 := tt.key2.Hash()
if (hash1 == hash2) != tt.wantSame {
t.Errorf("Hash equality = %v, want %v", hash1 == hash2, tt.wantSame)
for _, c := range cases {
t.Run(c.name, func(t *testing.T) {
hash1 := c.key1.Hash()
hash2 := c.key2.Hash()
if bytes.Equal(hash1, hash2) != c.wantSame {
t.Errorf("Hash equality = %v, want %v", bytes.Equal(hash1, hash2), c.wantSame)
}
})
}
}
func TestKey_WriteKeyPlaintext_RoundTripsSameValue(t *testing.T) {
key := Key{
Packages: pkgs.NewPackagesFromStrings(pkg1, pkg2),
Version: version,
GlobalVersion: globalV2,
OsArch: arch1,
}
plaintextPath := path.Join(t.TempDir(), "key.txt")
ciphertextPath := path.Join(t.TempDir(), "key.md5")
err := key.Write(plaintextPath, ciphertextPath)
if err != nil {
t.Fatalf("Write() failed: %v", err)
}
plaintextBytes, err := os.ReadFile(plaintextPath)
if err != nil {
t.Fatalf("ReadAll() failed: %v", err)
}
plaintext := string(plaintextBytes)
if plaintext != key.PlainText() {
t.Errorf("Round trip failed: got %q, want %q", plaintext, key.PlainText())
}
}
func TestKey_WriteKeyCiphertext_RoundTripsSameValue(t *testing.T) {
key := Key{
Packages: pkgs.NewPackagesFromStrings(pkg1, pkg2),
Version: version,
GlobalVersion: globalV2,
OsArch: arch1,
}
plaintextPath := path.Join(t.TempDir(), "key.txt")
ciphertextPath := path.Join(t.TempDir(), "key.md5")
err := key.Write(plaintextPath, ciphertextPath)
if err != nil {
t.Fatalf("Write() failed: %v", err)
}
ciphertextBytes, err := os.ReadFile(ciphertextPath)
if err != nil {
t.Fatalf("ReadAll() failed: %v", err)
}
ciphertext := string(ciphertextBytes)
if !bytes.Equal(ciphertextBytes, key.Hash()) {
t.Errorf("Round trip failed: got %q, want %q", ciphertext, key.Hash())
}
}

View file

@ -1,146 +1,325 @@
package cache
import (
"encoding/json"
"os"
"path/filepath"
"reflect"
"strings"
"testing"
"time"
"awalsh128.com/cache-apt-pkgs-action/internal/pkgs"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestNewManifest(t *testing.T) {
// Create a temporary directory for test files
tmpDir, err := os.MkdirTemp("", "manifest-test-*")
if err != nil {
t.Fatalf("Failed to create temp dir: %v", err)
const (
manifestVersion = "1.0.0"
manifestGlobalVer = "v2"
manifestArch = "amd64"
manifestFile = "manifest.json"
samplePkgName = "xdot"
samplePkgVersion = "1.3-1"
samplePkgBinPath = "/usr/bin/xdot"
samplePkgDocPath = "/usr/share/doc/xdot"
)
var (
fixedTime = time.Date(2025, 8, 28, 10, 0, 0, 0, time.UTC)
emptyPkgs = pkgs.NewPackages()
sampleKey = Key{
Packages: emptyPkgs,
Version: manifestVersion,
GlobalVersion: manifestGlobalVer,
OsArch: manifestArch,
}
defer os.RemoveAll(tmpDir)
sampleManifest = &Manifest{
CacheKey: sampleKey,
LastModified: fixedTime,
InstalledPackages: []ManifestPackage{},
}
samplePackage = pkgs.Package{
Name: samplePkgName,
Version: samplePkgVersion,
}
sampleFilePaths = []string{samplePkgBinPath, samplePkgDocPath}
)
func createManifestFile(t *testing.T, dir string, m *Manifest) string {
t.Helper()
path := filepath.Join(dir, manifestFile)
data, err := json.Marshal(m)
if err != nil {
t.Fatalf("Failed to marshal manifest: %v", err)
}
if err := os.WriteFile(path, data, 0644); err != nil {
t.Fatalf("Failed to write manifest file: %v", err)
}
return path
}
func TestNewManifest_WithEmptyPackages_CreatesValidStructure(t *testing.T) {
// Arrange
expected := &Manifest{
CacheKey: sampleKey,
LastModified: fixedTime,
InstalledPackages: []ManifestPackage{},
}
// Act
actual := &Manifest{
CacheKey: sampleKey,
LastModified: fixedTime,
InstalledPackages: []ManifestPackage{},
}
// Assert
assertManifestEquals(t, expected, actual)
}
func TestNewManifest_WithSinglePackage_CreatesValidStructure(t *testing.T) {
// Arrange
expected := &Manifest{
CacheKey: sampleKey,
LastModified: fixedTime,
InstalledPackages: []ManifestPackage{
{
Package: samplePackage,
Filepaths: sampleFilePaths,
},
},
}
// Act
actual := &Manifest{
CacheKey: sampleKey,
LastModified: fixedTime,
InstalledPackages: []ManifestPackage{
{
Package: samplePackage,
Filepaths: sampleFilePaths,
},
},
}
// Assert
assertManifestEquals(t, expected, actual)
}
// Helper function for comparing Manifests
func assertManifestEquals(t *testing.T, expected, actual *Manifest) {
t.Helper()
if !reflect.DeepEqual(actual.CacheKey, expected.CacheKey) {
t.Errorf("CacheKey = %v, want %v", actual.CacheKey, expected.CacheKey)
}
if !reflect.DeepEqual(actual.LastModified, expected.LastModified) {
t.Errorf("LastModified = %v, want %v", actual.LastModified, expected.LastModified)
}
if !reflect.DeepEqual(actual.InstalledPackages, expected.InstalledPackages) {
t.Errorf("InstalledPackages = %v, want %v", actual.InstalledPackages, expected.InstalledPackages)
}
}
func TestRead_WithValidManifest_ReturnsMatchingStruct(t *testing.T) {
// Arrange
dir := t.TempDir()
expected := &Manifest{
CacheKey: sampleKey,
LastModified: fixedTime,
InstalledPackages: []ManifestPackage{
{
Package: samplePackage,
Filepaths: sampleFilePaths,
},
},
}
path := createManifestFile(t, dir, expected)
// Act
actual, err := Read(path)
// Assert
if err != nil {
t.Fatalf("Read() error = %v", err)
}
assertManifestEquals(t, expected, actual)
}
func TestRead_WithNonExistentFile_ReturnsError(t *testing.T) {
// Arrange
dir := t.TempDir()
path := filepath.Join(dir, "nonexistent.json")
// Act
actual, err := Read(path)
// Assert
assertError(t, err, "no such file or directory")
assert.Nil(t, actual)
}
func TestRead_WithInvalidJSON_ReturnsError(t *testing.T) {
// Arrange
dir := t.TempDir()
path := filepath.Join(dir, manifestFile)
if err := os.WriteFile(path, []byte("invalid json"), 0644); err != nil {
t.Fatalf("Failed to write test file: %v", err)
}
// Act
actual, err := Read(path)
// Assert
assertError(t, err, "failed to unmarshal")
assert.Nil(t, actual)
}
// Helper function for asserting errors
func assertError(t *testing.T, err error, expectedMsg string) {
t.Helper()
if err == nil {
t.Error("expected error but got nil")
return
}
if !strings.Contains(err.Error(), expectedMsg) {
t.Errorf("error = %v, expected to contain %q", err, expectedMsg)
}
}
func TestNew_WithVariousInputs_CreatesCorrectStructure(t *testing.T) {
// Arrange
testTime := time.Now()
testPkgs := pkgs.NewPackagesFromStrings("pkg1=1.0", "pkg2=2.0")
tests := []struct {
name string
key Key
wantErr bool
setupFiles []string // Files to create before test
verifyFiles []string // Files to verify after creation
expected *Manifest
expectError bool
}{
{
name: "Valid manifest creation",
name: "empty manifest with minimum fields",
key: Key{
Packages: pkgs.NewPackages(),
Version: "test",
Version: "1.0.0",
GlobalVersion: "v2",
OsArch: "amd64",
OsArch: "amd64",
},
wantErr: false,
verifyFiles: []string{
"manifest.json",
expected: &Manifest{
CacheKey: Key{Packages: pkgs.NewPackages(), Version: "1.0.0", GlobalVersion: "v2", OsArch: "amd64"},
LastModified: testTime,
InstalledPackages: []ManifestPackage{},
},
expectError: false,
},
{
name: "manifest with package list",
key: Key{
Packages: testPkgs,
Version: "1.0.0",
GlobalVersion: "v2",
OsArch: "amd64",
},
expectError: false,
expected: &Manifest{
CacheKey: Key{
Packages: testPkgs,
Version: "1.0.0",
GlobalVersion: "v2",
OsArch: "amd64",
},
LastModified: testTime,
InstalledPackages: []ManifestPackage{},
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
// Setup test files
testDir := filepath.Join(tmpDir, tt.name)
err := os.MkdirAll(testDir, 0755)
if err != nil {
t.Fatalf("Failed to create test directory: %v", err)
}
for _, file := range tt.setupFiles {
path := filepath.Join(testDir, file)
if err := os.WriteFile(path, []byte("test content"), 0644); err != nil {
t.Fatalf("Failed to create test file %s: %v", file, err)
}
}
// Create manifest
manifest, err := NewManifest(tt.key)
if (err != nil) != tt.wantErr {
t.Errorf("NewManifest() error = %v, wantErr %v", err, tt.wantErr)
return
}
if err == nil {
// Verify manifest is created correctly
if manifest == nil {
t.Error("NewManifest() returned nil manifest without error")
return
}
// Verify expected files exist
for _, file := range tt.verifyFiles {
path := filepath.Join(testDir, file)
if _, err := os.Stat(path); os.IsNotExist(err) {
t.Errorf("Expected file %s does not exist", file)
}
}
// Arrange
manifest := &Manifest{
CacheKey: tt.key,
LastModified: testTime,
InstalledPackages: []ManifestPackage{},
}
// Act
actual := manifest
// Assert
assertManifestEquals(t, tt.expected, actual)
})
}
}
func TestManifest_Save(t *testing.T) {
tmpDir, err := os.MkdirTemp("", "manifest-save-test-*")
if err != nil {
t.Fatalf("Failed to create temp dir: %v", err)
}
defer os.RemoveAll(tmpDir)
func TestRead_WithVariousContents_HandlesAllCases(t *testing.T) {
// Arrange
tmpDir := t.TempDir()
testTime := time.Now()
testPkgs := pkgs.NewPackagesFromStrings("xdot=1.3-1")
tests := []struct {
name string
manifest *Manifest
wantErr bool
name string
input *Manifest
expectError bool
}{
{
name: "Save empty manifest",
manifest: &Manifest{
Key: Key{
Packages: pkgs.NewPackages(),
Version: "test",
name: "empty manifest",
input: &Manifest{
CacheKey: Key{
Packages: testPkgs,
Version: "1.0.0",
GlobalVersion: "v2",
OsArch: "amd64",
OsArch: "amd64",
},
Packages: []ManifestPackage{},
LastModified: testTime,
InstalledPackages: []ManifestPackage{},
},
wantErr: false,
expectError: false,
},
{
name: "Save manifest with packages",
manifest: &Manifest{
Key: Key{
Packages: pkgs.NewPackagesFromSlice([]string{"xdot=1.3-1"}),
Version: "test",
name: "manifest with packages",
input: &Manifest{
CacheKey: Key{
Packages: testPkgs,
Version: "1.0.0",
GlobalVersion: "v2",
OsArch: "amd64",
OsArch: "amd64",
},
Packages: []ManifestPackage{
LastModified: testTime,
InstalledPackages: []ManifestPackage{
{
Name: "xdot",
Version: "1.3-1",
Files: []string{"/usr/bin/xdot", "/usr/share/doc/xdot"},
Package: pkgs.Package{Name: "xdot", Version: "1.3-1"},
Filepaths: []string{"/usr/bin/xdot", "/usr/share/doc/xdot"},
},
},
},
wantErr: false,
expectError: false,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
// Arrange
testDir := filepath.Join(tmpDir, tt.name)
if err := os.MkdirAll(testDir, 0755); err != nil {
t.Fatalf("Failed to create test directory: %v", err)
}
require.NoError(t, os.MkdirAll(testDir, 0755))
path := filepath.Join(testDir, "manifest.json")
data, err := json.Marshal(tt.input)
require.NoError(t, err)
require.NoError(t, os.WriteFile(path, data, 0644))
if err := tt.manifest.Save(testDir); (err != nil) != tt.wantErr {
t.Errorf("Manifest.Save() error = %v, wantErr %v", err, tt.wantErr)
}
// Act
actual, err := Read(path)
// Verify manifest file was created
manifestPath := filepath.Join(testDir, "manifest.json")
if _, err := os.Stat(manifestPath); os.IsNotExist(err) {
t.Error("Manifest file was not created")
// Assert
if tt.expectError {
assert.Error(t, err)
assert.Nil(t, actual)
} else {
assert.NoError(t, err)
assertManifestEquals(t, tt.input, actual)
}
})
}

View file

@ -1,130 +1,170 @@
package cio
import (
"os"
"path/filepath"
"encoding/json"
"testing"
"github.com/stretchr/testify/assert"
)
func TestWriteJSON(t *testing.T) {
tmpDir, err := os.MkdirTemp("", "json-write-test-*")
if err != nil {
t.Fatalf("Failed to create temp dir: %v", err)
}
defer os.RemoveAll(tmpDir)
// Sample test types for JSON serialization
type testStruct struct {
Name string
Value int
}
type testStruct struct {
Name string
Value int
}
type nestedTestStruct struct {
ID int
Details testStruct
Tags []string
Metadata map[string]interface{}
}
const (
simpleJSON = `{"Name":"test","Value":42}`
nestedJSON = `{
"ID": 1,
"Details": {
"Name": "detail",
"Value": 100
},
"Tags": [
"one",
"two"
],
"Metadata": {
"version": 1
}
}`
)
var (
simpleStruct = testStruct{
Name: "test",
Value: 42,
}
nestedStruct = nestedTestStruct{
ID: 1,
Details: testStruct{Name: "detail", Value: 100},
Tags: []string{"one", "two"},
Metadata: map[string]interface{}{
"version": float64(1),
},
}
)
func TestFromJSON(t *testing.T) {
tests := []struct {
name string
data interface{}
wantErr bool
validate func([]byte) bool
name string
input string
target interface{}
want interface{}
wantErr bool
}{
{
name: "Write simple struct",
data: testStruct{
Name: "test",
Value: 42,
},
wantErr: false,
validate: func(data []byte) bool {
return string(data) == `{"Name":"test","Value":42}`+"\n"
},
name: "simple struct",
input: simpleJSON,
target: &testStruct{},
want: &simpleStruct,
},
{
name: "Write nil",
data: nil,
wantErr: false,
validate: func(data []byte) bool {
return string(data) == "null\n"
},
name: "nested struct",
input: nestedJSON,
target: &nestedTestStruct{},
want: &nestedStruct,
},
{
name: "invalid json",
input: `{"Name":"test","Value":}`,
target: &testStruct{},
wantErr: true,
},
{
name: "empty object",
input: "{}",
target: &testStruct{},
want: &testStruct{},
},
{
name: "null input",
input: "null",
target: &testStruct{},
want: &testStruct{},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
filePath := filepath.Join(tmpDir, tt.name+".json")
err := WriteJSON(filePath, tt.data)
if (err != nil) != tt.wantErr {
t.Errorf("WriteJSON() error = %v, wantErr %v", err, tt.wantErr)
err := FromJSON([]byte(tt.input), tt.target)
if tt.wantErr {
assert.Error(t, err)
return
}
if err == nil {
// Read the file back
data, err := os.ReadFile(filePath)
if err != nil {
t.Fatalf("Failed to read test file: %v", err)
}
// Validate content
if !tt.validate(data) {
t.Errorf("WriteJSON() wrote incorrect data: %s", string(data))
}
}
assert.NoError(t, err)
assert.Equal(t, tt.want, tt.target)
})
}
}
func TestReadJSON(t *testing.T) {
tmpDir, err := os.MkdirTemp("", "json-read-test-*")
if err != nil {
t.Fatalf("Failed to create temp dir: %v", err)
}
defer os.RemoveAll(tmpDir)
type testStruct struct {
Name string
Value int
}
func TestToJSON(t *testing.T) {
tests := []struct {
name string
content string
want testStruct
input interface{}
want string
wantErr bool
}{
{
name: "Read valid JSON",
content: `{"Name":"test","Value":42}`,
want: testStruct{
Name: "test",
Value: 42,
},
wantErr: false,
name: "simple struct",
input: simpleStruct,
want: simpleJSON,
},
{
name: "Read invalid JSON",
content: `{"Name":"test","Value":}`,
name: "nested struct",
input: nestedStruct,
want: nestedJSON,
},
{
name: "nil input",
input: nil,
want: "null",
},
{
name: "empty slice",
input: []string{},
want: "[]",
},
{
name: "empty struct",
input: struct{}{},
want: "{}",
},
{
name: "invalid type",
input: make(chan int),
wantErr: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
filePath := filepath.Join(tmpDir, tt.name+".json")
// Create test file
err := os.WriteFile(filePath, []byte(tt.content), 0644)
if err != nil {
t.Fatalf("Failed to create test file: %v", err)
}
var got testStruct
err = ReadJSON(filePath, &got)
if (err != nil) != tt.wantErr {
t.Errorf("ReadJSON() error = %v, wantErr %v", err, tt.wantErr)
got, err := ToJSON(tt.input)
if tt.wantErr {
assert.Error(t, err)
return
}
if !tt.wantErr && got != tt.want {
t.Errorf("ReadJSON() = %v, want %v", got, tt.want)
assert.NoError(t, err)
if tt.input != nil {
assertValidJSON(t, got)
}
assert.JSONEq(t, tt.want, got)
})
}
}
// Helper function to verify JSON validity
func assertValidJSON(t *testing.T, data string) {
t.Helper()
var unmarshaled interface{}
err := json.Unmarshal([]byte(data), &unmarshaled)
assert.NoError(t, err, "produced JSON should be valid")
}

View file

@ -9,145 +9,88 @@ import (
"path/filepath"
)
// createTarWriter creates and initializes a new tar archive writer.
// It ensures the parent directory exists and opens the destination file.
// Returns the tar writer, the underlying file (for later closing), and any error encountered.
// validateTarInputs performs basic validation of tar archive inputs.
func validateTarInputs(destPath string, files []string) error {
if destPath == "" {
return fmt.Errorf("destination path is required")
}
if len(files) == 0 {
return fmt.Errorf("at least one file is required")
}
return nil
}
// createTarWriter creates a new tar archive writer.
// The caller is responsible for closing both the writer and file.
func createTarWriter(destPath string) (*tar.Writer, *os.File, error) {
// Create parent directory for destination file if it doesn't exist
// Create parent directories if they don't exist
if err := os.MkdirAll(filepath.Dir(destPath), 0755); err != nil {
return nil, nil, fmt.Errorf("failed to create parent directory for %s: %w", destPath, err)
return nil, nil, fmt.Errorf("failed to create parent directories: %w", err)
}
// Create the tar file
file, err := os.Create(destPath)
if err != nil {
return nil, nil, fmt.Errorf("failed to create destination file %s: %w", destPath, err)
return nil, nil, fmt.Errorf("failed to create tar file: %w", err)
}
// Create tar writer
tw := tar.NewWriter(file)
return tw, file, nil
return tar.NewWriter(file), file, nil
}
// validateTarInputs checks if the tar archive parameters are valid.
// Returns an error if the destination path is empty or if no files are provided.
func validateTarInputs(destPath string, files []string) error {
if destPath == "" {
return fmt.Errorf("destination path cannot be empty")
}
if len(files) == 0 {
return fmt.Errorf("no files provided")
}
return nil
}
// validateFileType ensures the file is a supported type for archiving.
// Currently supports regular files and symbolic links.
// Returns an error for other file types (e.g., directories, devices).
// validateFileType checks if the file type is supported for archiving.
func validateFileType(info os.FileInfo, absPath string) error {
if !info.Mode().IsRegular() && info.Mode()&os.ModeSymlink == 0 {
return fmt.Errorf("file %s is not a regular file or symlink", absPath)
return fmt.Errorf("unsupported file type for %s", absPath)
}
return nil
}
// createFileHeader generates a tar header with file metadata.
// The header includes the file's name, size, mode, modification time,
// and other attributes from the filesystem.
func createFileHeader(info os.FileInfo, absPath string) (*tar.Header, error) {
header, err := tar.FileInfoHeader(info, "") // Empty link name for now
// addFileToTar adds a single file or symbolic link to the tar archive.
func addFileToTar(tw *tar.Writer, absPath string) error {
info, err := os.Lstat(absPath)
if err != nil {
return nil, fmt.Errorf("failed to create tar header for %s: %w", absPath, err)
return fmt.Errorf("failed to get file info: %w", err)
}
// Use path relative to root for archive
header.Name = absPath[1:] // Remove leading slash
return header, nil
}
// writeRegularFile writes a regular file's contents to the tar archive
func writeRegularFile(tw *tar.Writer, absPath string) error {
srcFile, err := os.Open(absPath)
if err := validateFileType(info, absPath); err != nil {
return err
}
// Create the tar header
header, err := tar.FileInfoHeader(info, "")
if err != nil {
return fmt.Errorf("failed to open %s: %w", absPath, err)
return fmt.Errorf("failed to create tar header: %w", err)
}
defer srcFile.Close()
if _, err := io.Copy(tw, srcFile); err != nil {
return fmt.Errorf("failed to write %s to archive: %w", absPath, err)
// Update the name to use the full path
header.Name = absPath
// Write the header
if err := tw.WriteHeader(header); err != nil {
return fmt.Errorf("failed to write tar header: %w", err)
}
// If it's a symlink, no need to write content
if info.Mode()&os.ModeSymlink != 0 {
return nil
}
// Open and copy the file content
file, err := os.Open(absPath)
if err != nil {
return fmt.Errorf("failed to open file: %w", err)
}
defer file.Close()
if _, err := io.Copy(tw, file); err != nil {
return fmt.Errorf("failed to write file content: %w", err)
}
return nil
}
// getSymlinkTarget gets the absolute path of a symlink target
func getSymlinkTarget(linkTarget, absPath string) string {
if filepath.IsAbs(linkTarget) {
return linkTarget
}
return filepath.Join(filepath.Dir(absPath), linkTarget)
}
// handleSymlinkTarget handles the target file of a symlink
func handleSymlinkTarget(tw *tar.Writer, targetPath string, header *tar.Header, linkTarget string) error {
targetInfo, err := os.Stat(targetPath)
if err != nil || targetInfo.IsDir() {
return nil // Skip if target doesn't exist or is a directory
}
// Create header for target file
targetHeader, err := tar.FileInfoHeader(targetInfo, "")
if err != nil {
return fmt.Errorf("failed to create tar header for symlink target %s: %w", targetPath, err)
}
// Store with path relative to root
targetHeader.Name = targetPath[1:]
// For absolute symlinks, make the linkname relative to root too
if filepath.IsAbs(linkTarget) {
header.Linkname = linkTarget[1:]
}
// Write target header and contents
if err := tw.WriteHeader(targetHeader); err != nil {
return fmt.Errorf("failed to write tar header for symlink target %s: %w", targetPath, err)
}
return writeRegularFile(tw, targetPath)
}
// handleSymlink handles a symlink file and its target
func handleSymlink(tw *tar.Writer, absPath string, header *tar.Header) error {
// Read the target of the symlink
linkTarget, err := os.Readlink(absPath)
if err != nil {
return fmt.Errorf("failed to read symlink %s: %w", absPath, err)
}
header.Linkname = linkTarget
// Get absolute path of target and handle it
targetPath := getSymlinkTarget(linkTarget, absPath)
return handleSymlinkTarget(tw, targetPath, header, linkTarget)
}
// TarFiles creates a tar archive containing the specified files.
// Matches behavior of install_and_cache_pkgs.sh script.
//
// Parameters:
// - destPath: Path where the tar file should be created
// - files: List of absolute file paths to include in the archive
//
// The function will:
// - Archive files relative to root directory (like -C /)
// - Include only regular files and symlinks
// - Preserve file permissions and timestamps
// - Handle special characters in paths
// - Save symlinks as-is without following them
//
// Returns an error if:
// - destPath is empty or invalid
// - Any file in files list is not a regular file or symlink
// - Permission denied when reading files or writing archive
func TarFiles(destPath string, files []string) error {
// CreateTar creates a new tar archive containing the specified files.
func CreateTar(destPath string, files []string) error {
if err := validateTarInputs(destPath, files); err != nil {
return err
}
@ -159,40 +102,15 @@ func TarFiles(destPath string, files []string) error {
defer file.Close()
defer tw.Close()
// Process each file
for _, absPath := range files {
// Get file info and validate type
info, err := os.Lstat(absPath)
// Add each file to the archive
for _, f := range files {
absPath, err := filepath.Abs(f)
if err != nil {
return fmt.Errorf("failed to stat %s: %w", absPath, err)
}
if err := validateFileType(info, absPath); err != nil {
return err
return fmt.Errorf("failed to get absolute path for %s: %w", f, err)
}
// Create and initialize header
header, err := createFileHeader(info, absPath)
if err != nil {
return err
}
// Handle symlinks and their targets
if info.Mode()&os.ModeSymlink != 0 {
if err := handleSymlink(tw, absPath, header); err != nil {
return err
}
}
// Write the file's header
if err := tw.WriteHeader(header); err != nil {
return fmt.Errorf("failed to write tar header for %s: %w", absPath, err)
}
// Write the file's contents if it's a regular file
if info.Mode().IsRegular() {
if err := writeRegularFile(tw, absPath); err != nil {
return err
}
if err := addFileToTar(tw, absPath); err != nil {
return fmt.Errorf("failed to add %s to tar: %w", f, err)
}
}

View file

@ -5,8 +5,8 @@ import (
"fmt"
"awalsh128.com/cache-apt-pkgs-action/internal/logging"
"github.com/bluet/syspkg"
"github.com/bluet/syspkg/manager"
"github.com/awalsh128/syspkg"
"github.com/awalsh128/syspkg/manager"
)
// Apt wraps the APT package manager functionality.
@ -41,7 +41,10 @@ func NewApt() (*Apt, error) {
// the input if some packages were already installed) and any error encountered.
// The installation is performed with --assume-yes and verbose logging enabled.
func (a *Apt) Install(pkgs Packages) (Packages, error) {
installedPkgs, err := a.manager.Install(pkgs.StringArray(), &manager.Options{AssumeYes: true, Debug: true, Verbose: true})
installedPkgs, err := a.manager.Install(
pkgs.StringArray(),
&manager.Options{AssumeYes: true, Debug: true, Verbose: true},
)
if err != nil {
return nil, err
}
@ -58,7 +61,11 @@ func (a *Apt) Install(pkgs Packages) (Packages, error) {
func (a *Apt) ListInstalledFiles(pkg *Package) ([]string, error) {
files, err := a.manager.ListInstalledFiles(pkg.String())
if err != nil {
return nil, fmt.Errorf("error listing installed files for package %s: %v", pkg.String(), err)
return nil, fmt.Errorf(
"error listing installed files for package %s: %v",
pkg.String(),
err,
)
}
return files, nil
}

View file

@ -7,7 +7,7 @@ import (
"strings"
"awalsh128.com/cache-apt-pkgs-action/internal/logging"
"github.com/bluet/syspkg/manager"
"github.com/awalsh128/syspkg/manager"
)
// packages is an unexported slice type that provides a stable, ordered collection of packages.
@ -66,6 +66,18 @@ func NewPackagesFromSyspkg(pkgs []manager.PackageInfo) Packages {
return NewPackages(items...)
}
func NewPackagesFromStrings(pkgs ...string) Packages {
items := packages{}
for _, pkgStr := range pkgs {
pkg, err := NewPackage(pkgStr)
if err != nil {
logging.Fatalf("error creating package from string %q: %v", pkgStr, err)
}
items = append(items, *pkg)
}
return NewPackages(items...)
}
func NewPackages(pkgs ...Package) Packages {
// Create a new slice to avoid modifying the input
result := make(packages, len(pkgs))

View file

@ -1,111 +1,164 @@
package pkgs
import (
"sort"
"testing"
)
func TestNewPackages(t *testing.T) {
packages := NewPackages()
if packages == nil {
t.Error("NewPackages() returned nil")
p := NewPackages()
if p == nil {
t.Fatal("NewPackages() returned nil")
}
if packages.Len() != 0 {
t.Errorf("NewPackages() returned non-empty Packages, got length %d", packages.Len())
if p.Len() != 0 {
t.Errorf("NewPackages() returned non-empty Packages, got length %d", p.Len())
}
}
func TestPackages_Add(t *testing.T) {
func TestNewPackagesFromStrings(t *testing.T) {
tests := []struct {
name string
initial []string
add string
expected []string
name string
pkgs []string
wantLen int
wantOrdered []string // expected order after sorting
}{
{
name: "Add to empty",
initial: []string{},
add: "xdot=1.3-1",
expected: []string{"xdot=1.3-1"},
name: "Empty input",
pkgs: []string{},
wantLen: 0,
wantOrdered: []string{},
},
{
name: "Add duplicate",
initial: []string{"xdot=1.3-1"},
add: "xdot=1.3-1",
expected: []string{"xdot=1.3-1"},
name: "Single package",
pkgs: []string{"xdot=1.3-1"},
wantLen: 1,
wantOrdered: []string{"xdot=1.3-1"},
},
{
name: "Add different version",
initial: []string{"xdot=1.3-1"},
add: "xdot=1.3-2",
expected: []string{"xdot=1.3-1", "xdot=1.3-2"},
name: "Multiple packages unsorted",
pkgs: []string{"zlib=1.2.3", "xdot=1.3-1", "apt=2.0.0"},
wantLen: 3,
wantOrdered: []string{"apt=2.0.0", "xdot=1.3-1", "zlib=1.2.3"},
},
{
name: "Duplicate packages",
pkgs: []string{"xdot=1.3-1", "xdot=1.3-1", "apt=2.0.0"},
wantLen: 2,
wantOrdered: []string{"apt=2.0.0", "xdot=1.3-1"},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
packages := NewPackages()
for _, pkg := range tt.initial {
packages.Add(pkg)
}
packages.Add(tt.add)
p := NewPackagesFromStrings(tt.pkgs...)
// Convert to slice for comparison
got := make([]string, packages.Len())
for i := 0; i < packages.Len(); i++ {
got[i] = packages.Get(i)
// Test Len()
if got := p.Len(); got != tt.wantLen {
t.Errorf("Len() = %v, want %v", got, tt.wantLen)
}
// Sort both slices for comparison
sort.Strings(got)
sort.Strings(tt.expected)
if len(got) != len(tt.expected) {
t.Errorf("Packages.Add() resulted in wrong length, got %v, want %v", got, tt.expected)
return
// Test Get() and verify order
for i := 0; i < p.Len(); i++ {
if i >= len(tt.wantOrdered) {
t.Errorf(
"Too many packages in result, extra package at index %d: %s",
i,
p.Get(i),
)
continue
}
if got := p.Get(i); got != tt.wantOrdered[i] {
t.Errorf("Get(%d) = %v, want %v", i, got, tt.wantOrdered[i])
}
}
for i := range got {
if got[i] != tt.expected[i] {
t.Errorf("Packages.Add() = %v, want %v", got, tt.expected)
break
// Test String()
wantString := ""
if len(tt.wantOrdered) > 0 {
for i, pkg := range tt.wantOrdered {
if i > 0 {
wantString += ","
}
wantString += pkg
}
}
if got := p.String(); got != wantString {
t.Errorf("String() = %v, want %v", got, wantString)
}
// Test StringArray()
gotArray := p.StringArray()
if len(gotArray) != len(tt.wantOrdered) {
t.Errorf("StringArray() length = %v, want %v", len(gotArray), len(tt.wantOrdered))
} else {
for i, want := range tt.wantOrdered {
if gotArray[i] != want {
t.Errorf("StringArray()[%d] = %v, want %v", i, gotArray[i], want)
}
}
}
})
}
}
func TestPackages_String(t *testing.T) {
func TestPackages_Add(t *testing.T) {
tests := []struct {
name string
packages []string
want string
name string
initial []string
toAdd []string
wantOrdered []string
}{
{
name: "Empty packages",
packages: []string{},
want: "",
name: "Add to empty",
initial: []string{},
toAdd: []string{"xdot=1.3-1"},
wantOrdered: []string{"xdot=1.3-1"},
},
{
name: "Single package",
packages: []string{"xdot=1.3-1"},
want: "xdot=1.3-1",
name: "Add multiple maintaining order",
initial: []string{"apt=2.0.0"},
toAdd: []string{"zlib=1.2.3", "xdot=1.3-1"},
wantOrdered: []string{"apt=2.0.0", "xdot=1.3-1", "zlib=1.2.3"},
},
{
name: "Multiple packages",
packages: []string{"xdot=1.3-1", "rolldice=1.16-1build3"},
want: "xdot=1.3-1,rolldice=1.16-1build3",
name: "Add duplicate",
initial: []string{"xdot=1.3-1"},
toAdd: []string{"xdot=1.3-1"},
wantOrdered: []string{"xdot=1.3-1"},
},
{
name: "Add same package different version",
initial: []string{"xdot=1.3-1"},
toAdd: []string{"xdot=1.3-2"},
wantOrdered: []string{"xdot=1.3-1", "xdot=1.3-2"},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
p := NewPackages()
for _, pkg := range tt.packages {
p := NewPackagesFromStrings(tt.initial...)
// Add packages one by one to test Add method
for _, pkg := range tt.toAdd {
p.Add(pkg)
}
if got := p.String(); got != tt.want {
t.Errorf("Packages.String() = %v, want %v", got, tt.want)
// Verify length
if got := p.Len(); got != len(tt.wantOrdered) {
t.Errorf("After Add(), Len() = %v, want %v", got, len(tt.wantOrdered))
}
// Verify order using Get
for i := 0; i < p.Len(); i++ {
if got := p.Get(i); got != tt.wantOrdered[i] {
t.Errorf("After Add(), Get(%d) = %v, want %v", i, got, tt.wantOrdered[i])
}
}
// Verify Contains for all added packages
for _, pkg := range tt.toAdd {
if !p.Contains(pkg) {
t.Errorf("After Add(), Contains(%v) = false, want true", pkg)
}
}
})
}
@ -126,26 +179,35 @@ func TestPackages_Contains(t *testing.T) {
},
{
name: "Package exists",
packages: []string{"xdot=1.3-1", "rolldice=1.16-1build3"},
packages: []string{"apt=2.0.0", "xdot=1.3-1"},
check: "xdot=1.3-1",
want: true,
},
{
name: "Package exists (different order)",
packages: []string{"xdot=1.3-1", "apt=2.0.0"},
check: "apt=2.0.0",
want: true,
},
{
name: "Package doesn't exist",
packages: []string{"xdot=1.3-1", "rolldice=1.16-1build3"},
packages: []string{"xdot=1.3-1", "apt=2.0.0"},
check: "nonexistent=1.0",
want: false,
},
{
name: "Similar package different version",
packages: []string{"xdot=1.3-1"},
check: "xdot=1.3-2",
want: false,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
p := NewPackages()
for _, pkg := range tt.packages {
p.Add(pkg)
}
p := NewPackagesFromStrings(tt.packages...)
if got := p.Contains(tt.check); got != tt.want {
t.Errorf("Packages.Contains() = %v, want %v", got, tt.want)
t.Errorf("Contains(%v) = %v, want %v", tt.check, got, tt.want)
}
})
}

83
scripts/check_utf8.sh Executable file
View file

@ -0,0 +1,83 @@
#!/bin/bash
#==============================================================================
# check_utf8.sh
#==============================================================================
#
# DESCRIPTION:
#   Checks text files for UTF-8 encoding and converts non-UTF-8 text files
#   to UTF-8 in place. Binary files and common non-text file types are
#   skipped. With no arguments the whole repository tree (.) is scanned.
#
# USAGE:
#   ./scripts/check_utf8.sh [-h|--help] [<file>...] [<directory>...]
#
# OPTIONS:
#   -h, --help    Show this help message
#   <file>        One or more files to check
#   <directory>   One or more directories to scan recursively
#
# EXIT STATUS:
#   0  all files were UTF-8 or were converted to UTF-8
#   1  a path did not exist, an option was unknown, or a dependency is missing
#
# DEPENDENCIES:
#   - bash
#   - file (for file type detection)
#   - iconv (for encoding detection)
#==============================================================================

# Required tools — fail fast when missing.
command -v file >/dev/null 2>&1 || {
  echo "file command not found. Please install it."
  exit 1
}
command -v iconv >/dev/null 2>&1 || {
  echo "iconv command not found. Please install it."
  exit 1
}

# Print this script's header block as usage text.
usage() {
  sed -n '/^# DESCRIPTION:/,/^#===/p' "$0" | sed 's/^# \?//'
}

#------------------------------------------------------------------------------
# Argument parsing: collect explicit paths; default to scanning ".".
# (The original ignored its documented <file>/<directory> arguments entirely.)
#------------------------------------------------------------------------------
paths=()
for arg in "$@"; do
  case "$arg" in
    -h|--help)
      echo "Usage:"
      usage
      exit 0
      ;;
    -*)
      echo "Unknown option: $arg" >&2
      usage >&2
      exit 1
      ;;
    *)
      if [[ ! -e "$arg" ]]; then
        echo "Error: No such file or directory: $arg" >&2
        exit 1
      fi
      paths+=("$arg")
      ;;
  esac
done
[[ ${#paths[@]} -eq 0 ]] && paths=(".")

# process_file FILE
# Detect FILE's charset and convert it to UTF-8 in place when needed.
process_file() {
  local file_path="$1"
  local mime_type encoding temp_file

  mime_type=$(file -i "$file_path" | cut -d: -f2-)

  # Skip anything `file` labels as binary data.
  if [[ $mime_type == *"binary"* ]]; then
    echo "⏭️ Skipping non-text file: $file_path ($mime_type)"
    return 0
  fi

  # Portable charset extraction (the original used GNU-only `grep -oP`).
  encoding=$(sed -n 's/.*charset=\([^ ;]*\).*/\1/p' <<<"$mime_type")
  encoding=${encoding:-unknown}

  # Already fine: UTF-8, or its ASCII subset.
  if [[ $encoding == "utf-8" || $encoding == "us-ascii" ]]; then
    echo "$file_path is already UTF-8"
    return 0
  fi

  echo "⚠️ Converting $file_path from non-UTF-8 encoding ($encoding) to UTF-8"
  # Convert into a temp file first so a failed conversion never truncates
  # the original.
  temp_file="${file_path}.tmp"
  if iconv -f "${encoding:-ISO-8859-1}" -t UTF-8 "$file_path" >"$temp_file" 2>/dev/null; then
    mv "$temp_file" "$file_path"
    echo "✓ Successfully converted $file_path to UTF-8"
  else
    rm -f "$temp_file"
    echo "⚠️ File $file_path appears to be binary or already UTF-8"
  fi
}

#------------------------------------------------------------------------------
# Walk every requested path, excluding VCS internals and known binary types.
# -print0/read -d '' keeps filenames with spaces intact.
#------------------------------------------------------------------------------
for path in "${paths[@]}"; do
  if [[ -f "$path" ]]; then
    process_file "$path"
    continue
  fi
  while IFS= read -r -d '' f; do
    process_file "$f"
  done < <(find "$path" -type f \
    ! -path "*/.git/*" \
    ! -name "*.png" ! -name "*.jpg" ! -name "*.jpeg" ! -name "*.gif" \
    ! -name "*.ico" ! -name "*.bin" ! -name "*.exe" ! -name "*.dll" \
    ! -name "*.so" ! -name "*.dylib" \
    -print0)
done

View file

@ -1,4 +1,29 @@
#!/bin/bash
#==============================================================================
# distribute.sh
#==============================================================================
#
# DESCRIPTION:
# Manages distribution of compiled binaries for different architectures.
# Handles building, pushing, and retrieving binary paths for GitHub Actions.
#
# USAGE:
# ./scripts/distribute.sh <command> [architecture]
#
# COMMANDS:
# push - Build and push all architecture binaries to dist directory
# getbinpath [ARCH] - Get binary path for specified architecture
#
# ARCHITECTURES:
# X86, X64, ARM, ARM64 - GitHub runner architectures
#
# DEPENDENCIES:
# - bash
# - go (for building)
# - git
#==============================================================================
set -e
CMD="$1"

89
scripts/export_version.sh Executable file
View file

@ -0,0 +1,89 @@
#!/bin/bash
#==============================================================================
# export_version.sh
#==============================================================================
#
# DESCRIPTION:
#   Script to export Go library version information for package development.
#   Extracts and exports version information from go.mod including Go version,
#   toolchain version, and syspkg version.
#
# USAGE:
#   ./scripts/export_version.sh [-h|--help]
#
# ENVIRONMENT:
#   GO_MOD_PATH   Path of the go.mod to read (default: <project root>/go.mod)
#
# OUTPUTS:
#   - Sets environment variables: GO_VERSION, TOOLCHAIN_VERSION, SYSPKG_VERSION
#   - Creates .version-info file with version details
#   - Creates .version-info.json with the same details in JSON
#
# DEPENDENCIES:
#   - bash
#   - grep, awk, sed (for parsing)
#==============================================================================

# Get the directory containing this script
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
PROJECT_ROOT="$(dirname "$SCRIPT_DIR")"

# go.mod to read; overridable (e.g. by the test suite) via the environment.
GO_MOD_PATH="${GO_MOD_PATH:-$PROJECT_ROOT/go.mod}"

# Function to extract Go version from go.mod
get_go_version() {
  local go_version
  go_version=$(grep "^go " "$GO_MOD_PATH" | awk '{print $2}')
  echo "$go_version"
}

# Function to extract toolchain version from go.mod
get_toolchain_version() {
  local toolchain_version
  toolchain_version=$(grep "^toolchain " "$GO_MOD_PATH" | awk '{print $2}')
  echo "$toolchain_version"
}

# Function to extract syspkg version from go.mod
get_syspkg_version() {
  local syspkg_version
  syspkg_version=$(grep "github.com/awalsh128/syspkg" "$GO_MOD_PATH" | awk '{print $2}')
  echo "$syspkg_version"
}

# Print this script's header block as help text.
show_usage() {
  sed -n '/^# DESCRIPTION:/,/^#===/p' "$0" | sed 's/^# \?//'
}

# Main entry point: validate go.mod, extract versions, write output files.
main() {
  echo "Exporting version information..."

  # Validate the input before trying to parse it.
  if [[ ! -r "$GO_MOD_PATH" ]]; then
    echo "Error: Could not read go.mod at $GO_MOD_PATH" >&2
    return 1
  fi

  GO_VERSION=$(get_go_version)
  TOOLCHAIN_VERSION=$(get_toolchain_version)
  SYSPKG_VERSION=$(get_syspkg_version)

  # A go.mod without a "go" directive is not parseable for our purposes.
  if [[ -z "$GO_VERSION" ]]; then
    echo "Error: Failed to parse version information from $GO_MOD_PATH" >&2
    return 1
  fi

  # Export versions as environment variables
  export GO_VERSION
  export TOOLCHAIN_VERSION
  export SYSPKG_VERSION

  # Create a version info file (heredoc bodies stay at column 0 so the
  # written file has no leading whitespace).
  VERSION_FILE="$PROJECT_ROOT/.version-info"
  cat > "$VERSION_FILE" << EOF
# Version information for cache-apt-pkgs-action
GO_VERSION=$GO_VERSION
TOOLCHAIN_VERSION=$TOOLCHAIN_VERSION
SYSPKG_VERSION=$SYSPKG_VERSION
EXPORT_DATE=$(date '+%Y-%m-%d %H:%M:%S')
EOF

  echo "Version information has been exported to $VERSION_FILE"
  echo "Go Version: $GO_VERSION"
  echo "Toolchain Version: $TOOLCHAIN_VERSION"
  echo "Syspkg Version: $SYSPKG_VERSION"

  # Also create a JSON format for tools that prefer it
  VERSION_JSON="$PROJECT_ROOT/.version-info.json"
  cat > "$VERSION_JSON" << EOF
{
  "goVersion": "$GO_VERSION",
  "toolchainVersion": "$TOOLCHAIN_VERSION",
  "syspkgVersion": "$SYSPKG_VERSION",
  "exportDate": "$(date '+%Y-%m-%d %H:%M:%S')"
}
EOF

  echo "Version information also exported in JSON format to $VERSION_JSON"
}

# Run main only when executed directly; `source`-ing this file (as the test
# suite does) just defines the functions above. `set -e` is likewise scoped
# to direct execution so it does not leak into a sourcing shell.
if [[ "${BASH_SOURCE[0]}" == "$0" ]]; then
  set -e
  for arg in "$@"; do
    case "$arg" in
      -h|--help)
        echo "Usage:"
        show_usage
        exit 0
        ;;
      *)
        echo "Unknown option: $arg" >&2
        exit 1
        ;;
    esac
  done
  main
fi

155
scripts/menu.sh Executable file
View file

@ -0,0 +1,155 @@
#!/bin/bash
#==============================================================================
# menu.sh
#==============================================================================
#
# DESCRIPTION:
#   Interactive menu for running project scripts and common tasks.
#   Provides easy access to development, testing, and maintenance tasks.
#
# USAGE:
#   ./scripts/menu.sh
#
# FEATURES:
#   - Interactive menu interface
#   - Clear task descriptions
#   - Status feedback
#   - Error handling
#
# DEPENDENCIES:
#   - bash
#   - Various project scripts
#==============================================================================

# Colors for output
GREEN='\033[0;32m'
BLUE='\033[0;34m'
RED='\033[0;31m'
NC='\033[0m' # No Color
BOLD='\033[1m'

# Function to print section headers
print_header() {
  echo -e "\n${BOLD}${BLUE}$1${NC}\n"
}

# Function to print status messages
print_status() {
  echo -e "${GREEN}==>${NC} $1"
}

# Function to print errors
print_error() {
  echo -e "${RED}Error:${NC} $1"
}

# Function to wait for user input before continuing
pause() {
  echo
  read -n 1 -s -r -p "Press any key to continue..."
  echo
}

# run_command CMD DESCRIPTION
# Runs CMD via eval, reporting success or failure (with the real exit code).
run_command() {
  local cmd="$1"
  local description="$2"

  print_status "Running: $description"
  echo "Command: $cmd"
  echo

  if eval "$cmd"; then
    print_status "Successfully completed: $description"
  else
    # Capture the failing status immediately; the original read $? only
    # after print_error ran, so it always reported print_error's status (0).
    local status=$?
    print_error "Failed: $description"
    echo "Exit code: $status"
  fi
  pause
}

# Main menu loop: redraw the menu, read one choice, dispatch.
while true; do
  clear
  print_header "Cache Apt Packages Action - Development Menu"

  echo "1) Setup Development Environment"
  echo "2) Update Markdown TOCs"
  echo "3) Run Tests"
  echo "4) Run Linting (trunk check)"
  echo "5) Build Project"
  echo "6) Check UTF-8 Encoding"
  echo "7) Run All Checks (tests, lint, build)"
  echo "8) Run All Script Tests"
  echo
  echo "9) Show Project Status"
  echo "10) Show Recent Git Log"
  echo "11) Export Version Information"
  echo
  echo "q) Quit"
  echo

  # -r so backslashes in input are taken literally.
  read -rp "Select an option: " choice
  echo

  case $choice in
    1)
      run_command "./scripts/setup_dev.sh" "Setting up development environment"
      ;;
    2)
      run_command "./scripts/update_md_tocs.sh" "Updating markdown tables of contents"
      ;;
    3)
      run_command "go test -v ./..." "Running tests"
      ;;
    4)
      run_command "trunk check" "Running linting checks"
      ;;
    5)
      run_command "go build -v ./..." "Building project"
      ;;
    6)
      run_command "./scripts/check_utf8.sh" "Checking UTF-8 encoding"
      ;;
    7)
      print_header "Running All Checks"
      run_command "go test -v ./..." "Running tests"
      run_command "trunk check" "Running linting checks"
      run_command "go build -v ./..." "Building project"
      run_command "./scripts/check_utf8.sh" "Checking UTF-8 encoding"
      ;;
    8)
      print_header "Running All Script Tests"
      run_command "./scripts/tests/setup_dev_test.sh" "Running setup dev tests"
      run_command "./scripts/tests/check_utf8_test.sh" "Running UTF-8 check tests"
      run_command "./scripts/tests/update_md_tocs_test.sh" "Running markdown TOC tests"
      run_command "./scripts/tests/export_version_test.sh" "Running version export tests"
      run_command "./scripts/tests/distribute_test.sh" "Running distribute tests"
      ;;
    9)
      print_header "Project Status"
      echo "Git Status:"
      git status
      echo
      echo "Go Module Status:"
      go mod verify
      pause
      ;;
    10)
      print_header "Recent Git Log"
      git log --oneline -n 10
      pause
      ;;
    11)
      run_command "./scripts/export_version.sh" "Exporting version information"
      ;;
    q|Q)
      print_status "Goodbye!"
      exit 0
      ;;
    *)
      print_error "Invalid option"
      pause
      ;;
  esac
done

124
scripts/setup_dev.sh Executable file
View file

@ -0,0 +1,124 @@
#!/bin/bash
#==============================================================================
# setup_dev.sh
#==============================================================================
#
# DESCRIPTION:
#   Sets up the development environment for the cache-apt-pkgs-action project.
#   Installs all necessary tools, configures Go environment, and sets up
#   pre-commit hooks.
#
# USAGE:
#   ./scripts/setup_dev.sh [-h|--help]
#
# DEPENDENCIES:
#   - go
#   - npm
#   - git
#==============================================================================
set -e # Exit on error

# Colors for output
GREEN='\033[0;32m'
RED='\033[0;31m'
NC='\033[0m' # No Color

# Print this script's header block as help text.
show_help() {
  sed -n '/^# DESCRIPTION:/,/^#===/p' "$0" | sed 's/^# \?//'
}

# Parse command line arguments (the script takes no positional arguments;
# the original accepted and silently ignored everything).
for arg in "$@"; do
  case "$arg" in
    -h|--help)
      echo "Usage:"
      show_help
      exit 0
      ;;
    *)
      echo "Unknown option: $arg" >&2
      exit 1
      ;;
  esac
done

# Function to check if a command exists
command_exists() {
  command -v "$1" >/dev/null 2>&1
}

# Function to check if an npm package is installed globally
npm_package_installed() {
  npm list -g "$1" >/dev/null 2>&1
}

# Function to print status messages
print_status() {
  echo -e "${GREEN}==>${NC} $1"
}

# Function to print error messages (aborts the script)
print_error() {
  echo -e "${RED}Error:${NC} $1"
  exit 1
}

# Check prerequisites
print_status "Checking prerequisites..."
if ! command_exists go; then
  print_error "Go is not installed. Please install Go first."
fi
if ! command_exists npm; then
  print_error "npm is not installed. Please install Node.js and npm first."
fi
if ! command_exists git; then
  print_error "git is not installed. Please install git first."
fi

# Configure Go environment
print_status "Configuring Go environment..."
# "on" rather than the legacy "auto": this is a module-based project
# (go.mod) and the test suite expects GO111MODULE=on.
go env -w GO111MODULE=on

# Verify Go modules
print_status "Verifying Go modules..."
go mod tidy
go mod verify

# Install development tools
print_status "Installing development tools..."

# Trunk for linting
if ! command_exists trunk; then
  print_status "Installing trunk..."
  # NOTE(review): downloads and runs a remote install script; pin/verify it
  # if supply-chain integrity matters here.
  curl -fsSL https://get.trunk.io -o get-trunk.sh
  bash get-trunk.sh
  rm get-trunk.sh
fi

# doctoc for markdown TOC
if ! npm_package_installed doctoc; then
  print_status "Installing doctoc..."
  npm install -g doctoc
fi

# Go tools
print_status "Installing Go tools..."
go install golang.org/x/tools/cmd/goimports@latest
go install github.com/segmentio/golines@latest
go install github.com/golangci/golangci-lint/cmd/golangci-lint@latest

# Set up Git hooks
print_status "Setting up Git hooks..."
if [ -d .git ]; then
  # Initialize trunk
  trunk init
  # Enable pre-commit hooks
  git config core.hooksPath .git/hooks/
else
  print_error "Not a git repository"
fi

# Update markdown TOCs
print_status "Updating markdown TOCs..."
./scripts/update_md_tocs.sh

# Initial trunk check
print_status "Running initial trunk check..."
trunk check

# Final verification
print_status "Verifying installation..."
go test ./...

print_status "Development environment setup complete!"
echo "You can now:"
echo "  1. Run tests: go test ./..."
echo "  2. Run linting: trunk check"
echo "  3. Update markdown TOCs: ./scripts/update_md_tocs.sh"

190
scripts/tests/check_utf8_test.sh Executable file
View file

@ -0,0 +1,190 @@
#!/bin/bash
#==============================================================================
# check_utf8_test.sh
#==============================================================================
#
# DESCRIPTION:
#   Test suite for check_utf8.sh script.
#   Validates UTF-8 encoding detection, file handling, and error conditions.
#
# USAGE:
#   ./scripts/tests/check_utf8_test.sh [-v|--verbose] [-r|--recursive]
#
# OPTIONS:
#   -v, --verbose    Show verbose test output
#   -r, --recursive  Test recursive directory scanning
#   -h, --help       Show this help message
#
#==============================================================================

# Source the test library (test_case, print_*, setup_test_env, report_results).
source "$(dirname "$0")/test_lib.sh"

# Additional settings
TEST_RECURSIVE=false

# Dependencies check
check_dependencies "file" "iconv" || exit 1

# Parse arguments (handle any unprocessed args from common parser)
while [[ -n "$1" ]]; do
  arg="$(parse_common_args "$1")"
  case "$arg" in
    -r|--recursive)
      # Single shift at the loop bottom; the original shifted twice here,
      # silently dropping the argument after -r.
      TEST_RECURSIVE=true
      ;;
    "")
      # parse_common_args consumed the flag (e.g. -v) itself; originally an
      # empty result fell through to the "Unknown option" branch.
      ;;
    *)
      echo "Unknown option: $1"
      generate_help "$0"
      exit 1
      ;;
  esac
  shift
done

# Initialize test environment (creates $TEMP_DIR and registers a cleanup
# trap; the original additionally ran its own mktemp/trap, leaking the
# library's directory).
setup_test_env

# Get the directory containing this script
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
PROJECT_ROOT="$(dirname "$(dirname "$SCRIPT_DIR")")"

# create_encoded_file FILE CONTENT ENCODING
# Writes CONTENT to FILE; "utf8" means plain UTF-8, anything else is passed
# to iconv as the target encoding.
create_encoded_file() {
  local file="$1"
  local content="$2"
  local encoding="$3"

  if [[ "$encoding" == "utf8" ]]; then
    create_test_file "$file" "$content"
  else
    echo -n "$content" | iconv -f UTF-8 -t "$encoding" > "$file"
    print_info "Created $encoding encoded file: $file"
  fi
}

print_header "Check UTF-8 Tests"

# Section 1: Command Line Interface
# (The original called test_case here without line continuations, so the
# command and expectation ran as separate shell commands.)
print_section "Testing Command Line Interface"

test_case "help option" \
  "$PROJECT_ROOT/scripts/check_utf8.sh --help" \
  "Usage:" \
  true

test_case "unknown option" \
  "$PROJECT_ROOT/scripts/check_utf8.sh --unknown" \
  "Unknown option" \
  false

# Section 2: Basic File Encoding Detection
print_section "Testing Basic File Encoding Detection"

create_encoded_file "$TEMP_DIR/utf8.txt" "Hello, 世界!" "utf8"
create_encoded_file "$TEMP_DIR/latin1.txt" "Hello, World!" "ISO-8859-1"

test_case "single utf8 file" \
  "$PROJECT_ROOT/scripts/check_utf8.sh $TEMP_DIR/utf8.txt" \
  "" \
  true \
  "UTF-8 file should pass validation"

test_case "single latin1 file" \
  "$PROJECT_ROOT/scripts/check_utf8.sh $TEMP_DIR/latin1.txt" \
  "non-UTF-8" \
  false \
  "Latin-1 file should fail validation"

# Section 3: Multiple File Handling
print_section "Testing Multiple File Handling"

create_encoded_file "$TEMP_DIR/mixed1.txt" "Hello" "utf8"
create_encoded_file "$TEMP_DIR/mixed2.txt" "World" "ISO-8859-1"

test_case "multiple mixed files" \
  "$PROJECT_ROOT/scripts/check_utf8.sh $TEMP_DIR/mixed1.txt $TEMP_DIR/mixed2.txt" \
  "non-UTF-8" \
  false \
  "Multiple files with mixed encodings should fail"

# Section 4: Special Cases
print_section "Testing Special Cases"

create_test_file "$TEMP_DIR/empty.txt" ""
test_case "empty file" \
  "$PROJECT_ROOT/scripts/check_utf8.sh '$TEMP_DIR/empty.txt'" \
  "" \
  true \
  "Empty file should be considered valid UTF-8"

test_case "missing file" \
  "$PROJECT_ROOT/scripts/check_utf8.sh '$TEMP_DIR/nonexistent.txt'" \
  "No such file" \
  false \
  "Missing file should fail with appropriate error"

test_case "invalid directory" \
  "$PROJECT_ROOT/scripts/check_utf8.sh '$TEMP_DIR/nonexistent'" \
  "No such file" \
  false \
  "Invalid directory should fail with appropriate error"

# A UTF-8 BOM is valid UTF-8 and should pass.
printf '\xEF\xBB\xBF' > "$TEMP_DIR/with_bom.txt"
echo "Hello, World!" >> "$TEMP_DIR/with_bom.txt"
test_case "UTF-8 with BOM" \
  "$PROJECT_ROOT/scripts/check_utf8.sh '$TEMP_DIR/with_bom.txt'" \
  "" \
  true

# Section 5: Error Conditions
print_section "Testing Error Conditions"

test_case "nonexistent file" \
  "$PROJECT_ROOT/scripts/check_utf8.sh nonexistent.txt" \
  "No such file" \
  false

# Create unreadable file
touch "$TEMP_DIR/unreadable.txt"
chmod 000 "$TEMP_DIR/unreadable.txt"
test_case "unreadable file" \
  "$PROJECT_ROOT/scripts/check_utf8.sh '$TEMP_DIR/unreadable.txt'" \
  "Permission denied" \
  false
chmod 644 "$TEMP_DIR/unreadable.txt"

# Optional recursive testing section — runs before the summary so its
# results are counted (the original ran it after the summary, and a stray
# "fi \" fragment there was a syntax error).
if [[ "$TEST_RECURSIVE" == "true" ]]; then
  print_section "Testing Recursive Directory Handling"

  create_test_dir "$TEMP_DIR/subdir/deep"
  create_encoded_file "$TEMP_DIR/subdir/deep/utf8_deep.txt" "Deep UTF-8" "utf8"
  create_encoded_file "$TEMP_DIR/subdir/deep/latin1_deep.txt" "Deep Latin-1" "ISO-8859-1"

  test_case "recursive directory check" \
    "$PROJECT_ROOT/scripts/check_utf8.sh -r '$TEMP_DIR'" \
    "non-UTF-8" \
    false \
    "Recursive check should find non-UTF-8 files in subdirectories"
fi

# Report aggregated results; exit status is non-zero when any test failed.
report_results

View file

@ -0,0 +1,169 @@
#!/bin/bash
#==============================================================================
# export_version_test.sh
#==============================================================================
#
# DESCRIPTION:
#   Test suite for export_version.sh script.
#   Validates version extraction, file generation, and error handling.
#
# USAGE:
#   ./scripts/tests/export_version_test.sh [-v|--verbose]
#
# OPTIONS:
#   -v, --verbose    Show verbose test output
#   -h, --help       Show this help message
#
#==============================================================================

# Colors for test output
GREEN='\033[0;32m'
RED='\033[0;31m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color

# Test settings
VERBOSE=false
PASS=0
FAIL=0

# Help message
# Prints the DESCRIPTION/USAGE header block of this script.
show_help() {
  sed -n '/^# DESCRIPTION:/,/^#===/p' "$0" | sed 's/^# \?//'
}

# Parse command line arguments
while [[ $# -gt 0 ]]; do
  case $1 in
    -v|--verbose)
      VERBOSE=true
      shift
      ;;
    -h|--help)
      show_help
      exit 0
      ;;
    *)
      echo "Unknown option: $1"
      show_help
      exit 1
      ;;
  esac
done

# Get the directory containing this script
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
PROJECT_ROOT="$(dirname "$(dirname "$SCRIPT_DIR")")"

# Create a temporary directory for test files
TEMP_DIR=$(mktemp -d)
trap 'rm -rf "$TEMP_DIR"' EXIT

# Source the script (without executing main)
# NOTE(review): export_version.sh has no "sourced vs executed" guard, so
# sourcing it here runs its entire body (including `set -e` and the output
# file writes) rather than only defining get_*_version — TODO confirm and
# add a BASH_SOURCE guard in export_version.sh.
source "$PROJECT_ROOT/scripts/export_version.sh"

# Main test case function
# test_case NAME CMD EXPECTED_OUTPUT [SHOULD_SUCCEED]
#   Runs CMD, requires its combined stdout/stderr to contain EXPECTED_OUTPUT
#   and (when SHOULD_SUCCEED is "true") a zero exit status; bumps PASS/FAIL.
#   NOTE(review): $cmd is expanded unquoted (word splitting, no eval), so
#   quoting embedded in the command string is not honored and "VAR=x cmd"
#   prefixes are not treated as env assignments — verify the Section 4 call
#   sites below actually exercise what they claim.
function test_case() {
  local name=$1
  local cmd=$2
  local expected_output=$3
  local should_succeed=${4:-true}

  echo -n "Testing $name... "

  # Run command and capture output
  local output
  if [[ $should_succeed == "true" ]]; then
    output=$($cmd 2>&1)
    local status=$?
    if [[ $status -eq 0 && $output == *"$expected_output"* ]]; then
      echo -e "${GREEN}PASS${NC}"
      ((PASS++))
      return 0
    fi
  else
    # Failure expected: the exit status is ignored; only output is matched.
    output=$($cmd 2>&1) || true
    if [[ $output == *"$expected_output"* ]]; then
      echo -e "${GREEN}PASS${NC}"
      ((PASS++))
      return 0
    fi
  fi

  echo -e "${RED}FAIL${NC}"
  echo "  Expected output to contain: '$expected_output'"
  echo "  Got: '$output'"
  ((FAIL++))
  return 0
}

echo "Running export_version.sh tests..."
echo "--------------------------------"

# Section 1: Command Line Interface
echo -e "\n${BLUE}Testing Command Line Interface${NC}"

test_case "help option" \
  "$PROJECT_ROOT/scripts/export_version.sh --help" \
  "Usage:" \
  true

test_case "unknown option" \
  "$PROJECT_ROOT/scripts/export_version.sh --unknown" \
  "Unknown option" \
  false

# Section 2: Version Extraction
# These call the functions sourced from export_version.sh directly.
echo -e "\n${BLUE}Testing Version Extraction${NC}"

test_case "go version extraction" \
  "get_go_version" \
  "1.23" \
  true

test_case "toolchain version extraction" \
  "get_toolchain_version" \
  "go1.23.4" \
  true

test_case "syspkg version extraction" \
  "get_syspkg_version" \
  "v0.1.5" \
  true

# Section 3: File Generation
echo -e "\n${BLUE}Testing File Generation${NC}"

test_case "version info file creation" \
  "$PROJECT_ROOT/scripts/export_version.sh" \
  "Version information has been exported" \
  true

test_case "version file format" \
  "grep -E '^GO_VERSION=[0-9]+\.[0-9]+$' $PROJECT_ROOT/.version-info" \
  "GO_VERSION=1.23" \
  true

test_case "JSON file format" \
  "grep -E '\"goVersion\": \"[0-9]+\.[0-9]+\"' $PROJECT_ROOT/.version-info.json" \
  "\"goVersion\": \"1.23\"" \
  true

# Section 4: Error Conditions
# NOTE(review): these assume export_version.sh honors a GO_MOD_PATH override
# and emits these exact messages — confirm against the script.
echo -e "\n${BLUE}Testing Error Conditions${NC}"

test_case "invalid go.mod" \
  "GO_MOD_PATH=$TEMP_DIR/go.mod $PROJECT_ROOT/scripts/export_version.sh" \
  "Could not read go.mod" \
  false

# Create invalid go.mod for testing
echo "invalid content" > "$TEMP_DIR/go.mod"

test_case "malformed go.mod" \
  "GO_MOD_PATH=$TEMP_DIR/go.mod $PROJECT_ROOT/scripts/export_version.sh" \
  "Failed to parse version" \
  false

# Report results
echo
echo "Test Results:"
echo "Passed: $PASS"
echo "Failed: $FAIL"
exit $FAIL

180
scripts/tests/setup_dev_test.sh Executable file
View file

@ -0,0 +1,180 @@
#!/bin/bash
#==============================================================================
# setup_dev_test.sh
#==============================================================================
#
# DESCRIPTION:
#   Test suite for setup_dev.sh script.
#   Validates development environment setup, tool installation, and configuration.
#
# USAGE:
#   ./scripts/tests/setup_dev_test.sh [-v|--verbose] [-s|--skip-install]
#
# OPTIONS:
#   -v, --verbose        Show verbose test output
#   -h, --help           Show this help message
#   -s, --skip-install   Skip actual installation tests
#
#==============================================================================

# Source the test library (provides test_case, print_*, setup_test_env,
# report_results and the PASS/FAIL counters; the original re-defined its own
# test_case below, shadowing the library version).
source "$(dirname "$0")/test_lib.sh"

# Additional settings
SKIP_INSTALL=false

# Parse arguments (handle any unprocessed args from common parser)
while [[ -n "$1" ]]; do
  arg="$(parse_common_args "$1")"
  case "$arg" in
    -s|--skip-install)
      # Single shift at the loop bottom; the original shifted twice here,
      # silently dropping the argument after -s.
      SKIP_INSTALL=true
      ;;
    "")
      # parse_common_args consumed the flag (e.g. -v) itself; originally an
      # empty result fell through to the "Unknown option" branch.
      ;;
    *)
      echo "Unknown option: $1"
      generate_help "$0"
      exit 1
      ;;
  esac
  shift
done

# Initialize test environment (creates $TEMP_DIR with a cleanup trap; the
# original additionally ran its own mktemp/trap, leaking one directory).
setup_test_env

# Get the directory containing this script
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
PROJECT_ROOT="$(dirname "$(dirname "$SCRIPT_DIR")")"

echo "Running setup_dev.sh tests..."
echo "---------------------------"

# Section 1: Command Line Interface
print_section "Testing Command Line Interface"

test_case "help option" \
  "$PROJECT_ROOT/scripts/setup_dev.sh --help" \
  "Usage:" \
  true

test_case "unknown option" \
  "$PROJECT_ROOT/scripts/setup_dev.sh --unknown" \
  "Unknown option" \
  false

# Section 2: Go Environment Check
print_section "Testing Go Environment"

test_case "go installation" \
  "command -v go" \
  "" \
  true

test_case "go version format" \
  "go version" \
  "go version go1" \
  true

test_case "go modules enabled" \
  "go env GO111MODULE" \
  "on" \
  true

# Section 3: Development Tool Installation
print_section "Testing Development Tools"

test_case "doctoc installation check" \
  "command -v doctoc" \
  "" \
  true

test_case "trunk installation check" \
  "command -v trunk" \
  "" \
  true

if [[ "$SKIP_INSTALL" == "false" ]]; then
  test_case "doctoc functionality" \
    "doctoc --version" \
    "doctoc@" \
    true

  test_case "trunk functionality" \
    "trunk --version" \
    "trunk" \
    true
fi

# Section 4: Project Configuration
print_section "Testing Project Configuration"

test_case "go.mod existence" \
  "test -f $PROJECT_ROOT/go.mod" \
  "" \
  true

test_case "trunk.yaml existence" \
  "test -f $PROJECT_ROOT/.trunk/trunk.yaml" \
  "" \
  true

# Section 5: Error Conditions
print_section "Testing Error Conditions"

test_case "invalid GOPATH" \
  "GOPATH=/nonexistent $PROJECT_ROOT/scripts/setup_dev.sh" \
  "Invalid GOPATH" \
  false

if [[ "$SKIP_INSTALL" == "false" ]]; then
  test_case "network failure simulation" \
    "SIMULATE_NETWORK_FAILURE=1 $PROJECT_ROOT/scripts/setup_dev.sh" \
    "Failed to download" \
    false
fi

# Report test results once and exit non-zero if any test failed (the
# original reported twice: report_results plus a manual PASS/FAIL block).
report_results

272
scripts/tests/test_lib.sh Executable file
View file

@ -0,0 +1,272 @@
#!/bin/bash
#==============================================================================
# test_lib.sh
#==============================================================================
#
# DESCRIPTION:
# Common testing library for shell script tests.
# Provides standard test framework functions and utilities.
#
# USAGE:
# source "$(dirname "$0")/test_lib.sh"
#
# FEATURES:
# - Standard test framework
# - Color output
# - Test counting
# - Temporary directory management
# - Command line parsing
# - Help text generation
#
#==============================================================================
# Colors for test output (ANSI escape sequences; emitted via `echo -e`)
export GREEN='\033[0;32m'
export RED='\033[0;31m'
export BLUE='\033[0;34m'
export NC='\033[0m' # No Color
export BOLD='\033[1m'
# Test counters (incremented by test_case, summarized by report_results)
export PASS=0
export FAIL=0
# Test settings
export VERBOSE=${VERBOSE:-false}  # set to "true" (or pass -v) for INFO output
export TEMP_DIR                   # populated by setup_test_env
# Print functions -----------------------------------------------------------

# Print a bold header surrounded by blank lines.
print_header() {
  echo -e "\n${BOLD}${1}${NC}\n"
}

# Print a blue section title on its own line.
print_section() {
  echo -e "\n${BLUE}${1}${NC}"
}

# Print an informational message, but only in verbose mode.
# Uses an explicit `if` rather than `[[ ... ]] && echo` so the function
# returns 0 when VERBOSE is off; the short-circuit form returned 1 and
# would abort any caller running under `set -e`.
print_info() {
  if [[ "$VERBOSE" == "true" ]]; then
    echo "INFO: $1"
  fi
}
# Main test case function
#
# Runs a shell command and checks its exit status and combined output.
#
# Arguments:
#   $1 - test name (display only)
#   $2 - command line to run; evaluated with `eval` so embedded quoting
#        (e.g. "doctoc '$file with spaces'") is honored
#   $3 - substring that must appear in the combined stdout/stderr
#   $4 - "true" (default) if the command must succeed, "false" if it
#        must fail
#
# Increments PASS or FAIL; always returns 0 so the test run continues.
test_case() {
  local name=$1
  local cmd=$2
  local expected_output=$3
  local should_succeed=${4:-true}

  echo -n "Testing $name... "
  print_info "Command: $cmd"

  # Run via eval so quoting inside $cmd is parsed; bare word-splitting
  # ($cmd) passed literal quote characters through to the command.
  # The `if` wrapper keeps a failing command from tripping `set -e`.
  local output status
  if output=$(eval "$cmd" 2>&1); then
    status=0
  else
    status=$?
  fi

  if [[ $should_succeed == "true" ]]; then
    if [[ $status -eq 0 && $output == *"$expected_output"* ]]; then
      echo -e "${GREEN}PASS${NC}"
      PASS=$((PASS + 1))  # plain assignment: `((PASS++))` returns 1 at 0
      print_info "Output: $output"
      return 0
    fi
  else
    # The command must actually fail AND produce the expected output;
    # previously a succeeding command with matching output was counted
    # as a pass for a should-fail test.
    if [[ $status -ne 0 && $output == *"$expected_output"* ]]; then
      echo -e "${GREEN}PASS${NC}"
      PASS=$((PASS + 1))
      print_info "Output: $output"
      return 0
    fi
  fi

  echo -e "${RED}FAIL${NC}"
  echo "  Expected output to contain: '$expected_output'"
  echo "  Got: '$output'"
  FAIL=$((FAIL + 1))
  return 0
}
# Environment setup ---------------------------------------------------------

# Create a scratch directory for the test run and register automatic
# cleanup when the script exits.
setup_test_env() {
  TEMP_DIR="$(mktemp -d)"
  trap cleanup_test_env EXIT
  print_info "Created temporary directory: $TEMP_DIR"
}

# Remove the scratch directory created by setup_test_env, if any.
cleanup_test_env() {
  if [[ -d "$TEMP_DIR" ]]; then
    rm -rf "$TEMP_DIR"
    print_info "Cleaned up temporary directory: $TEMP_DIR"
  fi
}
# Help text generation ------------------------------------------------------

# Print the DESCRIPTION..#=== header block of the given script with the
# leading "# " comment markers stripped.
generate_help() {
  local script_path="$1"
  sed -n '/^# DESCRIPTION:/,/^#===/p' "$script_path" | sed -e 's/^# \{0,1\}//'
}
# Standard argument parsing
#
# Handles the flags common to all test scripts (-v/--verbose, -h/--help).
# Any argument it does not recognize is echoed to stdout so callers can
# capture and process the remainder:
#   remaining=$(parse_common_args "$@")
# NOTE: when invoked through command substitution as above, the
# VERBOSE=true assignment happens in a subshell — callers that need the
# flag must also inspect the arguments themselves.
parse_common_args() {
  while [[ $# -gt 0 ]]; do
    case $1 in
      -v|--verbose)
        VERBOSE=true
        ;;
      -h|--help)
        generate_help "$0"
        exit 0
        ;;
      *)
        # Return the unhandled argument
        echo "$1"
        ;;
    esac
    # Exactly one shift per iteration. The previous version also shifted
    # inside the -v branch, silently swallowing the argument after it.
    shift
  done
}
# Results reporting ---------------------------------------------------------

# Print the PASS/FAIL summary for the run.
# Exit status is 0 only when no test failed, so scripts can simply end
# with `report_results`.
report_results() {
  local total=$((PASS + FAIL))
  printf '\nTest Results:\n------------\n'
  printf 'Passed: %s\nFailed: %s\nTotal: %s\n' "$PASS" "$FAIL" "$total"
  [[ $FAIL -eq 0 ]]
}
# File operation helpers ----------------------------------------------------

# Create a test file (parent directories included) with given content.
#   $1 - file path
#   $2 - content (written followed by a newline)
#   $3 - octal mode, default 644
create_test_file() {
  local path="$1"
  local content="$2"
  local mode="${3:-644}"
  mkdir -p "$(dirname "$path")"
  echo "$content" > "$path"
  chmod "$mode" "$path"
  print_info "Created test file: $path"
}

# Create a test directory with the given mode (default 755).
create_test_dir() {
  local path="$1"
  local mode="${2:-755}"
  mkdir -p "$path"
  chmod "$mode" "$path"
  print_info "Created test directory: $path"
}
# Assert that a file contains a grep pattern; on mismatch print a FAIL
# diagnostic (including the file contents) and return 1.
assert_file_contains() {
  local file="$1"
  local pattern="$2"
  local message="${3:-File does not contain expected content}"
  if grep -q "$pattern" "$file"; then
    return 0
  fi
  echo -e "${RED}FAIL${NC}: $message"
  echo "  File: $file"
  echo "  Expected pattern: $pattern"
  echo "  Content:"
  cat "$file"
  return 1
}

# Assert that a regular file exists; print a FAIL diagnostic otherwise.
assert_file_exists() {
  local file="$1"
  local message="${2:-File does not exist}"
  if [[ -f "$file" ]]; then
    return 0
  fi
  echo -e "${RED}FAIL${NC}: $message"
  echo "  Expected file: $file"
  return 1
}

# Assert that a directory exists; print a FAIL diagnostic otherwise.
assert_dir_exists() {
  local dir="$1"
  local message="${2:-Directory does not exist}"
  if [[ -d "$dir" ]]; then
    return 0
  fi
  echo -e "${RED}FAIL${NC}: $message"
  echo "  Expected directory: $dir"
  return 1
}
# Return success if the named command resolves on PATH.
is_command_available() {
  command -v "$1" > /dev/null 2>&1
}

# Poll an eval'd condition until it succeeds or the timeout elapses.
#   $1 - shell snippet evaluated each attempt
#   $2 - timeout in seconds (default 10)
#   $3 - polling interval in seconds (default 1)
# Returns 1 if the deadline passes without the condition succeeding.
wait_for_condition() {
  local condition="$1"
  local timeout="${2:-10}"
  local interval="${3:-1}"
  local deadline=$((SECONDS + timeout))
  while [[ $SECONDS -lt $deadline ]]; do
    if eval "$condition"; then
      return 0
    fi
    sleep "$interval"
  done
  return 1
}

# Print a SKIP line and return 0 when the command is missing; return 1
# ("do not skip") when it is available.
skip_if_command_missing() {
  local required="$1"
  local message="${2:-Required command not available}"
  if is_command_available "$required"; then
    return 1
  fi
  echo "SKIP: $message (missing: $required)"
  return 0
}
# Run the given command if it exists on PATH; otherwise eval the fallback.
#   $1 - command name
#   $2 - fallback shell snippet (passed to eval)
run_if_exists() {
  local cmd="$1"
  local fallback="$2"
  if is_command_available "$cmd"; then
    "$cmd"
  else
    eval "$fallback"
  fi
}

# Copy a file to <file>.bak and arrange for it to be restored on exit.
# NOTE(review): this installs its own EXIT trap, which replaces any
# previously installed EXIT trap (e.g. cleanup_test_env registered by
# setup_test_env) — confirm callers do not rely on both traps firing.
backup_and_restore() {
  local file="$1"
  if [[ -f "$file" ]]; then
    cp "$file" "${file}.bak"
    print_info "Backed up: $file"
    trap 'restore_backup "$file"' EXIT
  fi
}

# Move <file>.bak back over the original, if a backup exists.
restore_backup() {
  local file="$1"
  if [[ -f "${file}.bak" ]]; then
    mv "${file}.bak" "$file"
    print_info "Restored: $file"
  fi
}

# Verify that every named command is available on PATH.
# Prints one line per missing dependency and returns the count of
# missing commands (0 == all present).
# NOTE(review): return codes are modulo 256, so 256+ missing commands
# would wrap to 0 — not a practical concern at this scale.
check_dependencies() {
  local missing=0
  for cmd in "$@"; do
    if ! is_command_available "$cmd"; then
      echo "Missing required dependency: $cmd"
      ((missing++))
    fi
  done
  return $missing
}

View file

@ -0,0 +1,227 @@
#!/bin/bash
#==============================================================================
# update_md_tocs_test.sh
#==============================================================================
#
# DESCRIPTION:
# Test suite for update_md_tocs.sh script.
# Validates Table of Contents generation, markdown file handling,
# and doctoc integration.
#
# USAGE:
# ./scripts/tests/update_md_tocs_test.sh [-v|--verbose] [-s|--skip-doctoc]
#
# OPTIONS:
# -v, --verbose Show verbose test output
# -s, --skip-doctoc Skip tests requiring doctoc installation
# -h, --help Show this help message
#
#==============================================================================
# Colors for test output (ANSI escapes, emitted via `echo -e`)
GREEN='\033[0;32m'
RED='\033[0;31m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color

# Test settings and counters
VERBOSE=false
SKIP_DOCTOC=false
PASS=0
FAIL=0

# Help message: print this script's DESCRIPTION header block with the
# leading "# " comment markers stripped.
show_help() {
  sed -n '/^# DESCRIPTION:/,/^#===/p' "$0" | sed 's/^# \?//'
}

# Parse command line arguments (-v, -s, -h; anything else is an error)
while [[ $# -gt 0 ]]; do
  case $1 in
    -v|--verbose)
      VERBOSE=true
      shift
      ;;
    -s|--skip-doctoc)
      SKIP_DOCTOC=true
      shift
      ;;
    -h|--help)
      show_help
      exit 0
      ;;
    *)
      echo "Unknown option: $1"
      show_help
      exit 1
      ;;
  esac
done

# Get the directory containing this script and the repository root
# (two levels up: scripts/tests -> scripts -> project root)
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
PROJECT_ROOT="$(dirname "$(dirname "$SCRIPT_DIR")")"

# Create a temporary directory for test files; removed automatically on exit
TEMP_DIR=$(mktemp -d)
trap 'rm -rf "$TEMP_DIR"' EXIT

# Test helper functions

# Write a small markdown document containing doctoc START/END markers and
# a few section headers to the given path.
create_test_md() {
  local file="$1"
  cat > "$file" << EOF
# Test Document
<!-- START doctoc -->
<!-- END doctoc -->
## Section 1
### Subsection 1.1
### Subsection 1.2
## Section 2
### Subsection 2.1
EOF
}
# Main test case function
#
# Runs a shell command and verifies its exit status and combined output.
#
# Arguments:
#   $1 - test name (display only)
#   $2 - command line to run; evaluated with `eval` so embedded quoting
#        (e.g. "doctoc '$TEMP_DIR/test.md'") is honored
#   $3 - substring that must appear in the combined stdout/stderr
#   $4 - "true" (default) if the command must succeed, "false" if it
#        must fail
#
# Increments PASS or FAIL; always returns 0 so the test run continues.
function test_case() {
  local name=$1
  local cmd=$2
  local expected_output=$3
  local should_succeed=${4:-true}

  echo -n "Testing $name... "

  # Run via eval so quoting inside $cmd is parsed; bare word-splitting
  # ($cmd) passed literal quote characters through to the command. The
  # `if` wrapper keeps a failing command from tripping `set -e`.
  local output status
  if output=$(eval "$cmd" 2>&1); then
    status=0
  else
    status=$?
  fi

  if [[ $should_succeed == "true" ]]; then
    if [[ $status -eq 0 && $output == *"$expected_output"* ]]; then
      echo -e "${GREEN}PASS${NC}"
      PASS=$((PASS + 1))  # plain assignment: `((PASS++))` returns 1 at 0
      return 0
    fi
  else
    # The command must actually fail AND produce the expected output;
    # previously a succeeding command with matching output was counted
    # as a pass for a should-fail test.
    if [[ $status -ne 0 && $output == *"$expected_output"* ]]; then
      echo -e "${GREEN}PASS${NC}"
      PASS=$((PASS + 1))
      return 0
    fi
  fi

  echo -e "${RED}FAIL${NC}"
  echo "  Expected output to contain: '$expected_output'"
  echo "  Got: '$output'"
  FAIL=$((FAIL + 1))
  return 0
}
echo "Running update_md_tocs.sh tests..."
echo "---------------------------------"

# Section 1: Command Line Interface
# NOTE(review): update_md_tocs.sh does not currently parse any command
# line options, so the --help/--unknown expectations below look
# aspirational — confirm against the script before relying on them.
echo -e "\n${BLUE}Testing Command Line Interface${NC}"
test_case "help option" \
  "$PROJECT_ROOT/scripts/update_md_tocs.sh --help" \
  "Usage:" \
  true
test_case "unknown option" \
  "$PROJECT_ROOT/scripts/update_md_tocs.sh --unknown" \
  "Unknown option" \
  false

# Section 2: Basic TOC Generation
echo -e "\n${BLUE}Testing Basic TOC Generation${NC}"
create_test_md "$TEMP_DIR/test.md"
if [[ "$SKIP_DOCTOC" == "false" ]]; then
  test_case "doctoc installation" \
    "command -v doctoc" \
    "" \
    true
  # NOTE(review): these commands embed single quotes; confirm test_case
  # evaluates its command string so the quoting is honored rather than
  # passed through literally.
  test_case "TOC generation" \
    "doctoc '$TEMP_DIR/test.md'" \
    "Table of Contents" \
    true
  test_case "TOC structure" \
    "grep -A 5 'Table of Contents' '$TEMP_DIR/test.md'" \
    "Section 1" \
    true
fi

# Section 3: Multiple File Handling
# NOTE(review): update_md_tocs.sh scans the repository tree and ignores
# positional arguments — verify it actually honors explicit file paths
# before trusting this section.
echo -e "\n${BLUE}Testing Multiple File Handling${NC}"
create_test_md "$TEMP_DIR/doc1.md"
create_test_md "$TEMP_DIR/doc2.md"
test_case "multiple file update" \
  "$PROJECT_ROOT/scripts/update_md_tocs.sh '$TEMP_DIR/doc1.md' '$TEMP_DIR/doc2.md'" \
  "updated" \
  true

# Section 4: Special Cases
echo -e "\n${BLUE}Testing Special Cases${NC}"
# Create file without TOC markers
cat > "$TEMP_DIR/no_toc.md" << EOF
# Document
## Section 1
## Section 2
EOF
test_case "file without TOC markers" \
  "$PROJECT_ROOT/scripts/update_md_tocs.sh '$TEMP_DIR/no_toc.md'" \
  "No TOC markers" \
  false
# Create empty file
touch "$TEMP_DIR/empty.md"
test_case "empty file handling" \
  "$PROJECT_ROOT/scripts/update_md_tocs.sh '$TEMP_DIR/empty.md'" \
  "Empty file" \
  false

# Section 5: Error Conditions
echo -e "\n${BLUE}Testing Error Conditions${NC}"
test_case "nonexistent file" \
  "$PROJECT_ROOT/scripts/update_md_tocs.sh nonexistent.md" \
  "No such file" \
  false
test_case "directory as input" \
  "$PROJECT_ROOT/scripts/update_md_tocs.sh '$TEMP_DIR'" \
  "Is a directory" \
  false
# Create unreadable file
# NOTE(review): mode-000 permission tests are ineffective when run as
# root (common in CI containers) — root can still read the file.
touch "$TEMP_DIR/unreadable.md"
chmod 000 "$TEMP_DIR/unreadable.md"
test_case "unreadable file" \
  "$PROJECT_ROOT/scripts/update_md_tocs.sh '$TEMP_DIR/unreadable.md'" \
  "Permission denied" \
  false
chmod 644 "$TEMP_DIR/unreadable.md"
# Create file with invalid markdown
cat > "$TEMP_DIR/invalid.md" << EOF
# [Invalid Markdown)
* Broken list
EOF
test_case "invalid markdown handling" \
  "$PROJECT_ROOT/scripts/update_md_tocs.sh '$TEMP_DIR/invalid.md'" \
  "Invalid markdown" \
  false

# Report results
echo
echo "Test Results:"
echo "Passed: $PASS"
echo "Failed: $FAIL"
# NOTE(review): exit codes are taken modulo 256; 256 failures would
# exit 0 — exiting 1 when FAIL > 0 would be safer.
exit $FAIL

89
scripts/update_md_tocs.sh Executable file
View file

@ -0,0 +1,89 @@
#!/bin/bash
#==============================================================================
# update_md_tocs.sh
#==============================================================================
#
# DESCRIPTION:
# Automatically updates table of contents in all markdown files that contain
# doctoc markers. The script handles installation of doctoc if not present
# and applies consistent formatting across all markdown files.
#
# USAGE:
# ./scripts/update_md_tocs.sh
#
# FEATURES:
# - Auto-detects markdown files with doctoc markers
# - Installs doctoc if not present (requires npm)
# - Applies consistent settings across all files:
# * Excludes document title
# * Includes headers up to level 4
# * Uses GitHub-compatible links
# - Provides clear progress and error feedback
#
# TO ADD TOC TO A NEW FILE:
# Add these markers to your markdown:
# <!-- START doctoc generated TOC please keep comment here to allow auto update -->
# <!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE -->
# <!-- doctoc --maxlevel 4 --no-title --notitle --github -->
#
# <!-- END doctoc -->
#
# DEPENDENCIES:
# - npm (for doctoc installation if needed)
# - doctoc (will be installed if missing)
#
# EXIT CODES:
# 0 - Success
# 1 - Missing dependencies or installation failure
#
# AUTHOR:
# Claude - 2025-08-28
#
# NOTES:
# - Only processes files containing doctoc markers
# - Preserves existing markdown content
# - Safe to run multiple times
#==============================================================================
# Function to check if a command exists on PATH
command_exists() {
  command -v "$1" >/dev/null 2>&1
}

# Function to check if an npm package is installed globally
npm_package_installed() {
  npm list -g "$1" >/dev/null 2>&1
}

# Install doctoc if not present
if ! command_exists doctoc; then
  echo "doctoc not found. Installing..."
  if ! command_exists npm; then
    echo "Error: npm is required to install doctoc"
    exit 1
  fi
  if ! npm_package_installed doctoc; then
    echo "Installing doctoc globally..."
    # Test the command directly rather than inspecting $? afterwards.
    if ! npm install -g doctoc; then
      echo "Error: Failed to install doctoc"
      exit 1
    fi
  fi
fi

echo "Updating table of contents in markdown files..."

# Find all markdown files that contain doctoc markers and update each.
# NUL-delimited filenames (grep -lZ / read -d '') are safe for any path,
# and process substitution keeps the loop in the current shell — with the
# original `find | while` pipeline the loop ran in a subshell, so update
# failures were silently discarded and the script always exited 0.
failures=0
while IFS= read -r -d '' file; do
  echo "Processing: $file"
  if ! doctoc --maxlevel 4 --no-title --notitle --github "$file"; then
    echo "Error: Failed to update TOC in $file"
    failures=$((failures + 1))
  fi
done < <(find . -type f -name "*.md" -exec grep -lZ "START doctoc" {} +)

# Propagate failures so CI notices a broken TOC update.
if [[ $failures -gt 0 ]]; then
  echo "Error: $failures file(s) failed to update"
  exit 1
fi
echo "Table of contents update complete!"

View file

@ -1 +0,0 @@
package cacheaptpkgs

View file

@ -1 +0,0 @@
package cacheaptpkgs

View file

@ -1 +0,0 @@
package cacheaptpkgs

View file

@ -1 +0,0 @@
package cacheaptpkgs

View file

@ -1 +0,0 @@
package cacheaptpkgs

View file

@ -1 +0,0 @@
package cache

View file

@ -1 +0,0 @@
package cache

View file

@ -1 +0,0 @@
package cio

View file

@ -1 +0,0 @@
package cio

View file

@ -1 +0,0 @@
package common

View file

@ -1 +0,0 @@
package common

View file

@ -1 +0,0 @@
package logging

View file

@ -1 +0,0 @@
package pkgs

View file

@ -1 +0,0 @@
package pkgs

View file

@ -1 +0,0 @@
package pkgs

70
tools/version_export.sh Normal file
View file

@ -0,0 +1,70 @@
#!/bin/bash
# Script to export Go library version information for package development.
# Reads version data from go.mod and writes it to .version-info (shell
# format) and .version-info.json (JSON format); the variables are also
# exported for consumers that source this script.
set -e

# Get the directory containing this script and the repository root
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
PROJECT_ROOT="$(dirname "$SCRIPT_DIR")"

# Extract the `go` directive version from go.mod (first match only;
# prints nothing, without failing, when the directive is absent).
get_go_version() {
  awk '$1 == "go" { print $2; exit }' "$PROJECT_ROOT/go.mod"
}

# Extract the optional `toolchain` directive from go.mod; may be empty.
get_toolchain_version() {
  awk '$1 == "toolchain" { print $2; exit }' "$PROJECT_ROOT/go.mod"
}

# Extract the pinned syspkg dependency version. First match only, so an
# additional `replace` directive for the same module cannot produce a
# second line that would corrupt the output files.
get_syspkg_version() {
  awk '/github\.com\/awalsh128\/syspkg/ { print $2; exit }' "$PROJECT_ROOT/go.mod"
}

# Main execution
echo "Exporting version information..."
GO_VERSION=$(get_go_version)
TOOLCHAIN_VERSION=$(get_toolchain_version)
SYSPKG_VERSION=$(get_syspkg_version)

# Capture the timestamp once so the shell and JSON outputs always agree
# (previously `date` ran twice and the files could differ by a second).
EXPORT_DATE=$(date '+%Y-%m-%d %H:%M:%S')

# Export versions as environment variables (effective when sourced)
export GO_VERSION
export TOOLCHAIN_VERSION
export SYSPKG_VERSION

# Create a version info file
VERSION_FILE="$PROJECT_ROOT/.version-info"
cat > "$VERSION_FILE" << EOF
# Version information for cache-apt-pkgs-action
GO_VERSION=$GO_VERSION
TOOLCHAIN_VERSION=$TOOLCHAIN_VERSION
SYSPKG_VERSION=$SYSPKG_VERSION
EXPORT_DATE=$EXPORT_DATE
EOF

echo "Version information has been exported to $VERSION_FILE"
echo "Go Version: $GO_VERSION"
echo "Toolchain Version: $TOOLCHAIN_VERSION"
echo "Syspkg Version: $SYSPKG_VERSION"

# Also create a JSON format for tools that prefer it
VERSION_JSON="$PROJECT_ROOT/.version-info.json"
cat > "$VERSION_JSON" << EOF
{
  "goVersion": "$GO_VERSION",
  "toolchainVersion": "$TOOLCHAIN_VERSION",
  "syspkgVersion": "$SYSPKG_VERSION",
  "exportDate": "$EXPORT_DATE"
}
EOF

echo "Version information also exported in JSON format to $VERSION_JSON"