Compare commits

master..v1.4.0

No commits in common. "master" and "v1.4.0" have entirely different histories.

44 changed files with 83122 additions and 250038 deletions

.github/dependabot.yml

@@ -1,50 +0,0 @@
# https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/optimizing-pr-creation-version-updates#setting-up-a-cooldown-period-for-dependency-updates
version: 2
updates:
  - package-ecosystem: cargo
    directories:
      - tests
      - tests/wasm-workspace
    schedule:
      interval: weekly
    # https://docs.github.com/en/code-security/dependabot/working-with-dependabot/dependabot-options-reference#groups--
    # 1 PR per week and group
    groups:
      cargo-major:
        update-types: ["major"]
      cargo-minor:
        update-types: ["minor"]
      cargo-patch:
        update-types: ["patch"]
  - package-ecosystem: github-actions
    directory: /
    schedule:
      interval: weekly
    groups:
      actions:
        # Combine all updates of the last week
        patterns: ["*"]
  - package-ecosystem: npm
    directory: /
    schedule:
      interval: weekly
    groups:
      prd-major:
        dependency-type: "production"
        update-types: ["major"]
      prd-minor:
        dependency-type: "production"
        update-types: ["minor"]
      prd-patch:
        dependency-type: "production"
        update-types: ["patch"]
      dev-major:
        dependency-type: "development"
        update-types: ["major"]
      dev-minor:
        dependency-type: "development"
        update-types: ["minor"]
      dev-patch:
        dependency-type: "development"
        update-types: ["patch"]

.github/workflows/buildjet.yml

@@ -1,33 +0,0 @@
name: buildjet
on: [push, pull_request]

jobs:
  buildjet:
    if: github.repository == 'Swatinem/rust-cache'
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-latest, macos-latest, windows-latest]
    name: Test buildjet provider on ${{ matrix.os }}
    runs-on: ${{ matrix.os }}
    env:
      CARGO_TERM_COLOR: always
    steps:
      - uses: actions/checkout@v5
      - run: rustup toolchain install stable --profile minimal --no-self-update
      - uses: ./
        with:
          workspaces: tests
          cache-provider: buildjet
      - run: |
          cargo check
          cargo test
          cargo build --release
        working-directory: tests

.github/workflows/check-dist.yml

@@ -1,46 +0,0 @@
name: check dist/
on:
  push:
    branches:
      - master
    paths-ignore:
      - "**.md"
  pull_request:
    paths-ignore:
      - "**.md"
  workflow_dispatch:

jobs:
  check-dist:
    if: github.repository == 'Swatinem/rust-cache'
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v5
      - name: Setup Node.js 20.x
        uses: actions/setup-node@v4
        with:
          node-version: 20.x
          cache: npm
      - name: Install dependencies
        run: npm ci
      - name: Rebuild the dist/ directory
        run: npm run prepare
      - name: Compare the expected and actual dist/ directories
        run: |
          if [ "$(git diff dist/ | wc -l)" -gt "0" ]; then
            echo "Detected uncommitted changes after build. See status below:"
            git diff
            exit 1
          fi
        id: diff
      - uses: actions/upload-artifact@v4
        if: ${{ failure() && steps.diff.conclusion == 'failure' }}
        with:
          name: dist
          path: dist/

.github/workflows/coverage.yml

@@ -1,31 +0,0 @@
name: coverage
on: [push, pull_request]

jobs:
  coverage:
    if: github.repository == 'Swatinem/rust-cache'
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-latest, macos-latest, windows-latest]
    name: Test `cargo-llvm-cov` on ${{ matrix.os }}
    runs-on: ${{ matrix.os }}
    env:
      CARGO_TERM_COLOR: always
    steps:
      - uses: actions/checkout@v5
      - run: rustup toolchain install stable --profile minimal --component llvm-tools-preview --no-self-update
      - uses: taiki-e/install-action@cargo-llvm-cov
      - uses: ./
        with:
          workspaces: tests
      - run: cargo llvm-cov --all-features --workspace
        working-directory: tests

.github/workflows/dependabot.yml

@@ -1,25 +0,0 @@
# https://docs.github.com/en/code-security/dependabot/working-with-dependabot/automating-dependabot-with-github-actions#enabling-automerge-on-a-pull-request
name: Dependabot Automation
on: pull_request

permissions:
  contents: write
  pull-requests: write

jobs:
  automerge:
    runs-on: ubuntu-latest
    if: github.event.pull_request.user.login == 'dependabot[bot]' && github.repository == 'Swatinem/rust-cache'
    steps:
      - name: Fetch metadata
        id: metadata
        uses: dependabot/fetch-metadata@v2
        with:
          github-token: "${{ secrets.GITHUB_TOKEN }}"
      - name: Auto-merge Patch PRs
        if: steps.metadata.outputs.update-type == 'version-update:semver-patch'
        run: gh pr merge --auto --merge "$PR_URL"
        env:
          PR_URL: ${{github.event.pull_request.html_url}}
          GH_TOKEN: ${{secrets.GITHUB_TOKEN}}

.github/workflows/git-registry.yml

@@ -1,32 +0,0 @@
name: git-registry
on: [push, pull_request]

jobs:
  git-registry:
    if: github.repository == 'Swatinem/rust-cache'
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-latest, macos-latest, windows-latest]
    name: Test cargo "git" registry on ${{ matrix.os }}
    runs-on: ${{ matrix.os }}
    env:
      CARGO_TERM_COLOR: always
      CARGO_REGISTRIES_CRATES_IO_PROTOCOL: git
    steps:
      - uses: actions/checkout@v5
      - run: rustup toolchain install stable --profile minimal --no-self-update
      - uses: ./
        with:
          workspaces: tests
      - run: |
          cargo check
          cargo test
        working-directory: tests

.github/workflows/install.yml

@@ -1,26 +0,0 @@
name: install
on: [push, pull_request]

jobs:
  install:
    if: github.repository == 'Swatinem/rust-cache'
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-latest, macos-latest, windows-latest]
    name: Test `cargo install` on ${{ matrix.os }}
    runs-on: ${{ matrix.os }}
    env:
      CARGO_TERM_COLOR: always
    steps:
      - uses: actions/checkout@v5
      - run: rustup toolchain install stable --profile minimal --no-self-update
      - uses: ./
      - run: cargo install cargo-deny --locked

.github/workflows/selftest.yml

@@ -0,0 +1,31 @@
name: CI
on: [push, pull_request]

jobs:
  selftest:
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-latest, macos-latest, windows-latest]
    name: Test Action on ${{ matrix.os }}
    runs-on: ${{ matrix.os }}
    steps:
      - uses: actions/checkout@v2
      - uses: actions-rs/toolchain@v1
        with:
          toolchain: stable
          profile: minimal
          override: true
      - uses: ./
        with:
          cache-on-failure: true
      - run: |
          cargo install cargo-deny --locked
          cargo check
          cargo test

.github/workflows/simple.yml

@@ -1,32 +0,0 @@
name: simple
on: [push, pull_request]

jobs:
  simple:
    if: github.repository == 'Swatinem/rust-cache'
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-latest, macos-latest, windows-latest]
    name: Test `cargo check/test/build` on ${{ matrix.os }}
    runs-on: ${{ matrix.os }}
    env:
      CARGO_TERM_COLOR: always
    steps:
      - uses: actions/checkout@v5
      - run: rustup toolchain install stable --profile minimal --no-self-update
      - uses: ./
        with:
          workspaces: tests
      - run: |
          cargo check
          cargo test
          cargo build --release
        working-directory: tests

.github/workflows/target-dir.yml

@@ -1,30 +0,0 @@
name: target-dir
on: [push, pull_request]

jobs:
  target-dir:
    if: github.repository == 'Swatinem/rust-cache'
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-latest, macos-latest, windows-latest]
    name: Test custom target-dir on ${{ matrix.os }}
    runs-on: ${{ matrix.os }}
    env:
      CARGO_TERM_COLOR: always
    steps:
      - uses: actions/checkout@v5
      - run: rustup toolchain install stable --profile minimal --no-self-update
      # the `workspaces` option has the format `$workspace -> $target-dir`
      # and the `$target-dir` is relative to the `$workspace`.
      - uses: ./
        with:
          workspaces: tests -> ../custom-target-dir
      - run: cargo test --manifest-path tests/Cargo.toml --target-dir custom-target-dir

.github/workflows/workspaces.yml

@@ -1,36 +0,0 @@
name: workspaces
on: [push, pull_request]

jobs:
  workspaces:
    if: github.repository == 'Swatinem/rust-cache'
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-latest, macos-latest, windows-latest]
    name: Test multiple workspaces on ${{ matrix.os }}
    runs-on: ${{ matrix.os }}
    env:
      CARGO_TERM_COLOR: always
    steps:
      - uses: actions/checkout@v5
      - run: rustup toolchain install stable --profile minimal --target wasm32-unknown-unknown --no-self-update
      - uses: ./
        with:
          workspaces: |
            tests
            tests/wasm-workspace
      - name: cargo check (tests)
        working-directory: tests
        run: cargo check
      - name: cargo check (tests/wasm-workspace)
        working-directory: tests/wasm-workspace
        run: cargo check

.gitignore

@@ -1,8 +1,2 @@
-node_modules/
-target/
-
-# Editors
-.idea/
-
-# Mac
-.DS_Store
+node_modules
+/target

CHANGELOG.md

@@ -1,113 +1,5 @@
 # Changelog

-## 2.8.0
-
-- Add support for `warpbuild` cache provider
-- Add new `cache-workspace-crates` feature
-
-## 2.7.8
-
-- Include CPU arch in the cache key
-
-## 2.7.7
-
-- Also cache `cargo install` metadata
-
-## 2.7.6
-
-- Allow opting out of caching $CARGO_HOME/bin
-- Add runner OS in cache key
-- Adds an option to do lookup-only of the cache
-
-## 2.7.5
-
-- Support Cargo.lock format cargo-lock v4
-- Only run macOsWorkaround() on macOS
-
-## 2.7.3
-
-- Work around upstream problem that causes cache saving to hang for minutes.
-
-## 2.7.2
-
-- Only key by `Cargo.toml` and `Cargo.lock` files of workspace members.
-
-## 2.7.1
-
-- Update toml parser to fix parsing errors.
-
-## 2.7.0
-
-- Properly cache `trybuild` tests.
-
-## 2.6.2
-
-- Fix `toml` parsing.
-
-## 2.6.1
-
-- Fix hash contributions of `Cargo.lock`/`Cargo.toml` files.
-
-## 2.6.0
-
-- Add "buildjet" as a second `cache-provider` backend.
-- Clean up sparse registry index.
-- Do not clean up src of `-sys` crates.
-- Remove `.cargo/credentials.toml` before saving.
-
-## 2.5.1
-
-- Fix hash contribution of `Cargo.lock`.
-
-## 2.5.0
-
-- feat: Rm workspace crates version before caching.
-- feat: Add hash of `.cargo/config.toml` to key.
-
-## 2.4.0
-
-- Fix cache key stability.
-- Use 8 character hash components to reduce the key length, making it more readable.
-
-## 2.3.0
-
-- Add `cache-all-crates` option, which enables caching of crates installed by workflows.
-- Add installed packages to cache key, so changes to workflows that install rust tools are detected and cached properly.
-- Fix cache restore failures due to upstream bug.
-- Fix `EISDIR` error due to globed directories.
-- Update runtime `@actions/cache`, `@actions/io` and dev `typescript` dependencies.
-- Update `npm run prepare` so it creates distribution files with the right line endings.
-
-## 2.2.1
-
-- Update `@actions/cache` dependency to fix usage of `zstd` compression.
-
-## 2.2.0
-
-- Add new `save-if` option to always restore, but only conditionally save the cache.
-
-## 2.1.0
-
-- Only hash `Cargo.{lock,toml}` files in the configured workspace directories.
-
-## 2.0.2
-
-- Avoid calling `cargo metadata` on pre-cleanup.
-- Added `prefix-key`, `cache-directories` and `cache-targets` options.
-
-## 2.0.1
-
-- Primarily just updating dependencies to fix GitHub deprecation notices.
-
-## 2.0.0
-
-- The action code was refactored to allow for caching multiple workspaces and
-  different `target` directory layouts.
-- The `working-directory` and `target-dir` input options were replaced by a
-  single `workspaces` option that has the form of `$workspace -> $target`.
-- Support for considering `env-vars` as part of the cache key.
-- The `sharedKey` input option was renamed to `shared-key` for consistency.
-
 ## 1.4.0

 - Clean both `debug` and `release` target directories.

Cargo.lock

File diff suppressed because it is too large.

Cargo.toml

@@ -0,0 +1,10 @@
[package]
publish = false
name = "rust-cache"
version = "0.1.0"
authors = ["Arpad Borsos <arpad.borsos@googlemail.com>"]
edition = "2018"

[dev-dependencies]
reqwest = "0.11.0"
actix-web = { git = "https://github.com/actix/actix-web.git", rev = "bd26083f333ecf63e3eb444748250364ce124f5e" }

README.md

@@ -6,96 +6,39 @@ sensible defaults.
 ## Example usage

 ```yaml
-- uses: actions/checkout@v5
+- uses: actions/checkout@v2

 # selecting a toolchain either by action or manual `rustup` calls should happen
-# before the plugin, as the cache uses the current rustc version as its cache key
-- run: rustup toolchain install stable --profile minimal
-
-- uses: Swatinem/rust-cache@v2
-  with:
-    # The prefix cache key, this can be changed to start a new cache manually.
-    # default: "v0-rust"
-    prefix-key: ""
-
-    # A cache key that is used instead of the automatic `job`-based key,
-    # and is stable over multiple jobs.
-    # default: empty
-    shared-key: ""
-
-    # An additional cache key that is added alongside the automatic `job`-based
-    # cache key and can be used to further differentiate jobs.
-    # default: empty
-    key: ""
-
-    # A whitespace separated list of env-var *prefixes* whose value contributes
-    # to the environment cache key.
-    # The env-vars are matched by *prefix*, so the default `RUST` var will
-    # match all of `RUSTC`, `RUSTUP_*`, `RUSTFLAGS`, `RUSTDOC_*`, etc.
-    # default: "CARGO CC CFLAGS CXX CMAKE RUST"
-    env-vars: ""
-
-    # The cargo workspaces and target directory configuration.
-    # These entries are separated by newlines and have the form
-    # `$workspace -> $target`. The `$target` part is treated as a directory
-    # relative to the `$workspace` and defaults to "target" if not explicitly given.
-    # default: ". -> target"
-    workspaces: ""
-
-    # Additional non-workspace directories to be cached, separated by newlines.
-    cache-directories: ""
-
-    # Determines whether workspace `target` directories are cached.
-    # If `false`, only the cargo registry will be cached.
-    # default: "true"
-    cache-targets: ""
-
-    # Determines if the cache should be saved even when the workflow has failed.
-    # default: "false"
-    cache-on-failure: ""
-
-    # Determines which crates are cached.
-    # If `true` all crates will be cached, otherwise only dependent crates will be cached.
-    # Useful if additional crates are used for CI tooling.
-    # default: "false"
-    cache-all-crates: ""
-
-    # Similar to cache-all-crates.
-    # If `true` the workspace crates will be cached.
-    # Useful if the workspace contains libraries that are only updated sporadically.
-    # default: "false"
-    cache-workspace-crates: ""
-
-    # Determines whether the cache should be saved.
-    # If `false`, the cache is only restored.
-    # Useful for jobs where the matrix is additive e.g. additional Cargo features,
-    # or when only runs from `master` should be saved to the cache.
-    # default: "true"
-    save-if: ""
-    # To only cache runs from `master`:
-    save-if: ${{ github.ref == 'refs/heads/master' }}
-
-    # Determines whether the cache should be restored.
-    # If `true` the cache key will be checked and the `cache-hit` output will be set,
-    # but the cache itself won't be restored.
-    # default: "false"
-    lookup-only: ""
-
-    # Specifies what to use as the backend providing cache.
-    # Can be set to "github", "buildjet", or "warpbuild".
-    # default: "github"
-    cache-provider: ""
-
-    # Determines whether to cache the ~/.cargo/bin directory.
-    # default: "true"
-    cache-bin: ""
+# before the plugin, as it uses the current rustc version as its cache key
+- uses: actions-rs/toolchain@v1
+  with:
+    profile: minimal
+    toolchain: stable
+
+- uses: Swatinem/rust-cache@v1
 ```

-Further examples are available in the [.github/workflows](./.github/workflows/) directory.
+## Inputs
+
+: `key`
+
+  An optional key that is added to the automatic cache key.
+
+: `sharedKey`
+
+  An additional key that is stable over multiple jobs.
+
+: `working-directory`
+
+  The working directory the action operates in, in case the cargo project is not
+  located in the repo root.
+
+: `target-dir`
+
+  The target directory that should be cleaned and persisted, defaults to `./target`.
+
+: `cache-on-failure`
+
+  Cache even if the build fails. Defaults to false.

 ## Outputs

-**`cache-hit`**
+: `cache-hit`

 This is a boolean flag that will be set to `true` when there was an exact cache hit.
@@ -109,29 +52,35 @@ repositories with only a `Cargo.toml` file have limited benefits, as cargo will
 _always_ use the most up-to-date dependency versions, which may not be cached.

 Usage with Stable Rust is most effective, as a cache is tied to the Rust version.
-Using it with Nightly Rust is less effective as it will throw away the cache every day,
-unless a specific nightly build is being pinned.
+Using it with Nightly Rust is less effective as it will throw away the cache every day.
+
+## Versioning
+
+I use the `v1` branch similar to `master` development, so if you want to have
+a more stable experience, please use a fixed revision or tag.

 ## Cache Details

 This action currently caches the following files/directories:

-- `~/.cargo` (installed binaries, the cargo registry, cache, and git dependencies)
-- `./target` (build artifacts of dependencies)
+- `~/.cargo/bin`
+- `~/.cargo/registry/index`
+- `~/.cargo/registry/cache`
+- `~/.cargo/git`
+- `~/.cargo/.crates.toml`
+- `~/.cargo/.crates2.json`
+- `./target`

 This cache is automatically keyed by:

 - the github [`job_id`](https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_id),
-- the rustc release / host / hash,
-- the value of some compiler-specific environment variables (eg. RUSTFLAGS, etc), and
+- the rustc release / host / hash, and
 - a hash of all `Cargo.lock` / `Cargo.toml` files found anywhere in the repository (if present).
 - a hash of all `rust-toolchain` / `rust-toolchain.toml` files in the root of the repository (if present).
-- a hash of all `.cargo/config.toml` files in the root of the repository (if present).

 An additional input `key` can be provided if the builtin keys are not sufficient.

 Before being persisted, the cache is cleaned of:

 - Any files in `~/.cargo/bin` that were present before the action ran (for example `rustc`).
 - Dependencies that are no longer used.
 - Anything that is not a dependency.
@@ -150,42 +99,13 @@ to recreate it from the compressed crate archives in `~/.cargo/registry/cache`.
 The action will try to restore from a previous `Cargo.lock` version as well, so
 lockfile updates should only re-build changed dependencies.

-The action invokes `cargo metadata` to determine the current set of dependencies.
-
 Additionally, the action automatically works around
 [cargo#8603](https://github.com/rust-lang/cargo/issues/8603) /
 [actions/cache#403](https://github.com/actions/cache/issues/403) which would
 otherwise corrupt the cache on macOS builds.

-## Cache Limits and Control
-
-This specialized cache action is built on top of the upstream cache action
-maintained by GitHub. The same restrictions and limits apply, which are
-documented here:
-[Caching dependencies to speed up workflows](https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows)
-
-In particular, caches are currently limited to 10 GB in total and exceeding that
-limit will cause eviction of older caches.
-
-Caches from base branches are available to PRs, but not across unrelated
-branches.
-
-The caches can be controlled using the [Cache API](https://docs.github.com/en/rest/actions/cache)
-which allows listing existing caches and manually removing entries.
-
-## Debugging
-
-The action prints detailed information about which information it considers
-for its cache key, and it outputs more debug-only information about which
-cleanup steps it performs before persisting the cache.
-
-You can read up on how to [enable debug logging](https://docs.github.com/en/actions/monitoring-and-troubleshooting-workflows/enabling-debug-logging)
-to see those details as well as further details related to caching operations.
-
 ## Known issues

-- The cache cleaning process currently removes all the files from `~/.cargo/bin`
-  that were present before the action ran (for example `rustc`), by default.
-  This can be an issue on long-running self-hosted runners, where such state
-  is expected to be preserved across runs. You can work around this by setting
-  `cache-bin: "false"`.
+- The cache cleaning process currently only runs against the build artifacts under
+  `./target/debug/`, so projects using release or cross-compiled builds will experience
+  larger cache sizes.

TODO.md

@@ -1,3 +0,0 @@
- better .cargo/bin handling:
  - get a list of all the files on "pre"/"restore"
  - move the files out of the way on "post"/"save" and move them back afterwards

action.yml

@@ -1,62 +1,27 @@
name: "Rust Cache" name: "Rust Cache"
description: "A GitHub Action that implements smart caching for rust/cargo projects with sensible defaults." description: "A GitHub Action that implements smart caching for rust/cargo projects with sensible defaults."
author: "Arpad Borsos <swatinem@swatinem.de>" author: "Arpad Borsos <arpad.borsos@googlemail.com>"
inputs: inputs:
prefix-key:
description: "The prefix cache key, this can be changed to start a new cache manually."
required: false
default: "v0-rust"
shared-key:
description: "A cache key that is used instead of the automatic `job`-based key, and is stable over multiple jobs."
required: false
key: key:
description: "An additional cache key that is added alongside the automatic `job`-based cache key and can be used to further differentiate jobs." description: "An additional key for the cache"
required: false required: false
env-vars: sharedKey:
description: "Additional environment variables to include in the cache key, separated by spaces." description: "An additional cache key that is stable over multiple jobs"
required: false required: false
workspaces: working-directory:
description: "Paths to multiple Cargo workspaces and their target directories, separated by newlines." description: "The working directory this action should operate in"
required: false required: false
cache-directories: target-dir:
description: "Additional non workspace directories to be cached, separated by newlines." description: "The target dir that should be cleaned and persisted, defaults to `./target`"
required: false required: false
cache-targets:
description: "Determines whether workspace targets are cached. If `false`, only the cargo registry will be cached."
required: false
default: "true"
cache-on-failure: cache-on-failure:
description: "Cache even if the build fails. Defaults to false." description: "Cache even if the build fails. Defaults to false"
required: false required: false
cache-all-crates:
description: "Determines which crates are cached. If `true` all crates will be cached, otherwise only dependent crates will be cached."
required: false
default: "false"
cache-workspace-crates:
description: "Similar to cache-all-crates. If `true` the workspace crates will be cached."
required: false
default: "false"
save-if:
description: "Determiners whether the cache should be saved. If `false`, the cache is only restored."
required: false
default: "true"
cache-provider:
description: "Determines which provider to use for caching. Options are github, buildjet, or warpbuild. Defaults to github."
required: false
default: "github"
cache-bin:
description: "Determines whether to cache ${CARGO_HOME}/bin."
required: false
default: "true"
lookup-only:
description: "Check if a cache entry exists without downloading the cache"
required: false
default: "false"
outputs: outputs:
cache-hit: cache-hit:
description: "A boolean value that indicates an exact match was found." description: "A boolean value that indicates an exact match was found"
runs: runs:
using: "node20" using: "node12"
main: "dist/restore/index.js" main: "dist/restore/index.js"
post: "dist/save/index.js" post: "dist/save/index.js"
post-if: "success() || env.CACHE_ON_FAILURE == 'true'" post-if: "success() || env.CACHE_ON_FAILURE == 'true'"

dist/restore/index.js

File diff suppressed because one or more lines are too long.

dist/save/index.js

File diff suppressed because one or more lines are too long.

package-lock.json

File diff suppressed because it is too large.

package.json

@@ -1,7 +1,7 @@
 {
{ {
"private": true, "private": true,
"name": "rust-cache", "name": "rust-cache",
"version": "2.8.0", "version": "1.4.0",
"description": "A GitHub Action that implements smart caching for rust/cargo projects with sensible defaults.", "description": "A GitHub Action that implements smart caching for rust/cargo projects with sensible defaults.",
"keywords": [ "keywords": [
"actions", "actions",
@@ -22,22 +22,17 @@
   },
   "homepage": "https://github.com/Swatinem/rust-cache#readme",
   "dependencies": {
-    "@actions/buildjet-cache": "npm:github-actions.cache-buildjet@0.2.0",
-    "@actions/warpbuild-cache": "npm:github-actions.warp-cache@1.4.7",
-    "@actions/cache": "^4.0.5",
-    "@actions/core": "^1.11.1",
+    "@actions/cache": "^2.0.2",
+    "@actions/core": "^1.6.0",
     "@actions/exec": "^1.1.1",
-    "@actions/glob": "^0.5.0",
-    "@actions/io": "^1.1.3",
-    "smol-toml": "^1.4.2"
+    "@actions/glob": "^0.2.1",
+    "@actions/io": "^1.1.2"
   },
   "devDependencies": {
-    "@types/node": "^22.16.0",
-    "@vercel/ncc": "^0.38.3",
-    "linefix": "^0.1.1",
-    "typescript": "5.8.3"
+    "@vercel/ncc": "^0.33.3",
+    "typescript": "4.6.3"
   },
   "scripts": {
-    "prepare": "ncc build --target es2020 -o dist/restore src/restore.ts && ncc build --target es2020 -o dist/save src/save.ts && linefix dist"
+    "prepare": "ncc build --target es2020 -o dist/restore src/restore.ts && ncc build --target es2020 -o dist/save src/save.ts"
   }
 }

src/cleanup.ts

@@ -1,311 +0,0 @@
import * as core from "@actions/core";
import * as io from "@actions/io";
import fs from "fs";
import path from "path";
import { CARGO_HOME } from "./config";
import { exists } from "./utils";
import { Packages } from "./workspace";
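
/**
 * Removes everything from a `target` directory that should not be cached,
 * recursing into nested target directories (detected via `CACHEDIR.TAG` or
 * `.rustc_info.json`) and treating the remaining subdirectories as profile dirs.
 */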
export async function cleanTargetDir(targetDir: string, packages: Packages, checkTimestamp = false) {
  core.debug(`cleaning target directory "${targetDir}"`);

  // clean all sub-directories and remove all stray files
  let dir = await fs.promises.opendir(targetDir);
  for await (const dirent of dir) {
    if (dirent.isDirectory()) {
      let dirName = path.join(dir.path, dirent.name);
      // is it a profile dir, or a nested target dir?
      let isNestedTarget =
        (await exists(path.join(dirName, "CACHEDIR.TAG"))) || (await exists(path.join(dirName, ".rustc_info.json")));

      try {
        if (isNestedTarget) {
          await cleanTargetDir(dirName, packages, checkTimestamp);
        } else {
          await cleanProfileTarget(dirName, packages, checkTimestamp);
        }
      } catch {}
    } else if (dirent.name !== "CACHEDIR.TAG") {
      await rm(dir.path, dirent);
    }
  }
}
async function cleanProfileTarget(profileDir: string, packages: Packages, checkTimestamp = false) {
  core.debug(`cleaning profile directory "${profileDir}"`);

  // Quite a few testing utility crates store compilation artifacts as nested
  // workspaces under `target/tests`. Notably, `target/tests/target` and
  // `target/tests/trybuild`.
  if (path.basename(profileDir) === "tests") {
    try {
      // https://github.com/vertexclique/kaos/blob/9876f6c890339741cc5be4b7cb9df72baa5a6d79/src/cargo.rs#L25
      // https://github.com/eupn/macrotest/blob/c4151a5f9f545942f4971980b5d264ebcd0b1d11/src/cargo.rs#L27
      await cleanTargetDir(path.join(profileDir, "target"), packages, checkTimestamp);
    } catch {}
    try {
      // https://github.com/dtolnay/trybuild/blob/eec8ca6cb9b8f53d0caf1aa499d99df52cae8b40/src/cargo.rs#L50
      await cleanTargetDir(path.join(profileDir, "trybuild"), packages, checkTimestamp);
    } catch {}

    // Delete everything else.
    await rmExcept(profileDir, new Set(["target", "trybuild"]), checkTimestamp);
    return;
  }

  let keepProfile = new Set(["build", ".fingerprint", "deps"]);
  await rmExcept(profileDir, keepProfile);

  const keepPkg = new Set(packages.map((p) => p.name));
  await rmExcept(path.join(profileDir, "build"), keepPkg, checkTimestamp);
  await rmExcept(path.join(profileDir, ".fingerprint"), keepPkg, checkTimestamp);

  const keepDeps = new Set(
    packages.flatMap((p) => {
      const names = [];
      for (const n of [p.name, ...p.targets]) {
        const name = n.replace(/-/g, "_");
        names.push(name, `lib${name}`);
      }
      return names;
    }),
  );
  await rmExcept(path.join(profileDir, "deps"), keepDeps, checkTimestamp);
}
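
/**
 * Returns the set of binaries that cargo has installed into `$CARGO_HOME/bin`,
 * as recorded in `.crates2.json`; if that file is unreadable, an empty set is
 * returned.
 */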
export async function getCargoBins(): Promise<Set<string>> {
  const bins = new Set<string>();
  try {
    const { installs }: { installs: { [key: string]: { bins: Array<string> } } } = JSON.parse(
      await fs.promises.readFile(path.join(CARGO_HOME, ".crates2.json"), "utf8"),
    );
    for (const pkg of Object.values(installs)) {
      for (const bin of pkg.bins) {
        bins.add(bin);
      }
    }
  } catch {}
  return bins;
}
/**
 * Clean the cargo bin directory, removing the binaries that existed
 * when the action started, as they were not created by the build.
 *
 * @param oldBins The binaries that existed when the action started.
 */
export async function cleanBin(oldBins: Array<string>) {
  const bins = await getCargoBins();

  for (const bin of oldBins) {
    bins.delete(bin);
  }

  const dir = await fs.promises.opendir(path.join(CARGO_HOME, "bin"));
  for await (const dirent of dir) {
    if (dirent.isFile() && !bins.has(dirent.name)) {
      await rm(dir.path, dirent);
    }
  }
}
export async function cleanRegistry(packages: Packages, crates = true) {
  // remove `.cargo/credentials.toml` (CARGO_HOME already points at `.cargo`)
  try {
    const credentials = path.join(CARGO_HOME, "credentials.toml");
    core.debug(`deleting "${credentials}"`);
    await fs.promises.unlink(credentials);
  } catch {}

  // `.cargo/registry/index`
  let pkgSet = new Set(packages.map((p) => p.name));
  const indexDir = await fs.promises.opendir(path.join(CARGO_HOME, "registry", "index"));
  for await (const dirent of indexDir) {
    if (dirent.isDirectory()) {
      // eg `.cargo/registry/index/github.com-1ecc6299db9ec823`
      // or `.cargo/registry/index/index.crates.io-e139d0d48fed7772`
      const dirPath = path.join(indexDir.path, dirent.name);

      // for a git registry, we can remove `.cache`, as cargo will recreate it from git
      if (await exists(path.join(dirPath, ".git"))) {
        await rmRF(path.join(dirPath, ".cache"));
      } else {
        await cleanRegistryIndexCache(dirPath, pkgSet);
      }
    }
  }

  if (!crates) {
    core.debug("skipping registry cache and src cleanup");
    return;
  }

  // `.cargo/registry/src`
  // Cargo usually re-creates these from the `.crate` cache below,
  // but for some reason that does not work for `-sys` crates that check timestamps
  // to decide if rebuilds are necessary.
  pkgSet = new Set(packages.filter((p) => p.name.endsWith("-sys")).map((p) => `${p.name}-${p.version}`));
  const srcDir = await fs.promises.opendir(path.join(CARGO_HOME, "registry", "src"));
  for await (const dirent of srcDir) {
    if (dirent.isDirectory()) {
      // eg `.cargo/registry/src/github.com-1ecc6299db9ec823`
      // or `.cargo/registry/src/index.crates.io-e139d0d48fed7772`
      const dir = await fs.promises.opendir(path.join(srcDir.path, dirent.name));
      for await (const dirent of dir) {
        if (dirent.isDirectory() && !pkgSet.has(dirent.name)) {
          await rmRF(path.join(dir.path, dirent.name));
        }
      }
    }
  }

  // `.cargo/registry/cache`
  pkgSet = new Set(packages.map((p) => `${p.name}-${p.version}.crate`));
  const cacheDir = await fs.promises.opendir(path.join(CARGO_HOME, "registry", "cache"));
  for await (const dirent of cacheDir) {
    if (dirent.isDirectory()) {
      // eg `.cargo/registry/cache/github.com-1ecc6299db9ec823`
      // or `.cargo/registry/cache/index.crates.io-e139d0d48fed7772`
      const dir = await fs.promises.opendir(path.join(cacheDir.path, dirent.name));
      for await (const dirent of dir) {
        // here we check that the downloaded `.crate` matches one from our dependencies
        if (dirent.isFile() && !pkgSet.has(dirent.name)) {
          await rm(dir.path, dirent);
        }
      }
    }
  }
}
/// Recursively walks and cleans the index `.cache`
async function cleanRegistryIndexCache(dirName: string, keepPkg: Set<string>) {
  let dirIsEmpty = true;
  const cacheDir = await fs.promises.opendir(dirName);
  for await (const dirent of cacheDir) {
    if (dirent.isDirectory()) {
      if (await cleanRegistryIndexCache(path.join(dirName, dirent.name), keepPkg)) {
        await rm(dirName, dirent);
      } else {
        dirIsEmpty &&= false;
      }
    } else {
      if (keepPkg.has(dirent.name)) {
        dirIsEmpty &&= false;
      } else {
        await rm(dirName, dirent);
      }
    }
  }
  return dirIsEmpty;
}
export async function cleanGit(packages: Packages) {
  const coPath = path.join(CARGO_HOME, "git", "checkouts");
  const dbPath = path.join(CARGO_HOME, "git", "db");
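  // Map of git repository directory name -> set of checked-out refs that are
  // still referenced by the current set of dependencies.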
  const repos = new Map<string, Set<string>>();
  for (const p of packages) {
    if (!p.path.startsWith(coPath)) {
      continue;
    }
    const [repo, ref] = p.path.slice(coPath.length + 1).split(path.sep);
    const refs = repos.get(repo);
    if (refs) {
      refs.add(ref);
    } else {
      repos.set(repo, new Set([ref]));
    }
  }

  // we have to keep both the clone, and the checkout, removing either will
  // trigger a rebuild

  // clean the db
  try {
    let dir = await fs.promises.opendir(dbPath);
    for await (const dirent of dir) {
      if (!repos.has(dirent.name)) {
        await rm(dir.path, dirent);
      }
    }
  } catch {}

  // clean the checkouts
  try {
    let dir = await fs.promises.opendir(coPath);
    for await (const dirent of dir) {
      const refs = repos.get(dirent.name);
      if (!refs) {
        await rm(dir.path, dirent);
        continue;
      }
      if (!dirent.isDirectory()) {
        continue;
      }
      const refsDir = await fs.promises.opendir(path.join(dir.path, dirent.name));
      for await (const dirent of refsDir) {
        if (!refs.has(dirent.name)) {
          await rm(refsDir.path, dirent);
        }
      }
    }
  } catch {}
}
const ONE_WEEK = 7 * 24 * 3600 * 1000;
/**
 * Removes all files or directories in `dirName` matching some criteria.
 *
 * When the `checkTimestamp` flag is set, this will also remove anything older
 * than one week.
 *
 * Otherwise, it will remove everything that does not match any string in the
 * `keepPrefix` set.
 * The matching strips any trailing `-$hash` suffix.
 */
async function rmExcept(dirName: string, keepPrefix: Set<string>, checkTimestamp = false) {
  const dir = await fs.promises.opendir(dirName);
  for await (const dirent of dir) {
    if (checkTimestamp) {
      const fileName = path.join(dir.path, dirent.name);
      const { mtime } = await fs.promises.stat(fileName);
      const isOutdated = Date.now() - mtime.getTime() > ONE_WEEK;

      if (isOutdated) {
        await rm(dir.path, dirent);
      }
      continue;
    }

    let name = dirent.name;

    // strip the trailing hash
    const idx = name.lastIndexOf("-");
    if (idx !== -1) {
      name = name.slice(0, idx);
    }

    if (!keepPrefix.has(name)) {
      await rm(dir.path, dirent);
    }
  }
}
async function rm(parent: string, dirent: fs.Dirent) {
  try {
    const fileName = path.join(parent, dirent.name);
    core.debug(`deleting "${fileName}"`);
    if (dirent.isFile()) {
      await fs.promises.unlink(fileName);
    } else if (dirent.isDirectory()) {
      await io.rmRF(fileName);
    }
  } catch {}
}

async function rmRF(dirName: string) {
  core.debug(`deleting "${dirName}"`);
  await io.rmRF(dirName);
}

src/common.ts

@@ -0,0 +1,265 @@
import * as core from "@actions/core";
import * as exec from "@actions/exec";
import * as glob from "@actions/glob";
import * as io from "@actions/io";
import crypto from "crypto";
import fs from "fs";
import os from "os";
import path from "path";
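
// Never fail the workflow because of a caching problem: surface unexpected
// errors as log warnings and keep going.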
process.on("uncaughtException", (e) => {
core.info(`[warning] ${e.message}`);
if (e.stack) {
core.info(e.stack);
}
});
const cwd = core.getInput("working-directory");
// TODO: this could be read from .cargo config file directly
const targetDir = core.getInput("target-dir") || "./target";
if (cwd) {
  process.chdir(cwd);
}
export const stateBins = "RUST_CACHE_BINS";
export const stateKey = "RUST_CACHE_KEY";
const stateHash = "RUST_CACHE_HASH";
const home = os.homedir();
const cargoHome = process.env.CARGO_HOME || path.join(home, ".cargo");
export const paths = {
  cargoHome,
  index: path.join(cargoHome, "registry/index"),
  cache: path.join(cargoHome, "registry/cache"),
  git: path.join(cargoHome, "git"),
  target: targetDir,
};
interface CacheConfig {
  paths: Array<string>;
  key: string;
  restoreKeys: Array<string>;
}
const RefKey = "GITHUB_REF";
export function isValidEvent(): boolean {
  return RefKey in process.env && Boolean(process.env[RefKey]);
}
export async function getCacheConfig(): Promise<CacheConfig> {
  let lockHash = core.getState(stateHash);
  if (!lockHash) {
    lockHash = await getLockfileHash();
    core.saveState(stateHash, lockHash);
  }
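
  // The resulting key has the shape
  //   v0-rust-[<sharedKey>-|<key>-<job>-]<release>-<host>-<commit-hash>-<lockfile-hash>
  // e.g. (illustrative values) `v0-rust-check-1.61.0-x86_64-unknown-linux-gnu-fe5b13d68b54-<hash>`.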
  let key = `v0-rust-`;

  const sharedKey = core.getInput("sharedKey");
  if (sharedKey) {
    key += `${sharedKey}-`;
  } else {
    const inputKey = core.getInput("key");
    if (inputKey) {
      key += `${inputKey}-`;
    }

    const job = process.env.GITHUB_JOB;
    if (job) {
      key += `${job}-`;
    }
  }

  key += await getRustKey();

  return {
    paths: [
      path.join(cargoHome, "bin"),
      path.join(cargoHome, ".crates2.json"),
      path.join(cargoHome, ".crates.toml"),
      paths.git,
      paths.cache,
      paths.index,
      paths.target,
    ],
    key: `${key}-${lockHash}`,
    restoreKeys: [key],
  };
}
export async function getCargoBins(): Promise<Set<string>> {
  try {
    const { installs }: { installs: { [key: string]: { bins: Array<string> } } } = JSON.parse(
      await fs.promises.readFile(path.join(paths.cargoHome, ".crates2.json"), "utf8"),
    );
    const bins = new Set<string>();
    for (const pkg of Object.values(installs)) {
      for (const bin of pkg.bins) {
        bins.add(bin);
      }
    }
    return bins;
  } catch {
    return new Set<string>();
  }
}
async function getRustKey(): Promise<string> {
  const rustc = await getRustVersion();
  return `${rustc.release}-${rustc.host}-${rustc["commit-hash"].slice(0, 12)}`;
}
interface RustVersion {
  host: string;
  release: string;
  "commit-hash": string;
}
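
// Parses the `key: value` lines of `rustc -vV`, whose output looks like
// (values illustrative):
//   rustc 1.61.0 (fe5b13d68 2022-05-18)
//   binary: rustc
//   commit-hash: fe5b13d68b54dcc1dbfdd42bcbd807a2dcf1e39d
//   host: x86_64-unknown-linux-gnu
//   release: 1.61.0
// The banner line contains no `:` pair, so the length-2 filter drops it.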
async function getRustVersion(): Promise<RustVersion> {
  const stdout = await getCmdOutput("rustc", ["-vV"]);
  let splits = stdout
    .split(/[\n\r]+/)
    .filter(Boolean)
    .map((s) => s.split(":").map((s) => s.trim()))
    .filter((s) => s.length === 2);
  return Object.fromEntries(splits);
}
export async function getCmdOutput(
  cmd: string,
  args: Array<string> = [],
  options: exec.ExecOptions = {},
): Promise<string> {
  let stdout = "";
  await exec.exec(cmd, args, {
    silent: true,
    listeners: {
      stdout(data) {
        stdout += data.toString();
      },
    },
    ...options,
  });
  return stdout;
}
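
// Hashes all Cargo manifests, lockfiles and toolchain files in a stable
// (sorted) order; the sha1 digest is truncated to 20 hex characters to keep
// the resulting cache key readable.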
async function getLockfileHash(): Promise<string> {
  const globber = await glob.create("**/Cargo.toml\n**/Cargo.lock\nrust-toolchain\nrust-toolchain.toml", {
    followSymbolicLinks: false,
  });
  const files = await globber.glob();
  files.sort((a, b) => a.localeCompare(b));

  const hasher = crypto.createHash("sha1");
  for (const file of files) {
    for await (const chunk of fs.createReadStream(file)) {
      hasher.update(chunk);
    }
  }
  return hasher.digest("hex").slice(0, 20);
}
export interface PackageDefinition {
  name: string;
  version: string;
  path: string;
  targets: Array<string>;
}

export type Packages = Array<PackageDefinition>;
interface Meta {
  packages: Array<{
    name: string;
    version: string;
    manifest_path: string;
    targets: Array<{ kind: Array<string>; name: string }>;
  }>;
}
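
// `cargo metadata` lists the whole dependency graph; dropping manifests that
// live under the current directory keeps only external dependencies, whose
// build artifacts are the ones worth caching and cleaning against.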
export async function getPackages(): Promise<Packages> {
  const cwd = process.cwd();
  const meta: Meta = JSON.parse(await getCmdOutput("cargo", ["metadata", "--all-features", "--format-version", "1"]));
  return meta.packages
    .filter((p) => !p.manifest_path.startsWith(cwd))
    .map((p) => {
      const targets = p.targets.filter((t) => t.kind[0] === "lib").map((t) => t.name);
      return { name: p.name, version: p.version, targets, path: path.dirname(p.manifest_path) };
    });
}
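
// Removes compiler bookkeeping as well as build artifacts that do not belong
// to the current set of dependencies before the target directory is persisted.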
export async function cleanTarget(packages: Packages) {
  await fs.promises.unlink(path.join(targetDir, "./.rustc_info.json"));

  await cleanProfileTarget(packages, "debug");
  await cleanProfileTarget(packages, "release");
}
async function cleanProfileTarget(packages: Packages, profile: string) {
  try {
    await fs.promises.access(path.join(targetDir, profile));
  } catch {
    return;
  }

  await io.rmRF(path.join(targetDir, profile, "./examples"));
  await io.rmRF(path.join(targetDir, profile, "./incremental"));

  let dir: fs.Dir;

  // remove all *files* from the profile directory
  dir = await fs.promises.opendir(path.join(targetDir, profile));
  for await (const dirent of dir) {
    if (dirent.isFile()) {
      await rm(dir.path, dirent);
    }
  }

  const keepPkg = new Set(packages.map((p) => p.name));
  await rmExcept(path.join(targetDir, profile, "./build"), keepPkg);
  await rmExcept(path.join(targetDir, profile, "./.fingerprint"), keepPkg);

  const keepDeps = new Set(
    packages.flatMap((p) => {
      const names = [];
      for (const n of [p.name, ...p.targets]) {
        const name = n.replace(/-/g, "_");
        names.push(name, `lib${name}`);
      }
      return names;
    }),
  );
  await rmExcept(path.join(targetDir, profile, "./deps"), keepDeps);
}
const oneWeek = 7 * 24 * 3600 * 1000;
export async function rmExcept(dirName: string, keepPrefix: Set<string>) {
  const dir = await fs.promises.opendir(dirName);
  for await (const dirent of dir) {
    let name = dirent.name;
    const idx = name.lastIndexOf("-");
    if (idx !== -1) {
      name = name.slice(0, idx);
    }
    const fileName = path.join(dir.path, dirent.name);
    const { mtime } = await fs.promises.stat(fileName);
    // We cannot tell from the name alone whether an entry is still needed,
    // so also drop anything older than one week.
    if (!keepPrefix.has(name) || Date.now() - mtime.getTime() > oneWeek) {
      await rm(dir.path, dirent);
    }
  }
}
export async function rm(parent: string, dirent: fs.Dirent) {
  try {
    const fileName = path.join(parent, dirent.name);
    core.debug(`deleting "${fileName}"`);
    if (dirent.isFile()) {
      await fs.promises.unlink(fileName);
    } else if (dirent.isDirectory()) {
      await io.rmRF(fileName);
    }
  } catch {}
}

src/config.ts

@@ -1,397 +0,0 @@
import * as core from "@actions/core";
import * as glob from "@actions/glob";
import crypto from "crypto";
import fs from "fs";
import fs_promises from "fs/promises";
import os from "os";
import path from "path";
import * as toml from "smol-toml";
import { getCargoBins } from "./cleanup";
import { CacheProvider, exists, getCmdOutput } from "./utils";
import { Workspace } from "./workspace";
const HOME = os.homedir();
export const CARGO_HOME = process.env.CARGO_HOME || path.join(HOME, ".cargo");
const STATE_CONFIG = "RUST_CACHE_CONFIG";
const HASH_LENGTH = 8;
export class CacheConfig {
  /** All the paths we want to cache */
  public cachePaths: Array<string> = [];
  /** The primary cache key */
  public cacheKey = "";
  /** The secondary (restore) key that only contains the prefix and environment */
  public restoreKey = "";

  /** Whether to cache CARGO_HOME/.bin */
  public cacheBin: boolean = true;

  /** The workspace configurations */
  public workspaces: Array<Workspace> = [];

  /** The cargo binaries present during main step */
  public cargoBins: Array<string> = [];

  /** The prefix portion of the cache key */
  private keyPrefix = "";
  /** The rust version considered for the cache key */
  private keyRust = "";
  /** The environment variables considered for the cache key */
  private keyEnvs: Array<string> = [];
  /** The files considered for the cache key */
  private keyFiles: Array<string> = [];

  private constructor() {}
  /**
   * Constructs a [`CacheConfig`] with all the paths and keys.
   *
   * This will read the action `input`s, and read and persist `state` as necessary.
   */
  static async new(): Promise<CacheConfig> {
    const self = new CacheConfig();

    // Construct key prefix:
    // This uses either the `shared-key` input,
    // or the `key` input combined with the `job` key.
    let key = core.getInput("prefix-key") || "v0-rust";

    const sharedKey = core.getInput("shared-key");
    if (sharedKey) {
      key += `-${sharedKey}`;
    } else {
      const inputKey = core.getInput("key");
      if (inputKey) {
        key += `-${inputKey}`;
      }

      const job = process.env.GITHUB_JOB;
      if (job) {
        key += `-${job}`;
      }
    }

    // Add runner OS and CPU architecture to the key to avoid cross-contamination of cache
    const runnerOS = os.type();
    const runnerArch = os.arch();
    key += `-${runnerOS}-${runnerArch}`;

    self.keyPrefix = key;

    // Construct environment portion of the key:
    // This consists of a hash that considers the rust version
    // as well as all the environment variables as given by a default list
    // and the `env-vars` input.
    // The env vars are sorted, matched by prefix and hashed into the
    // resulting environment hash.
    let hasher = crypto.createHash("sha1");
    const rustVersion = await getRustVersion();

    let keyRust = `${rustVersion.release} ${rustVersion.host}`;
    hasher.update(keyRust);
    hasher.update(rustVersion["commit-hash"]);
    keyRust += ` (${rustVersion["commit-hash"]})`;
    self.keyRust = keyRust;

    // these prefixes should cover most of the compiler / rust / cargo keys
    const envPrefixes = ["CARGO", "CC", "CFLAGS", "CXX", "CMAKE", "RUST"];
    envPrefixes.push(...core.getInput("env-vars").split(/\s+/).filter(Boolean));

    // sort the available env vars so we have a more stable hash
    const keyEnvs = [];
    const envKeys = Object.keys(process.env);
    envKeys.sort((a, b) => a.localeCompare(b));
    for (const key of envKeys) {
      const value = process.env[key];
      if (envPrefixes.some((prefix) => key.startsWith(prefix)) && value) {
        hasher.update(`${key}=${value}`);
        keyEnvs.push(key);
      }
    }

    self.keyEnvs = keyEnvs;

    key += `-${digest(hasher)}`;

    self.restoreKey = key;

    // Construct the lockfiles portion of the key:
    // This considers all the files found via globbing for various manifests
    // and lockfiles.

    self.cacheBin = core.getInput("cache-bin").toLowerCase() == "true";

    // Constructs the workspace config and paths to restore:
    // The workspaces are given using a `$workspace -> $target` syntax.

    const workspaces: Array<Workspace> = [];
    const workspacesInput = core.getInput("workspaces") || ".";
    for (const workspace of workspacesInput.trim().split("\n")) {
      let [root, target = "target"] = workspace.split("->").map((s) => s.trim());
      root = path.resolve(root);
      target = path.join(root, target);
      workspaces.push(new Workspace(root, target));
    }
    self.workspaces = workspaces;

    let keyFiles = await globFiles(".cargo/config.toml\nrust-toolchain\nrust-toolchain.toml");
    const parsedKeyFiles = []; // keyFiles that are parsed, pre-processed and hashed

    hasher = crypto.createHash("sha1");

    for (const workspace of workspaces) {
      const root = workspace.root;
      keyFiles.push(
        ...(await globFiles(
          `${root}/**/.cargo/config.toml\n${root}/**/rust-toolchain\n${root}/**/rust-toolchain.toml`,
        )),
      );

      const workspaceMembers = await workspace.getWorkspaceMembers();

      const cargo_manifests = sort_and_uniq(workspaceMembers.map((member) => path.join(member.path, "Cargo.toml")));

      for (const cargo_manifest of cargo_manifests) {
        try {
          const content = await fs_promises.readFile(cargo_manifest, { encoding: "utf8" });

          // Use any since TomlPrimitive is not exposed
          const parsed = toml.parse(content) as { [key: string]: any };

          if ("package" in parsed) {
            const pack = parsed.package;
            if ("version" in pack) {
              pack["version"] = "0.0.0";
            }
          }

          for (const prefix of ["", "build-", "dev-"]) {
            const section_name = `${prefix}dependencies`;
            if (!(section_name in parsed)) {
              continue;
            }
            const deps = parsed[section_name];

            for (const key of Object.keys(deps)) {
              const dep = deps[key];

              try {
                if ("path" in dep) {
                  dep.version = "0.0.0";
                  dep.path = "";
                }
              } catch (_e) {
                // Not an object, probably a string (version),
                // continue.
                continue;
              }
            }
          }

          hasher.update(JSON.stringify(parsed));

          parsedKeyFiles.push(cargo_manifest);
        } catch (e) {
          // Fallback to caching them as regular file
          core.warning(`Error parsing Cargo.toml manifest, fallback to caching entire file: ${e}`);
          keyFiles.push(cargo_manifest);
        }
      }

      const cargo_lock = path.join(workspace.root, "Cargo.lock");
      if (await exists(cargo_lock)) {
        try {
          const content = await fs_promises.readFile(cargo_lock, { encoding: "utf8" });
          const parsed = toml.parse(content);

          if ((parsed.version !== 3 && parsed.version !== 4) || !("package" in parsed)) {
            // Fallback to caching them as regular file since this action
            // can only handle Cargo.lock format versions 3 and 4
            core.warning("Unsupported Cargo.lock format, fallback to caching entire file");
            keyFiles.push(cargo_lock);
            continue;
          }

          // Packages without `[[package]].source` and `[[package]].checksum`
          // are the ones with `path = "..."` to crates within the workspace.
          const packages = (parsed.package as any[]).filter((p: any) => "source" in p || "checksum" in p);

          hasher.update(JSON.stringify(packages));

          parsedKeyFiles.push(cargo_lock);
        } catch (e) {
          // Fallback to caching them as regular file
          core.warning(`Error parsing Cargo.lock manifest, fallback to caching entire file: ${e}`);
          keyFiles.push(cargo_lock);
        }
      }
    }
    keyFiles = sort_and_uniq(keyFiles);

    for (const file of keyFiles) {
      for await (const chunk of fs.createReadStream(file)) {
        hasher.update(chunk);
      }
    }

    let lockHash = digest(hasher);

    keyFiles.push(...parsedKeyFiles);
    self.keyFiles = sort_and_uniq(keyFiles);

    key += `-${lockHash}`;
    self.cacheKey = key;

    self.cachePaths = [path.join(CARGO_HOME, "registry"), path.join(CARGO_HOME, "git")];
    if (self.cacheBin) {
      self.cachePaths = [
        path.join(CARGO_HOME, "bin"),
        path.join(CARGO_HOME, ".crates.toml"),
        path.join(CARGO_HOME, ".crates2.json"),
        ...self.cachePaths,
      ];
    }
    const cacheTargets = core.getInput("cache-targets").toLowerCase() || "true";
    if (cacheTargets === "true") {
      self.cachePaths.push(...workspaces.map((ws) => ws.target));
    }

    const cacheDirectories = core.getInput("cache-directories");
    for (const dir of cacheDirectories.trim().split(/\s+/).filter(Boolean)) {
      self.cachePaths.push(dir);
    }

    const bins = await getCargoBins();
    self.cargoBins = Array.from(bins.values());

    return self;
  }
  /**
   * Reads and returns the cache config from the action `state`.
   *
   * @throws {Error} if the state is not present.
   * @returns {CacheConfig} the configuration.
   * @see {@link CacheConfig#saveState}
   * @see {@link CacheConfig#new}
   */
  static fromState(): CacheConfig {
    const source = core.getState(STATE_CONFIG);
    if (!source) {
      throw new Error("Cache configuration not found in state");
    }

    const self = new CacheConfig();
    Object.assign(self, JSON.parse(source));
    self.workspaces = self.workspaces.map((w: any) => new Workspace(w.root, w.target));

    return self;
  }
  /**
   * Prints the configuration to the action log.
   */
  printInfo(cacheProvider: CacheProvider) {
    core.startGroup("Cache Configuration");
    core.info(`Cache Provider:`);
    core.info(` ${cacheProvider.name}`);
    core.info(`Workspaces:`);
    for (const workspace of this.workspaces) {
      core.info(` ${workspace.root}`);
    }
    core.info(`Cache Paths:`);
    for (const path of this.cachePaths) {
      core.info(` ${path}`);
    }
    core.info(`Restore Key:`);
    core.info(` ${this.restoreKey}`);
    core.info(`Cache Key:`);
    core.info(` ${this.cacheKey}`);
    core.info(`.. Prefix:`);
    core.info(` - ${this.keyPrefix}`);
    core.info(`.. Environment considered:`);
    core.info(` - Rust Version: ${this.keyRust}`);
    for (const env of this.keyEnvs) {
      core.info(` - ${env}`);
    }
    core.info(`.. Lockfiles considered:`);
    for (const file of this.keyFiles) {
      core.info(` - ${file}`);
    }
    core.endGroup();
  }
  /**
   * Saves the configuration to the state store.
   * This is used to restore the configuration in the post action.
   */
  saveState() {
    core.saveState(STATE_CONFIG, this);
  }
}
/**
* Checks if the cache is up to date.
*
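 * The config `state` is only persisted when the cache needs to be saved, so
 * an empty state implies that the restore step found an exact cache hit.
 *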
* @returns `true` if the cache is up to date, `false` otherwise.
*/
export function isCacheUpToDate(): boolean {
  return core.getState(STATE_CONFIG) === "";
}
/**
* Returns a hex digest of the given hasher truncated to `HASH_LENGTH`.
*
* @param hasher The hasher to digest.
* @returns The hex digest.
*/
function digest(hasher: crypto.Hash): string {
  return hasher.digest("hex").substring(0, HASH_LENGTH);
}
interface RustVersion {
  host: string;
  release: string;
  "commit-hash": string;
}
async function getRustVersion(): Promise<RustVersion> {
  const stdout = await getCmdOutput("rustc", ["-vV"]);
  let splits = stdout
    .split(/[\n\r]+/)
    .filter(Boolean)
    .map((s) => s.split(":").map((s) => s.trim()))
    .filter((s) => s.length === 2);
  return Object.fromEntries(splits);
}
async function globFiles(pattern: string): Promise<string[]> {
  const globber = await glob.create(pattern, {
    followSymbolicLinks: false,
  });
  // fs.statSync resolves the symbolic link and returns stat for the
  // file it pointed to, so isFile makes sure the resolved
  // file is actually a regular file.
  return (await globber.glob()).filter((file) => fs.statSync(file).isFile());
}
function sort_and_uniq(a: string[]) {
  return a
    .sort((a, b) => a.localeCompare(b))
    .reduce((accumulator: string[], currentValue: string) => {
      const len = accumulator.length;
      // If accumulator is empty or its last element != currentValue:
      // since the array is already sorted, elements with the same value
      // are grouped together to be contiguous in space.
      //
      // If currentValue != last element, then it must be unique.
      if (len == 0 || accumulator[len - 1].localeCompare(currentValue) != 0) {
        accumulator.push(currentValue);
      }
      return accumulator;
    }, []);
}

src/restore.ts

@@ -1,20 +1,9 @@
+import * as cache from "@actions/cache";
 import * as core from "@actions/core";
-
-import { cleanTargetDir } from "./cleanup";
-import { CacheConfig } from "./config";
-import { getCacheProvider, reportError } from "./utils";
-
-process.on("uncaughtException", (e) => {
-  core.error(e.message);
-  if (e.stack) {
-    core.error(e.stack);
-  }
-});
+
+import { cleanTarget, getCacheConfig, getCargoBins, getPackages, stateBins, stateKey } from "./common";

 async function run() {
-  const cacheProvider = getCacheProvider();
-
-  if (!cacheProvider.cache.isFeatureAvailable()) {
+  if (!cache.isFeatureAvailable()) {
     setCacheHitOutput(false);
     return;
   }
@@ -24,51 +13,40 @@ async function run() {
     if (cacheOnFailure !== "true") {
       cacheOnFailure = "false";
     }
-    var lookupOnly = core.getInput("lookup-only").toLowerCase() === "true";

     core.exportVariable("CACHE_ON_FAILURE", cacheOnFailure);
     core.exportVariable("CARGO_INCREMENTAL", 0);

-    const config = await CacheConfig.new();
-    config.printInfo(cacheProvider);
-    core.info("");
+    const { paths, key, restoreKeys } = await getCacheConfig();

-    core.info(`... ${lookupOnly ? "Checking" : "Restoring"} cache ...`);
-    const key = config.cacheKey;
-    // Pass a copy of cachePaths to avoid mutating the original array as reported by:
-    // https://github.com/actions/toolkit/pull/1378
-    // TODO: remove this once the underlying bug is fixed.
-    const restoreKey = await cacheProvider.cache.restoreCache(config.cachePaths.slice(), key, [config.restoreKey], {
-      lookupOnly,
-    });
+    const bins = await getCargoBins();
+    core.saveState(stateBins, JSON.stringify([...bins]));
+
+    core.info(`Restoring paths:\n ${paths.join("\n ")}`);
+    core.info(`In directory:\n ${process.cwd()}`);
+    core.info(`Using keys:\n ${[key, ...restoreKeys].join("\n ")}`);
+    const restoreKey = await cache.restoreCache(paths, key, restoreKeys);
     if (restoreKey) {
-      const match = restoreKey === key;
-      core.info(`${lookupOnly ? "Found" : "Restored from"} cache key "${restoreKey}" full match: ${match}.`);
-      if (!match) {
-        // pre-clean the target directory on cache mismatch
-        for (const workspace of config.workspaces) {
-          try {
-            await cleanTargetDir(workspace.target, [], true);
-          } catch {}
-        }
-
-        // We restored the cache but it is not a full match.
-        config.saveState();
+      core.info(`Restored from cache key "${restoreKey}".`);
+      core.saveState(stateKey, restoreKey);
+
+      if (restoreKey !== key) {
+        // pre-clean the target directory on cache mismatch
+        const packages = await getPackages();
+        await cleanTarget(packages);
       }

-      setCacheHitOutput(match);
+      setCacheHitOutput(restoreKey === key);
     } else {
       core.info("No cache found.");
-      config.saveState();

       setCacheHitOutput(false);
     }
   } catch (e) {
     setCacheHitOutput(false);

-    reportError(e);
+    core.info(`[warning] ${(e as any).message}`);
   }
+
+  process.exit();
 }

 function setCacheHitOutput(cacheHit: boolean): void {

View File

@ -1,95 +1,157 @@
+import * as cache from "@actions/cache";
 import * as core from "@actions/core";
 import * as exec from "@actions/exec";
+import * as glob from "@actions/glob";
+import * as io from "@actions/io";
+import fs from "fs";
+import path from "path";
-import { cleanBin, cleanGit, cleanRegistry, cleanTargetDir } from "./cleanup";
-import { CacheConfig, isCacheUpToDate } from "./config";
-import { getCacheProvider, reportError } from "./utils";
+import {
+  cleanTarget,
+  getCacheConfig,
+  getCargoBins,
+  getPackages,
+  Packages,
+  paths,
+  rm,
+  stateBins,
+  stateKey,
+} from "./common";
-process.on("uncaughtException", (e) => {
-  core.error(e.message);
-  if (e.stack) {
-    core.error(e.stack);
-  }
-});
 async function run() {
-  const cacheProvider = getCacheProvider();
-  const save = core.getInput("save-if").toLowerCase() || "true";
-  if (!(cacheProvider.cache.isFeatureAvailable() && save === "true")) {
+  if (!cache.isFeatureAvailable()) {
     return;
   }
   try {
-    if (isCacheUpToDate()) {
+    const { paths: savePaths, key } = await getCacheConfig();
+    if (core.getState(stateKey) === key) {
       core.info(`Cache up-to-date.`);
       return;
     }
-    const config = CacheConfig.fromState();
-    config.printInfo(cacheProvider);
-    core.info("");
     // TODO: remove this once https://github.com/actions/toolkit/pull/553 lands
-    if (process.env["RUNNER_OS"] == "macOS") {
-      await macOsWorkaround();
-    }
+    await macOsWorkaround();
-    const workspaceCrates = core.getInput("cache-workspace-crates").toLowerCase() || "false";
-    const allPackages = [];
-    for (const workspace of config.workspaces) {
-      const packages = await workspace.getPackagesOutsideWorkspaceRoot();
-      if (workspaceCrates === "true") {
-        const wsMembers = await workspace.getWorkspaceMembers();
-        packages.push(...wsMembers);
-      }
-      allPackages.push(...packages);
-      try {
-        core.info(`... Cleaning ${workspace.target} ...`);
-        await cleanTargetDir(workspace.target, packages);
-      } catch (e) {
-        core.debug(`${(e as any).stack}`);
-      }
-    }
+    const registryName = await getRegistryName();
+    const packages = await getPackages();
     try {
-      const crates = core.getInput("cache-all-crates").toLowerCase() || "false";
-      core.info(`... Cleaning cargo registry (cache-all-crates: ${crates}) ...`);
-      await cleanRegistry(allPackages, crates !== "true");
-    } catch (e) {
-      core.debug(`${(e as any).stack}`);
-    }
-    if (config.cacheBin) {
-      try {
-        core.info(`... Cleaning cargo/bin ...`);
-        await cleanBin(config.cargoBins);
-      } catch (e) {
-        core.debug(`${(e as any).stack}`);
-      }
-    }
+      await cleanRegistry(registryName, packages);
+    } catch {}
+    try {
+      await cleanBin();
+    } catch {}
     try {
-      core.info(`... Cleaning cargo git cache ...`);
-      await cleanGit(allPackages);
-    } catch (e) {
-      core.debug(`${(e as any).stack}`);
-    }
+      await cleanGit(packages);
+    } catch {}
-    core.info(`... Saving cache ...`);
-    // Pass a copy of cachePaths to avoid mutating the original array as reported by:
-    // https://github.com/actions/toolkit/pull/1378
-    // TODO: remove this once the underlying bug is fixed.
-    await cacheProvider.cache.saveCache(config.cachePaths.slice(), config.cacheKey);
+    try {
+      await cleanTarget(packages);
+    } catch {}
+    core.info(`Saving paths:\n ${savePaths.join("\n ")}`);
+    core.info(`In directory:\n ${process.cwd()}`);
+    core.info(`Using key:\n ${key}`);
+    await cache.saveCache(savePaths, key);
   } catch (e) {
-    reportError(e);
+    core.info(`[warning] ${(e as any).message}`);
  }
+  process.exit();
 }
 run();
async function getRegistryName(): Promise<string> {
const globber = await glob.create(`${paths.index}/**/.last-updated`, { followSymbolicLinks: false });
const files = await globber.glob();
if (files.length > 1) {
core.warning(`got multiple registries: "${files.join('", "')}"`);
}
const first = files.shift()!;
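// e.g. ".../registry/index/github.com-1ecc6299db9ec823/.last-updated"
// yields "github.com-1ecc6299db9ec823"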
return path.basename(path.dirname(first));
}
async function cleanBin() {
const bins = await getCargoBins();
const oldBins = JSON.parse(core.getState(stateBins));
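// subtracting the restore-time set leaves only binaries installed during this job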
for (const bin of oldBins) {
bins.delete(bin);
}
const dir = await fs.promises.opendir(path.join(paths.cargoHome, "bin"));
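// remove every file that is not a newly installed binary, so tools that were
// already on the runner do not end up in the cache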
for await (const dirent of dir) {
if (dirent.isFile() && !bins.has(dirent.name)) {
await rm(dir.path, dirent);
}
}
}
async function cleanRegistry(registryName: string, packages: Packages) {
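// the index .cache directory is regenerated by cargo on demand, so drop it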
await io.rmRF(path.join(paths.index, registryName, ".cache"));
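// keep only the .crate archives that back a package in the current dependency graph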
const pkgSet = new Set(packages.map((p) => `${p.name}-${p.version}.crate`));
const dir = await fs.promises.opendir(path.join(paths.cache, registryName));
for await (const dirent of dir) {
if (dirent.isFile() && !pkgSet.has(dirent.name)) {
await rm(dir.path, dirent);
}
}
}
async function cleanGit(packages: Packages) {
const coPath = path.join(paths.git, "checkouts");
const dbPath = path.join(paths.git, "db");
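// cargo keeps one bare clone per repository under git/db/<repo>-<hash>, and
// one checkout per used revision under git/checkouts/<repo>-<hash>/<rev>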
const repos = new Map<string, Set<string>>();
for (const p of packages) {
if (!p.path.startsWith(coPath)) {
continue;
}
const [repo, ref] = p.path.slice(coPath.length + 1).split(path.sep);
const refs = repos.get(repo);
if (refs) {
refs.add(ref);
} else {
repos.set(repo, new Set([ref]));
}
}
// we have to keep both the clone, and the checkout, removing either will
// trigger a rebuild
let dir: fs.Dir;
// clean the db
dir = await fs.promises.opendir(dbPath);
for await (const dirent of dir) {
if (!repos.has(dirent.name)) {
await rm(dir.path, dirent);
}
}
// clean the checkouts
dir = await fs.promises.opendir(coPath);
for await (const dirent of dir) {
const refs = repos.get(dirent.name);
if (!refs) {
await rm(dir.path, dirent);
continue;
}
if (!dirent.isDirectory()) {
continue;
}
const refsDir = await fs.promises.opendir(path.join(dir.path, dirent.name));
for await (const dirent of refsDir) {
if (!refs.has(dirent.name)) {
await rm(refsDir.path, dirent);
}
}
}
}
async function macOsWorkaround() {
  try {
    // Workaround for https://github.com/actions/cache/issues/403
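The page truncates this function too; given the linked issue, the shared body presumably flushes the disk cache via macOS purge (a sketch under that assumption, not recovered from this page):

async function macOsWorkaround() {
  try {
    // Workaround for https://github.com/actions/cache/issues/403
    await exec.exec("sudo", ["/usr/sbin/purge"], { silent: true });
  } catch {}
}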

@@ -1,89 +0,0 @@
import * as core from "@actions/core";
import * as exec from "@actions/exec";
import * as buildjetCache from "@actions/buildjet-cache";
import * as warpbuildCache from "@actions/warpbuild-cache";
import * as ghCache from "@actions/cache";
import fs from "fs";
export function reportError(e: any) {
const { commandFailed } = e;
if (commandFailed) {
core.error(`Command failed: ${commandFailed.command}`);
core.error(commandFailed.stderr);
} else {
core.error(`${e.stack}`);
}
}
export async function getCmdOutput(
cmd: string,
args: Array<string> = [],
options: exec.ExecOptions = {},
): Promise<string> {
let stdout = "";
let stderr = "";
try {
await exec.exec(cmd, args, {
silent: true,
listeners: {
stdout(data) {
stdout += data.toString();
},
stderr(data) {
stderr += data.toString();
},
},
...options,
});
} catch (e) {
(e as any).commandFailed = {
command: `${cmd} ${args.join(" ")}`,
stderr,
};
throw e;
}
return stdout;
}
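A hypothetical call site, for illustration only; the helper captures stdout silently and attaches the failed command plus its stderr to any thrown error:

const rustcVersion = await getCmdOutput("rustc", ["-vV"]);
core.info(`building with ${rustcVersion.split("\n")[0]}`);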
export interface GhCache {
isFeatureAvailable: typeof ghCache.isFeatureAvailable;
restoreCache: typeof ghCache.restoreCache;
saveCache: (paths: string[], key: string) => Promise<string | number>;
}
export interface CacheProvider {
name: string;
cache: GhCache;
}
export function getCacheProvider(): CacheProvider {
const cacheProvider = core.getInput("cache-provider");
let cache: GhCache;
switch (cacheProvider) {
case "github":
cache = ghCache;
break;
case "buildjet":
cache = buildjetCache;
break;
case "warpbuild":
cache = warpbuildCache;
break;
default:
throw new Error(`The \`cache-provider\` \`${cacheProvider}\` is not valid.`);
}
return {
name: cacheProvider,
cache: cache,
};
}
export async function exists(path: string) {
try {
await fs.promises.access(path);
return true;
} catch {
return false;
}
}

@@ -1,57 +0,0 @@
import * as core from "@actions/core";
import path from "path";
import { getCmdOutput } from "./utils";
const SAVE_TARGETS = new Set(["lib", "proc-macro"]);
export class Workspace {
constructor(public root: string, public target: string) {}
async getPackages(filter: (p: Meta["packages"][0]) => boolean, ...extraArgs: string[]): Promise<Packages> {
let packages: Packages = [];
try {
core.debug(`collecting metadata for "${this.root}"`);
const meta: Meta = JSON.parse(
await getCmdOutput("cargo", ["metadata", "--all-features", "--format-version", "1", ...extraArgs], {
cwd: this.root,
env: { "CARGO_ENCODED_RUSTFLAGS": "" },
}),
);
core.debug(`workspace "${this.root}" has ${meta.packages.length} packages`);
for (const pkg of meta.packages.filter(filter)) {
const targets = pkg.targets.filter((t) => t.kind.some((kind) => SAVE_TARGETS.has(kind))).map((t) => t.name);
packages.push({ name: pkg.name, version: pkg.version, targets, path: path.dirname(pkg.manifest_path) });
}
} catch (err) {
console.error(err);
}
return packages;
}
public async getPackagesOutsideWorkspaceRoot(): Promise<Packages> {
return await this.getPackages((pkg) => !pkg.manifest_path.startsWith(this.root));
}
public async getWorkspaceMembers(): Promise<Packages> {
return await this.getPackages((_) => true, "--no-deps");
}
}
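A minimal usage sketch with hypothetical paths, exercising only the API declared above:

const ws = new Workspace("/home/runner/work/repo/repo", "/home/runner/work/repo/repo/target");
const members = await ws.getWorkspaceMembers();
const external = await ws.getPackagesOutsideWorkspaceRoot();
core.debug(`collected ${members.length + external.length} packages`);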
export interface PackageDefinition {
name: string;
version: string;
path: string;
targets: Array<string>;
}
export type Packages = Array<PackageDefinition>;
interface Meta {
packages: Array<{
name: string;
version: string;
manifest_path: string;
targets: Array<{ kind: Array<string>; name: string }>;
}>;
}

tests/Cargo.lock (generated, 1780 lines changed)

File diff suppressed because it is too large.

@@ -1,17 +0,0 @@
[package]
publish = false
name = "rust-cache"
version = "0.1.0"
authors = ["Arpad Borsos <arpad.borsos@googlemail.com>"]
edition = "2021"
[dependencies]
reqwest = "0.12.1"
jsonpath_lib_polars_vendor = "0.0.1"
watto = { git = "https://github.com/getsentry/watto", rev = "39ccb9add289c1f23c89f40506f4a80b2f4011b9", features = ["strings"] }
[dev-dependencies]
trybuild = "1"
[target.'cfg(not(target_env = "msvc"))'.dependencies]
tikv-jemallocator = "0.6.0"

@@ -1,2 +0,0 @@
the `rust-toolchain` directory will be globbed,
and should not lead to any errors down the road

@@ -1,25 +0,0 @@
#[cfg(not(target_env = "msvc"))]
use tikv_jemallocator::Jemalloc;
#[cfg(not(target_env = "msvc"))]
#[global_allocator]
static GLOBAL: Jemalloc = Jemalloc;
fn main() {
println!("Hello, world!");
}
#[cfg(test)]
fn some_fn(input: bool) -> usize {
if input {
2 + 4
} else {
3_usize.saturating_add(5)
}
}
#[test]
fn some_test() {
assert_eq!(some_fn(true), 6);
assert_eq!(some_fn(false), 8);
}

@@ -1,6 +0,0 @@
#[test]
fn test_trybuild() {
let t = trybuild::TestCases::new();
t.pass("tests/trybuild/empty_main.rs");
t.compile_fail("tests/trybuild/fail_to_compile.rs");
}

tests/trybuild/empty_main.rs
@@ -1 +0,0 @@
fn main() {}

tests/trybuild/fail_to_compile.rs
@@ -1,3 +0,0 @@
fn main() {
"foobar".foobar();
}

@@ -1,5 +0,0 @@
error[E0599]: no method named `foobar` found for reference `&'static str` in the current scope
--> tests/trybuild/fail_to_compile.rs:2:14
|
2 | "foobar".foobar();
| ^^^^^^ method not found in `&'static str`

@@ -1,2 +0,0 @@
[build]
target = "wasm32-unknown-unknown"

File diff suppressed because it is too large.

@@ -1,6 +0,0 @@
[workspace]
resolver = "2"
members = [
"crates/one",
"crates/two",
]

@@ -1,13 +0,0 @@
[package]
publish = false
name = "wasm-one"
version = "0.1.0"
edition = "2021"
[dependencies]
reqwest = "0.12"
async-std = "1"
tracing = "0.1"
tracing-futures = "0.2"
serde = "1"
serde_json = "1"

@@ -1,8 +0,0 @@
[package]
publish = false
name = "wasm-two"
version = "0.1.0"
edition = "2021"
[dependencies]
clap = "4"

@@ -1,3 +0,0 @@
fn main() {
println!("Hello, world!");
}