Compare commits


No commits in common. "master" and "v2.2.0" have entirely different histories.

37 changed files with 82211 additions and 239761 deletions


@@ -1,50 +0,0 @@
# https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/optimizing-pr-creation-version-updates#setting-up-a-cooldown-period-for-dependency-updates
version: 2
updates:
- package-ecosystem: cargo
directories:
- tests
- tests/wasm-workspace
schedule:
interval: weekly
# https://docs.github.com/en/code-security/dependabot/working-with-dependabot/dependabot-options-reference#groups--
# 1 PR per week per group
groups:
cargo-major:
update-types: ["major"]
cargo-minor:
update-types: ["minor"]
cargo-patch:
update-types: ["patch"]
- package-ecosystem: github-actions
directory: /
schedule:
interval: weekly
groups:
actions:
# Combine all updates of the last week
patterns: ["*"]
- package-ecosystem: npm
directory: /
schedule:
interval: weekly
groups:
prd-major:
dependency-type: "production"
update-types: ["major"]
prd-minor:
dependency-type: "production"
update-types: ["minor"]
prd-patch:
dependency-type: "production"
update-types: ["patch"]
dev-major:
dependency-type: "development"
update-types: ["major"]
dev-minor:
dependency-type: "development"
update-types: ["minor"]
dev-patch:
dependency-type: "development"
update-types: ["patch"]


@@ -1,33 +0,0 @@
name: buildjet
on: [push, pull_request]
jobs:
buildjet:
if: github.repository == 'Swatinem/rust-cache'
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest, macos-latest, windows-latest]
name: Test buildjet provider on ${{ matrix.os }}
runs-on: ${{ matrix.os }}
env:
CARGO_TERM_COLOR: always
steps:
- uses: actions/checkout@v5
- run: rustup toolchain install stable --profile minimal --no-self-update
- uses: ./
with:
workspaces: tests
cache-provider: buildjet
- run: |
cargo check
cargo test
cargo build --release
working-directory: tests


@@ -1,46 +0,0 @@
name: check dist/
on:
push:
branches:
- master
paths-ignore:
- "**.md"
pull_request:
paths-ignore:
- "**.md"
workflow_dispatch:
jobs:
check-dist:
if: github.repository == 'Swatinem/rust-cache'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v5
- name: Setup Node.js 20.x
uses: actions/setup-node@v4
with:
node-version: 20.x
cache: npm
- name: Install dependencies
run: npm ci
- name: Rebuild the dist/ directory
run: npm run prepare
- name: Compare the expected and actual dist/ directories
run: |
if [ "$(git diff dist/ | wc -l)" -gt "0" ]; then
echo "Detected uncommitted changes after build. See status below:"
git diff
exit 1
fi
id: diff
- uses: actions/upload-artifact@v4
if: ${{ failure() && steps.diff.conclusion == 'failure' }}
with:
name: dist
path: dist/


@@ -4,7 +4,6 @@ on: [push, pull_request]
jobs:
coverage:
if: github.repository == 'Swatinem/rust-cache'
strategy:
fail-fast: false
matrix:
@@ -17,9 +16,13 @@ jobs:
CARGO_TERM_COLOR: always
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v3
- run: rustup toolchain install stable --profile minimal --component llvm-tools-preview --no-self-update
# When rustup is updated, it tries to replace its binary, which on Windows can be locked.
# This can result in CI failures, see: https://github.com/rust-lang/rustup/issues/3029
- run: |
rustup set auto-self-update disable
rustup toolchain install stable --profile minimal --component llvm-tools-preview
- uses: taiki-e/install-action@cargo-llvm-cov


@@ -1,25 +0,0 @@
# https://docs.github.com/en/code-security/dependabot/working-with-dependabot/automating-dependabot-with-github-actions#enabling-automerge-on-a-pull-request
name: Dependabot Automation
on: pull_request
permissions:
contents: write
pull-requests: write
jobs:
automerge:
runs-on: ubuntu-latest
if: github.event.pull_request.user.login == 'dependabot[bot]' && github.repository == 'Swatinem/rust-cache'
steps:
- name: Fetch metadata
id: metadata
uses: dependabot/fetch-metadata@v2
with:
github-token: "${{ secrets.GITHUB_TOKEN }}"
- name: Auto-merge Patch PRs
if: steps.metadata.outputs.update-type == 'version-update:semver-patch'
run: gh pr merge --auto --merge "$PR_URL"
env:
PR_URL: ${{github.event.pull_request.html_url}}
GH_TOKEN: ${{secrets.GITHUB_TOKEN}}


@@ -1,32 +0,0 @@
name: git-registry
on: [push, pull_request]
jobs:
git-registry:
if: github.repository == 'Swatinem/rust-cache'
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest, macos-latest, windows-latest]
name: Test cargo "git" registry on ${{ matrix.os }}
runs-on: ${{ matrix.os }}
env:
CARGO_TERM_COLOR: always
CARGO_REGISTRIES_CRATES_IO_PROTOCOL: git
steps:
- uses: actions/checkout@v5
- run: rustup toolchain install stable --profile minimal --no-self-update
- uses: ./
with:
workspaces: tests
- run: |
cargo check
cargo test
working-directory: tests


@@ -4,7 +4,6 @@ on: [push, pull_request]
jobs:
install:
if: github.repository == 'Swatinem/rust-cache'
strategy:
fail-fast: false
matrix:
@@ -17,9 +16,13 @@ jobs:
CARGO_TERM_COLOR: always
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v3
- run: rustup toolchain install stable --profile minimal --no-self-update
# When rustup is updated, it tries to replace its binary, which on Windows can be locked.
# This can result in CI failures, see: https://github.com/rust-lang/rustup/issues/3029
- run: |
rustup set auto-self-update disable
rustup toolchain install stable --profile minimal
- uses: ./


@@ -4,22 +4,25 @@ on: [push, pull_request]
jobs:
simple:
if: github.repository == 'Swatinem/rust-cache'
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest, macos-latest, windows-latest]
name: Test `cargo check/test/build` on ${{ matrix.os }}
name: Test `cargo check/test` on ${{ matrix.os }}
runs-on: ${{ matrix.os }}
env:
CARGO_TERM_COLOR: always
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v3
- run: rustup toolchain install stable --profile minimal --no-self-update
# When rustup is updated, it tries to replace its binary, which on Windows can be locked.
# This can result in CI failures, see: https://github.com/rust-lang/rustup/issues/3029
- run: |
rustup set auto-self-update disable
rustup toolchain install stable --profile minimal
- uses: ./
with:
@@ -28,5 +31,4 @@ jobs:
- run: |
cargo check
cargo test
cargo build --release
working-directory: tests

.github/workflows/sparse-registry.yml (new file, 36 lines)

@@ -0,0 +1,36 @@
name: sparse-registry
on: [push, pull_request]
jobs:
sparse-registry:
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest, macos-latest, windows-latest]
name: Test `cargo check/test` with sparse registry on ${{ matrix.os }}
runs-on: ${{ matrix.os }}
env:
CARGO_TERM_COLOR: always
CARGO_UNSTABLE_SPARSE_REGISTRY: true
steps:
- uses: actions/checkout@v3
# When rustup is updated, it tries to replace its binary, which on Windows can be locked.
# This can result in CI failures, see: https://github.com/rust-lang/rustup/issues/3029
- run: |
rustup set auto-self-update disable
rustup toolchain install nightly --profile minimal
rustup default nightly
- uses: ./
with:
workspaces: tests
- run: |
cargo check
cargo test
working-directory: tests


@@ -4,7 +4,6 @@ on: [push, pull_request]
jobs:
target-dir:
if: github.repository == 'Swatinem/rust-cache'
strategy:
fail-fast: false
matrix:
@@ -17,9 +16,13 @@ jobs:
CARGO_TERM_COLOR: always
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v3
- run: rustup toolchain install stable --profile minimal --no-self-update
# When rustup is updated, it tries to replace its binary, which on Windows can be locked.
# This can result in CI failures, see: https://github.com/rust-lang/rustup/issues/3029
- run: |
rustup set auto-self-update disable
rustup toolchain install stable --profile minimal
# the `workspaces` option has the format `$workspace -> $target-dir`
# and the `$target-dir` is relative to the `$workspace`.


@@ -4,7 +4,6 @@ on: [push, pull_request]
jobs:
workspaces:
if: github.repository == 'Swatinem/rust-cache'
strategy:
fail-fast: false
matrix:
@@ -17,9 +16,13 @@ jobs:
CARGO_TERM_COLOR: always
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v3
- run: rustup toolchain install stable --profile minimal --target wasm32-unknown-unknown --no-self-update
# When rustup is updated, it tries to replace its binary, which on Windows can be locked.
# This can result in CI failures, see: https://github.com/rust-lang/rustup/issues/3029
- run: |
rustup set auto-self-update disable
rustup toolchain install stable --profile minimal --target wasm32-unknown-unknown
- uses: ./
with:
@@ -27,10 +30,8 @@ jobs:
tests
tests/wasm-workspace
- name: cargo check (tests)
- run: cargo check
working-directory: tests
run: cargo check
- name: cargo check (tests/wasm-workspace)
- run: cargo check
working-directory: tests/wasm-workspace
run: cargo check


@@ -1,87 +1,5 @@
# Changelog
## 2.8.0
- Add support for `warpbuild` cache provider
- Add new `cache-workspace-crates` feature
## 2.7.8
- Include CPU arch in the cache key
## 2.7.7
- Also cache `cargo install` metadata
## 2.7.6
- Allow opting out of caching $CARGO_HOME/bin
- Add runner OS in cache key
- Adds an option to do lookup-only of the cache
## 2.7.5
- Support Cargo.lock format cargo-lock v4
- Only run macOsWorkaround() on macOS
## 2.7.3
- Work around upstream problem that causes cache saving to hang for minutes.
## 2.7.2
- Only key by `Cargo.toml` and `Cargo.lock` files of workspace members.
## 2.7.1
- Update toml parser to fix parsing errors.
## 2.7.0
- Properly cache `trybuild` tests.
## 2.6.2
- Fix `toml` parsing.
## 2.6.1
- Fix hash contributions of `Cargo.lock`/`Cargo.toml` files.
## 2.6.0
- Add "buildjet" as a second `cache-provider` backend.
- Clean up sparse registry index.
- Do not clean up src of `-sys` crates.
- Remove `.cargo/credentials.toml` before saving.
## 2.5.1
- Fix hash contribution of `Cargo.lock`.
## 2.5.0
- feat: Rm workspace crates version before caching.
- feat: Add hash of `.cargo/config.toml` to key.
## 2.4.0
- Fix cache key stability.
- Use 8 character hash components to reduce the key length, making it more readable.
## 2.3.0
- Add `cache-all-crates` option, which enables caching of crates installed by workflows.
- Add installed packages to cache key, so changes to workflows that install rust tools are detected and cached properly.
- Fix cache restore failures due to upstream bug.
- Fix `EISDIR` error due to globbed directories.
- Update runtime `@actions/cache`, `@actions/io` and dev `typescript` dependencies.
- Update `npm run prepare` so it creates distribution files with the right line endings.
## 2.2.1
- Update `@actions/cache` dependency to fix usage of `zstd` compression.
## 2.2.0
- Add new `save-if` option to always restore, but only conditionally save the cache.


@@ -6,7 +6,7 @@ sensible defaults.
## Example usage
```yaml
- uses: actions/checkout@v5
- uses: actions/checkout@v3
# selecting a toolchain either by action or manual `rustup` calls should happen
# before the plugin, as the cache uses the current rustc version as its cache key
@@ -54,41 +54,11 @@ sensible defaults.
# default: "false"
cache-on-failure: ""
# Determines which crates are cached.
# If `true` all crates will be cached, otherwise only dependent crates will be cached.
# Useful if additional crates are used for CI tooling.
# default: "false"
cache-all-crates: ""
# Similar to cache-all-crates.
# If `true` the workspace crates will be cached.
# Useful if the workspace contains libraries that are only updated sporadically.
# default: "false"
cache-workspace-crates: ""
# Determines whether the cache should be saved.
# Determiners whether the cache should be saved.
# If `false`, the cache is only restored.
# Useful for jobs where the matrix is additive e.g. additional Cargo features,
# or when only runs from `master` should be saved to the cache.
# Useful for jobs where the matrix is additive e.g. additional Cargo features.
# default: "true"
save-if: ""
# To only cache runs from `master`:
save-if: ${{ github.ref == 'refs/heads/master' }}
# Determines whether the cache should be restored.
# If `true` the cache key will be checked and the `cache-hit` output will be set
# but the cache itself won't be restored
# default: "false"
lookup-only: ""
# Specifies what to use as the backend providing the cache
# Can be set to "github", "buildjet", or "warpbuild"
# default: "github"
cache-provider: ""
# Determines whether to cache the ~/.cargo/bin directory.
# default: "true"
cache-bin: ""
```
Further examples are available in the [.github/workflows](./.github/workflows/) directory.
@@ -109,8 +79,7 @@ repositories with only a `Cargo.toml` file have limited benefits, as cargo will
_always_ use the most up-to-date dependency versions, which may not be cached.
Usage with Stable Rust is most effective, as a cache is tied to the Rust version.
Using it with Nightly Rust is less effective as it will throw away the cache every day,
unless a specific nightly build is being pinned.
Using it with Nightly Rust is less effective as it will throw away the cache every day.
## Cache Details
@@ -126,7 +95,6 @@ This cache is automatically keyed by:
- the value of some compiler-specific environment variables (e.g. RUSTFLAGS, etc.), and
- a hash of all `Cargo.lock` / `Cargo.toml` files found anywhere in the repository (if present).
- a hash of all `rust-toolchain` / `rust-toolchain.toml` files in the root of the repository (if present).
- a hash of all `.cargo/config.toml` files in the root of the repository (if present).
An additional input `key` can be provided if the builtin keys are not sufficient.
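As a rough sketch of how such a layered key can be assembled (the action truncates hash components to 8 hex characters via `digest()` in `src/config.ts`; the function and parameter names below are invented for illustration):

```ts
import crypto from "crypto";

// Invented names, sketching the layered key described above.
function shortDigest(hasher: crypto.Hash): string {
  return hasher.digest("hex").substring(0, 8); // short components keep the key readable
}

function buildCacheKey(rustcVersion: string, envValues: string[], lockfileContents: string[]): string {
  // prefix portion: tied to the compiler version
  let key = `v0-rust-${rustcVersion}`;

  // environment portion: compiler-specific env vars, etc.
  const envHasher = crypto.createHash("sha1");
  for (const value of envValues) envHasher.update(value);
  key += `-${shortDigest(envHasher)}`; // everything up to here is the restore key

  // lockfiles portion: Cargo.lock / Cargo.toml / toolchain files
  const lockHasher = crypto.createHash("sha1");
  for (const content of lockfileContents) lockHasher.update(content);
  return `${key}-${shortDigest(lockHasher)}`; // the full cache key
}
```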
@@ -162,7 +130,7 @@ otherwise corrupt the cache on macOS builds.
This specialized cache action is built on top of the upstream cache action
maintained by GitHub. The same restrictions and limits apply, which are
documented here:
[Caching dependencies to speed up workflows](https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows)
https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows
In particular, caches are currently limited to 10 GB in total and exceeding that
limit will cause eviction of older caches.
@@ -185,7 +153,4 @@ to see those details as well as further details related to caching operations.
## Known issues
- The cache cleaning process currently removes all the files from `~/.cargo/bin`
that were present before the action ran (for example `rustc`), by default.
This can be an issue on long-running self-hosted runners, where such state
is expected to be preserved across runs. You can work around this by setting
`cache-bin: "false"`.
that were present before the action ran (for example `rustc`).


@@ -1,3 +1,4 @@
- better .cargo/bin handling:
  - get a list of all the files on "pre"/"restore"
  - move the files out of the way on "post"/"save" and move them back afterwards (sketched below)
- properly clean sparse registry
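A hedged sketch of the `.cargo/bin` handling idea above, using plain `fs/promises` calls; the helper names are invented and this is not what the action currently implements:

```ts
import fs from "fs/promises";
import path from "path";

// "pre"/"restore": snapshot the files already present in ~/.cargo/bin
async function snapshotBins(binDir: string): Promise<string[]> {
  return await fs.readdir(binDir);
}

// "post"/"save": move the pre-existing files aside, save, then move them back
async function saveWithBinsAside(binDir: string, preExisting: string[], save: () => Promise<void>) {
  const aside = path.join(binDir, "..", "bin-aside");
  await fs.mkdir(aside, { recursive: true });
  for (const file of preExisting) {
    await fs.rename(path.join(binDir, file), path.join(aside, file));
  }
  try {
    await save(); // the saved cache now only contains binaries created by this run
  } finally {
    for (const file of preExisting) {
      await fs.rename(path.join(aside, file), path.join(binDir, file));
    }
  }
}
```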


@@ -28,35 +28,15 @@ inputs:
cache-on-failure:
description: "Cache even if the build fails. Defaults to false."
required: false
cache-all-crates:
description: "Determines which crates are cached. If `true` all crates will be cached, otherwise only dependent crates will be cached."
required: false
default: "false"
cache-workspace-crates:
description: "Similar to cache-all-crates. If `true` the workspace crates will be cached."
required: false
default: "false"
save-if:
description: "Determiners whether the cache should be saved. If `false`, the cache is only restored."
required: false
default: "true"
cache-provider:
description: "Determines which provider to use for caching. Options are github, buildjet, or warpbuild. Defaults to github."
required: false
default: "github"
cache-bin:
description: "Determines whether to cache ${CARGO_HOME}/bin."
required: false
default: "true"
lookup-only:
description: "Check if a cache entry exists without downloading the cache"
required: false
default: "false"
outputs:
cache-hit:
description: "A boolean value that indicates an exact match was found."
runs:
using: "node20"
using: "node16"
main: "dist/restore/index.js"
post: "dist/save/index.js"
post-if: "success() || env.CACHE_ON_FAILURE == 'true'"

dist/restore/index.js (157896 changed lines)

File diff suppressed because one or more lines are too long

dist/save/index.js (157916 changed lines)

File diff suppressed because one or more lines are too long

package-lock.json (2138 changed lines)

File diff suppressed because it is too large


@@ -1,7 +1,7 @@
{
"private": true,
"name": "rust-cache",
"version": "2.8.0",
"version": "2.2.0",
"description": "A GitHub Action that implements smart caching for rust/cargo projects with sensible defaults.",
"keywords": [
"actions",
@@ -22,22 +22,17 @@
},
"homepage": "https://github.com/Swatinem/rust-cache#readme",
"dependencies": {
"@actions/buildjet-cache": "npm:github-actions.cache-buildjet@0.2.0",
"@actions/warpbuild-cache": "npm:github-actions.warp-cache@1.4.7",
"@actions/cache": "^4.0.5",
"@actions/core": "^1.11.1",
"@actions/cache": "^3.0.6",
"@actions/core": "^1.10.0",
"@actions/exec": "^1.1.1",
"@actions/glob": "^0.5.0",
"@actions/io": "^1.1.3",
"smol-toml": "^1.4.2"
"@actions/glob": "^0.3.0",
"@actions/io": "^1.1.2"
},
"devDependencies": {
"@types/node": "^22.16.0",
"@vercel/ncc": "^0.38.3",
"linefix": "^0.1.1",
"typescript": "5.8.3"
"@vercel/ncc": "^0.34.0",
"typescript": "4.8.4"
},
"scripts": {
"prepare": "ncc build --target es2020 -o dist/restore src/restore.ts && ncc build --target es2020 -o dist/save src/save.ts && linefix dist"
"prepare": "ncc build --target es2020 -o dist/restore src/restore.ts && ncc build --target es2020 -o dist/save src/save.ts"
}
}


@@ -3,8 +3,7 @@ import * as io from "@actions/io";
import fs from "fs";
import path from "path";
import { CARGO_HOME } from "./config";
import { exists } from "./utils";
import { CARGO_HOME, STATE_BINS } from "./config";
import { Packages } from "./workspace";
export async function cleanTargetDir(targetDir: string, packages: Packages, checkTimestamp = false) {
@@ -35,26 +34,6 @@ export async function cleanTargetDir(targetDir: string, packages: Packages, chec
async function cleanProfileTarget(profileDir: string, packages: Packages, checkTimestamp = false) {
core.debug(`cleaning profile directory "${profileDir}"`);
// Quite a few testing utility crates store compilation artifacts as nested
// workspaces under `target/tests`. Notably, `target/tests/target` and
// `target/tests/trybuild`.
if (path.basename(profileDir) === "tests") {
try {
// https://github.com/vertexclique/kaos/blob/9876f6c890339741cc5be4b7cb9df72baa5a6d79/src/cargo.rs#L25
// https://github.com/eupn/macrotest/blob/c4151a5f9f545942f4971980b5d264ebcd0b1d11/src/cargo.rs#L27
cleanTargetDir(path.join(profileDir, "target"), packages, checkTimestamp);
} catch {}
try {
// https://github.com/dtolnay/trybuild/blob/eec8ca6cb9b8f53d0caf1aa499d99df52cae8b40/src/cargo.rs#L50
cleanTargetDir(path.join(profileDir, "trybuild"), packages, checkTimestamp);
} catch {}
// Delete everything else.
await rmExcept(profileDir, new Set(["target", "trybuild"]), checkTimestamp);
return;
}
let keepProfile = new Set(["build", ".fingerprint", "deps"]);
await rmExcept(profileDir, keepProfile);
@@ -90,14 +69,9 @@ export async function getCargoBins(): Promise<Set<string>> {
return bins;
}
/**
* Clean the cargo bin directory, removing the binaries that existed
* when the action started, as they were not created by the build.
*
* @param oldBins The binaries that existed when the action started.
*/
export async function cleanBin(oldBins: Array<string>) {
export async function cleanBin() {
const bins = await getCargoBins();
const oldBins = JSON.parse(core.getState(STATE_BINS));
for (const bin of oldBins) {
bins.delete(bin);
@@ -111,16 +85,12 @@
}
}
export async function cleanRegistry(packages: Packages, crates = true) {
// remove `.cargo/credentials.toml`
try {
const credentials = path.join(CARGO_HOME, ".cargo", "credentials.toml");
core.debug(`deleting "${credentials}"`);
await fs.promises.unlink(credentials);
} catch {}
export async function cleanRegistry(packages: Packages) {
// `.cargo/registry/src`
// we can remove this completely, as cargo will recreate this from `cache`
await rmRF(path.join(CARGO_HOME, "registry", "src"));
// `.cargo/registry/index`
let pkgSet = new Set(packages.map((p) => p.name));
const indexDir = await fs.promises.opendir(path.join(CARGO_HOME, "registry", "index"));
for await (const dirent of indexDir) {
if (dirent.isDirectory()) {
@@ -131,38 +101,14 @@ export async function cleanRegistry(packages: Packages, crates = true) {
// for a git registry, we can remove `.cache`, as cargo will recreate it from git
if (await exists(path.join(dirPath, ".git"))) {
await rmRF(path.join(dirPath, ".cache"));
} else {
await cleanRegistryIndexCache(dirPath, pkgSet);
}
// TODO: else, clean `.cache` based on the `packages`
}
}
if (!crates) {
core.debug("skipping registry cache and src cleanup");
return;
}
// `.cargo/registry/src`
// Cargo usually re-creates these from the `.crate` cache below,
// but for some reason that does not work for `-sys` crates that check timestamps
// to decide if rebuilds are necessary.
pkgSet = new Set(packages.filter((p) => p.name.endsWith("-sys")).map((p) => `${p.name}-${p.version}`));
const srcDir = await fs.promises.opendir(path.join(CARGO_HOME, "registry", "src"));
for await (const dirent of srcDir) {
if (dirent.isDirectory()) {
// eg `.cargo/registry/src/github.com-1ecc6299db9ec823`
// or `.cargo/registry/src/index.crates.io-e139d0d48fed7772`
const dir = await fs.promises.opendir(path.join(srcDir.path, dirent.name));
for await (const dirent of dir) {
if (dirent.isDirectory() && !pkgSet.has(dirent.name)) {
await rmRF(path.join(dir.path, dirent.name));
}
}
}
}
const pkgSet = new Set(packages.map((p) => `${p.name}-${p.version}.crate`));
// `.cargo/registry/cache`
pkgSet = new Set(packages.map((p) => `${p.name}-${p.version}.crate`));
const cacheDir = await fs.promises.opendir(path.join(CARGO_HOME, "registry", "cache"));
for await (const dirent of cacheDir) {
if (dirent.isDirectory()) {
@@ -179,28 +125,6 @@ export async function cleanRegistry(packages: Packages, crates = true) {
}
}
/// Recursively walks and cleans the index `.cache`
async function cleanRegistryIndexCache(dirName: string, keepPkg: Set<string>) {
let dirIsEmpty = true;
const cacheDir = await fs.promises.opendir(dirName);
for await (const dirent of cacheDir) {
if (dirent.isDirectory()) {
if (await cleanRegistryIndexCache(path.join(dirName, dirent.name), keepPkg)) {
await rm(dirName, dirent);
} else {
dirIsEmpty &&= false;
}
} else {
if (keepPkg.has(dirent.name)) {
dirIsEmpty &&= false;
} else {
await rm(dirName, dirent);
}
}
}
return dirIsEmpty;
}
export async function cleanGit(packages: Packages) {
const coPath = path.join(CARGO_HOME, "git", "checkouts");
const dbPath = path.join(CARGO_HOME, "git", "db");
@@ -257,10 +181,10 @@ const ONE_WEEK = 7 * 24 * 3600 * 1000;
/**
* Removes all files or directories in `dirName` matching some criteria.
*
* When the `checkTimestamp` flag is set, this will also remove anything older
* than one week.
*
* Otherwise, it will remove everything that does not match any string in the
* `keepPrefix` set.
* The matching strips any trailing `-$hash` suffix.
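The body of `rmExcept` is elided from this hunk; a minimal sketch consistent with the doc comment above could look like this, reusing the `rm` helper and `ONE_WEEK` constant visible nearby (the hash-stripping regex is an assumption):

```ts
async function rmExcept(dirName: string, keepPrefix: Set<string>, checkTimestamp = false) {
  const dir = await fs.promises.opendir(dirName);
  for await (const dirent of dir) {
    if (checkTimestamp) {
      // timestamp mode: remove anything older than one week
      const { mtime } = await fs.promises.stat(path.join(dirName, dirent.name));
      if (Date.now() - mtime.getTime() > ONE_WEEK) {
        await rm(dirName, dirent);
      }
      continue;
    }
    // prefix mode: strip a trailing `-$hash` suffix, then match against `keepPrefix`
    const name = dirent.name.replace(/-[0-9a-f]+$/, "");
    if (!keepPrefix.has(name)) {
      await rm(dirName, dirent);
    }
  }
}
```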
@@ -309,3 +233,12 @@ async function rmRF(dirName: string) {
core.debug(`deleting "${dirName}"`);
await io.rmRF(dirName);
}
async function exists(path: string) {
try {
await fs.promises.access(path);
return true;
} catch {
return false;
}
}

View File

@@ -2,20 +2,19 @@ import * as core from "@actions/core";
import * as glob from "@actions/glob";
import crypto from "crypto";
import fs from "fs";
import fs_promises from "fs/promises";
import os from "os";
import path from "path";
import * as toml from "smol-toml";
import { getCargoBins } from "./cleanup";
import { CacheProvider, exists, getCmdOutput } from "./utils";
import { getCmdOutput } from "./utils";
import { Workspace } from "./workspace";
const HOME = os.homedir();
export const CARGO_HOME = process.env.CARGO_HOME || path.join(HOME, ".cargo");
const STATE_CONFIG = "RUST_CACHE_CONFIG";
const HASH_LENGTH = 8;
const STATE_LOCKFILE_HASH = "RUST_CACHE_LOCKFILE_HASH";
const STATE_LOCKFILES = "RUST_CACHE_LOCKFILES";
export const STATE_BINS = "RUST_CACHE_BINS";
export const STATE_KEY = "RUST_CACHE_KEY";
export class CacheConfig {
/** All the paths we want to cache */
@@ -25,15 +24,9 @@ export class CacheConfig {
/** The secondary (restore) key that only contains the prefix and environment */
public restoreKey = "";
/** Whether to cache CARGO_HOME/.bin */
public cacheBin: boolean = true;
/** The workspace configurations */
public workspaces: Array<Workspace> = [];
/** The cargo binaries present during main step */
public cargoBins: Array<string> = [];
/** The prefix portion of the cache key */
private keyPrefix = "";
/** The rust version considered for the cache key */
@@ -74,11 +67,6 @@
}
}
// Add runner OS and CPU architecture to the key to avoid cross-contamination of cache
const runnerOS = os.type();
const runnerArch = os.arch();
key += `-${runnerOS}-${runnerArch}`;
self.keyPrefix = key;
// Construct environment portion of the key:
@@ -115,16 +103,20 @@
}
self.keyEnvs = keyEnvs;
key += `-${digest(hasher)}`;
key += `-${hasher.digest("hex")}`;
self.restoreKey = key;
// Construct the lockfiles portion of the key:
// This considers all the files found via globbing for various manifests
// and lockfiles.
// This part is computed in the "pre"/"restore" part of the job and persisted
// into the `state`. That state is loaded in the "post"/"save" part of the
// job so we have consistent values even though the "main" actions run
// might create/overwrite lockfiles.
self.cacheBin = core.getInput("cache-bin").toLowerCase() == "true";
let lockHash = core.getState(STATE_LOCKFILE_HASH);
let keyFiles: Array<string> = JSON.parse(core.getState(STATE_LOCKFILES) || "[]");
// Constructs the workspace config and paths to restore:
// The workspaces are given using a `$workspace -> $target` syntax.
@@ -139,122 +131,36 @@
}
self.workspaces = workspaces;
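The parsing loop for that syntax is cut from this hunk; a minimal sketch of the `$workspace -> $target` convention described in the comment above might look like this (illustrative, not the exact source):

```ts
import * as core from "@actions/core";
import path from "path";
import { Workspace } from "./workspace";

const workspaces: Array<Workspace> = [];
// e.g. "tests -> target": a workspace root, then a target dir relative to it
for (const line of (core.getInput("workspaces") || ".").trim().split("\n")) {
  const [root = ".", target = "target"] = line.split("->").map((s) => s.trim());
  const absRoot = path.resolve(root);
  workspaces.push(new Workspace(absRoot, path.join(absRoot, target)));
}
```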
let keyFiles = await globFiles(".cargo/config.toml\nrust-toolchain\nrust-toolchain.toml");
const parsedKeyFiles = []; // keyFiles that are parsed, pre-processed and hashed
if (!lockHash) {
keyFiles = keyFiles.concat(await globFiles("rust-toolchain\nrust-toolchain.toml"));
for (const workspace of workspaces) {
const root = workspace.root;
keyFiles.push(
...(await globFiles(
`${root}/**/Cargo.toml\n${root}/**/Cargo.lock\n${root}/**/rust-toolchain\n${root}/**/rust-toolchain.toml`,
)),
);
}
keyFiles.sort((a, b) => a.localeCompare(b));
hasher = crypto.createHash("sha1");
for (const workspace of workspaces) {
const root = workspace.root;
keyFiles.push(
...(await globFiles(
`${root}/**/.cargo/config.toml\n${root}/**/rust-toolchain\n${root}/**/rust-toolchain.toml`,
)),
);
const workspaceMembers = await workspace.getWorkspaceMembers();
const cargo_manifests = sort_and_uniq(workspaceMembers.map((member) => path.join(member.path, "Cargo.toml")));
for (const cargo_manifest of cargo_manifests) {
try {
const content = await fs_promises.readFile(cargo_manifest, { encoding: "utf8" });
// Use any since TomlPrimitive is not exposed
const parsed = toml.parse(content) as { [key: string]: any };
if ("package" in parsed) {
const pack = parsed.package;
if ("version" in pack) {
pack["version"] = "0.0.0";
}
}
for (const prefix of ["", "build-", "dev-"]) {
const section_name = `${prefix}dependencies`;
if (!(section_name in parsed)) {
continue;
}
const deps = parsed[section_name];
for (const key of Object.keys(deps)) {
const dep = deps[key];
try {
if ("path" in dep) {
dep.version = "0.0.0";
dep.path = "";
}
} catch (_e) {
// Not an object, probably a string (version),
// continue.
continue;
}
}
}
hasher.update(JSON.stringify(parsed));
parsedKeyFiles.push(cargo_manifest);
} catch (e) {
// Fall back to caching them as regular files
core.warning(`Error parsing Cargo.toml manifest, fallback to caching entire file: ${e}`);
keyFiles.push(cargo_manifest);
hasher = crypto.createHash("sha1");
for (const file of keyFiles) {
for await (const chunk of fs.createReadStream(file)) {
hasher.update(chunk);
}
}
lockHash = hasher.digest("hex");
const cargo_lock = path.join(workspace.root, "Cargo.lock");
if (await exists(cargo_lock)) {
try {
const content = await fs_promises.readFile(cargo_lock, { encoding: "utf8" });
const parsed = toml.parse(content);
if ((parsed.version !== 3 && parsed.version !== 4) || !("package" in parsed)) {
// Fall back to caching it as a regular file, since this action
// can only handle Cargo.lock format versions 3 and 4
core.warning("Unsupported Cargo.lock format, fallback to caching entire file");
keyFiles.push(cargo_lock);
continue;
}
// Packages without `[[package]].source` and `[[package]].checksum`
// are the ones with `path = "..."` pointing to crates within the workspace.
const packages = (parsed.package as any[]).filter((p: any) => "source" in p || "checksum" in p);
hasher.update(JSON.stringify(packages));
parsedKeyFiles.push(cargo_lock);
} catch (e) {
// Fall back to caching it as a regular file
core.warning(`Error parsing Cargo.lock manifest, fallback to caching entire file: ${e}`);
keyFiles.push(cargo_lock);
}
}
}
keyFiles = sort_and_uniq(keyFiles);
for (const file of keyFiles) {
for await (const chunk of fs.createReadStream(file)) {
hasher.update(chunk);
}
core.saveState(STATE_LOCKFILE_HASH, lockHash);
core.saveState(STATE_LOCKFILES, JSON.stringify(keyFiles));
}
let lockHash = digest(hasher);
keyFiles.push(...parsedKeyFiles);
self.keyFiles = sort_and_uniq(keyFiles);
self.keyFiles = keyFiles;
key += `-${lockHash}`;
self.cacheKey = key;
self.cachePaths = [path.join(CARGO_HOME, "registry"), path.join(CARGO_HOME, "git")];
if (self.cacheBin) {
self.cachePaths = [
path.join(CARGO_HOME, "bin"),
path.join(CARGO_HOME, ".crates.toml"),
path.join(CARGO_HOME, ".crates2.json"),
...self.cachePaths,
];
}
self.cachePaths = [CARGO_HOME];
const cacheTargets = core.getInput("cache-targets").toLowerCase() || "true";
if (cacheTargets === "true") {
self.cachePaths.push(...workspaces.map((ws) => ws.target));
@@ -265,40 +171,11 @@
self.cachePaths.push(dir);
}
const bins = await getCargoBins();
self.cargoBins = Array.from(bins.values());
return self;
}
/**
* Reads and returns the cache config from the action `state`.
*
* @throws {Error} if the state is not present.
* @returns {CacheConfig} the configuration.
* @see {@link CacheConfig#saveState}
* @see {@link CacheConfig#new}
*/
static fromState(): CacheConfig {
const source = core.getState(STATE_CONFIG);
if (!source) {
throw new Error("Cache configuration not found in state");
}
const self = new CacheConfig();
Object.assign(self, JSON.parse(source));
self.workspaces = self.workspaces.map((w: any) => new Workspace(w.root, w.target));
return self;
}
/**
* Prints the configuration to the action log.
*/
printInfo(cacheProvider: CacheProvider) {
printInfo() {
core.startGroup("Cache Configuration");
core.info(`Cache Provider:`);
core.info(` ${cacheProvider.name}`);
core.info(`Workspaces:`);
for (const workspace of this.workspaces) {
core.info(` ${workspace.root}`);
@@ -324,33 +201,6 @@
}
core.endGroup();
}
/**
* Saves the configuration to the state store.
* This is used to restore the configuration in the post action.
*/
saveState() {
core.saveState(STATE_CONFIG, this);
}
}
/**
* Checks if the cache is up to date.
*
* @returns `true` if the cache is up to date, `false` otherwise.
*/
export function isCacheUpToDate(): boolean {
return core.getState(STATE_CONFIG) === "";
}
/**
* Returns a hex digest of the given hasher truncated to `HASH_LENGTH`.
*
* @param hasher The hasher to digest.
* @returns The hex digest.
*/
function digest(hasher: crypto.Hash): string {
return hasher.digest("hex").substring(0, HASH_LENGTH);
}
interface RustVersion {
@@ -373,25 +223,5 @@ async function globFiles(pattern: string): Promise<string[]> {
const globber = await glob.create(pattern, {
followSymbolicLinks: false,
});
// fs.statSync resolves the symbolic link and returns the stat for the
// file it points to, so isFile makes sure the resolved
// file is actually a regular file.
return (await globber.glob()).filter((file) => fs.statSync(file).isFile());
}
function sort_and_uniq(a: string[]) {
return a
.sort((a, b) => a.localeCompare(b))
.reduce((accumulator: string[], currentValue: string) => {
const len = accumulator.length;
// If accumulator is empty or its last element != currentValue
// Since array is already sorted, elements with the same value
// are grouped together to be contiguous in space.
//
// If currentValue != last element, then it must be unique.
if (len == 0 || accumulator[len - 1].localeCompare(currentValue) != 0) {
accumulator.push(currentValue);
}
return accumulator;
}, []);
return await globber.glob();
}


@@ -1,20 +1,18 @@
import * as cache from "@actions/cache";
import * as core from "@actions/core";
import { cleanTargetDir } from "./cleanup";
import { CacheConfig } from "./config";
import { getCacheProvider, reportError } from "./utils";
import { cleanTargetDir, getCargoBins } from "./cleanup";
import { CacheConfig, STATE_BINS, STATE_KEY } from "./config";
process.on("uncaughtException", (e) => {
core.error(e.message);
core.info(`[warning] ${e.message}`);
if (e.stack) {
core.error(e.stack);
core.info(e.stack);
}
});
async function run() {
const cacheProvider = getCacheProvider();
if (!cacheProvider.cache.isFeatureAvailable()) {
if (!cache.isFeatureAvailable()) {
setCacheHitOutput(false);
return;
}
@@ -24,51 +22,43 @@
if (cacheOnFailure !== "true") {
cacheOnFailure = "false";
}
var lookupOnly = core.getInput("lookup-only").toLowerCase() === "true";
core.exportVariable("CACHE_ON_FAILURE", cacheOnFailure);
core.exportVariable("CARGO_INCREMENTAL", 0);
const config = await CacheConfig.new();
config.printInfo(cacheProvider);
config.printInfo();
core.info("");
core.info(`... ${lookupOnly ? "Checking" : "Restoring"} cache ...`);
const bins = await getCargoBins();
core.saveState(STATE_BINS, JSON.stringify([...bins]));
core.info(`... Restoring cache ...`);
const key = config.cacheKey;
// Pass a copy of cachePaths to avoid mutating the original array as reported by:
// https://github.com/actions/toolkit/pull/1378
// TODO: remove this once the underlying bug is fixed.
const restoreKey = await cacheProvider.cache.restoreCache(config.cachePaths.slice(), key, [config.restoreKey], {
lookupOnly,
});
const restoreKey = await cache.restoreCache(config.cachePaths, key, [config.restoreKey]);
if (restoreKey) {
const match = restoreKey === key;
core.info(`${lookupOnly ? "Found" : "Restored from"} cache key "${restoreKey}" full match: ${match}.`);
if (!match) {
core.info(`Restored from cache key "${restoreKey}".`);
core.saveState(STATE_KEY, restoreKey);
if (restoreKey !== key) {
// pre-clean the target directory on cache mismatch
for (const workspace of config.workspaces) {
try {
await cleanTargetDir(workspace.target, [], true);
} catch {}
}
// We restored the cache but it is not a full match.
config.saveState();
}
setCacheHitOutput(match);
setCacheHitOutput(restoreKey === key);
} else {
core.info("No cache found.");
config.saveState();
setCacheHitOutput(false);
}
} catch (e) {
setCacheHitOutput(false);
reportError(e);
core.info(`[warning] ${(e as any).stack}`);
}
process.exit();
}
function setCacheHitOutput(cacheHit: boolean): void {


@@ -1,91 +1,75 @@
import * as cache from "@actions/cache";
import * as core from "@actions/core";
import * as exec from "@actions/exec";
import { cleanBin, cleanGit, cleanRegistry, cleanTargetDir } from "./cleanup";
import { CacheConfig, isCacheUpToDate } from "./config";
import { getCacheProvider, reportError } from "./utils";
import { CacheConfig, STATE_KEY } from "./config";
process.on("uncaughtException", (e) => {
core.error(e.message);
core.info(`[warning] ${e.message}`);
if (e.stack) {
core.error(e.stack);
core.info(e.stack);
}
});
async function run() {
const cacheProvider = getCacheProvider();
const save = core.getInput("save-if").toLowerCase() || "true";
if (!(cacheProvider.cache.isFeatureAvailable() && save === "true")) {
if (!(cache.isFeatureAvailable() && save === "true")) {
return;
}
try {
if (isCacheUpToDate()) {
const config = await CacheConfig.new();
config.printInfo();
core.info("");
if (core.getState(STATE_KEY) === config.cacheKey) {
core.info(`Cache up-to-date.`);
return;
}
const config = CacheConfig.fromState();
config.printInfo(cacheProvider);
core.info("");
// TODO: remove this once https://github.com/actions/toolkit/pull/553 lands
if (process.env["RUNNER_OS"] == "macOS") {
await macOsWorkaround();
}
await macOsWorkaround();
const workspaceCrates = core.getInput("cache-workspace-crates").toLowerCase() || "false";
const allPackages = [];
for (const workspace of config.workspaces) {
const packages = await workspace.getPackagesOutsideWorkspaceRoot();
if (workspaceCrates === "true") {
const wsMembers = await workspace.getWorkspaceMembers();
packages.push(...wsMembers);
}
const packages = await workspace.getPackages();
allPackages.push(...packages);
try {
core.info(`... Cleaning ${workspace.target} ...`);
await cleanTargetDir(workspace.target, packages);
} catch (e) {
core.debug(`${(e as any).stack}`);
core.info(`[warning] ${(e as any).stack}`);
}
}
try {
const crates = core.getInput("cache-all-crates").toLowerCase() || "false";
core.info(`... Cleaning cargo registry (cache-all-crates: ${crates}) ...`);
await cleanRegistry(allPackages, crates !== "true");
core.info(`... Cleaning cargo registry ...`);
await cleanRegistry(allPackages);
} catch (e) {
core.debug(`${(e as any).stack}`);
core.info(`[warning] ${(e as any).stack}`);
}
if (config.cacheBin) {
try {
core.info(`... Cleaning cargo/bin ...`);
await cleanBin(config.cargoBins);
} catch (e) {
core.debug(`${(e as any).stack}`);
}
try {
core.info(`... Cleaning cargo/bin ...`);
await cleanBin();
} catch (e) {
core.info(`[warning] ${(e as any).stack}`);
}
try {
core.info(`... Cleaning cargo git cache ...`);
await cleanGit(allPackages);
} catch (e) {
core.debug(`${(e as any).stack}`);
core.info(`[warning] ${(e as any).stack}`);
}
core.info(`... Saving cache ...`);
// Pass a copy of cachePaths to avoid mutating the original array as reported by:
// https://github.com/actions/toolkit/pull/1378
// TODO: remove this once the underlying bug is fixed.
await cacheProvider.cache.saveCache(config.cachePaths.slice(), config.cacheKey);
await cache.saveCache(config.cachePaths, config.cacheKey);
} catch (e) {
reportError(e);
core.info(`[warning] ${(e as any).stack}`);
}
process.exit();
}
run();


@ -1,19 +1,5 @@
import * as core from "@actions/core";
import * as exec from "@actions/exec";
import * as buildjetCache from "@actions/buildjet-cache";
import * as warpbuildCache from "@actions/warpbuild-cache";
import * as ghCache from "@actions/cache";
import fs from "fs";
export function reportError(e: any) {
const { commandFailed } = e;
if (commandFailed) {
core.error(`Command failed: ${commandFailed.command}`);
core.error(commandFailed.stderr);
} else {
core.error(`${e.stack}`);
}
}
export async function getCmdOutput(
cmd: string,
@@ -36,54 +22,9 @@ export async function getCmdOutput(
...options,
});
} catch (e) {
(e as any).commandFailed = {
command: `${cmd} ${args.join(" ")}`,
stderr,
};
core.info(`[warning] Command failed: ${cmd} ${args.join(" ")}`);
core.info(`[warning] ${stderr}`);
throw e;
}
return stdout;
}
export interface GhCache {
isFeatureAvailable: typeof ghCache.isFeatureAvailable;
restoreCache: typeof ghCache.restoreCache;
saveCache: (paths: string[], key: string) => Promise<string | number>;
}
export interface CacheProvider {
name: string;
cache: GhCache;
}
export function getCacheProvider(): CacheProvider {
const cacheProvider = core.getInput("cache-provider");
let cache: GhCache;
switch (cacheProvider) {
case "github":
cache = ghCache;
break;
case "buildjet":
cache = buildjetCache;
break;
case "warpbuild":
cache = warpbuildCache;
break;
default:
throw new Error(`The \`cache-provider\` \`${cacheProvider}\` is not valid.`);
}
return {
name: cacheProvider,
cache: cache,
};
}
export async function exists(path: string) {
try {
await fs.promises.access(path);
return true;
} catch {
return false;
}
}


@@ -8,34 +8,26 @@ const SAVE_TARGETS = new Set(["lib", "proc-macro"]);
export class Workspace {
constructor(public root: string, public target: string) {}
async getPackages(filter: (p: Meta["packages"][0]) => boolean, ...extraArgs: string[]): Promise<Packages> {
public async getPackages(): Promise<Packages> {
let packages: Packages = [];
try {
core.debug(`collecting metadata for "${this.root}"`);
const meta: Meta = JSON.parse(
await getCmdOutput("cargo", ["metadata", "--all-features", "--format-version", "1", ...extraArgs], {
await getCmdOutput("cargo", ["metadata", "--all-features", "--format-version", "1"], {
cwd: this.root,
env: { "CARGO_ENCODED_RUSTFLAGS": "" },
}),
);
core.debug(`workspace "${this.root}" has ${meta.packages.length} packages`);
for (const pkg of meta.packages.filter(filter)) {
for (const pkg of meta.packages) {
if (pkg.manifest_path.startsWith(this.root)) {
continue;
}
const targets = pkg.targets.filter((t) => t.kind.some((kind) => SAVE_TARGETS.has(kind))).map((t) => t.name);
packages.push({ name: pkg.name, version: pkg.version, targets, path: path.dirname(pkg.manifest_path) });
}
} catch (err) {
console.error(err);
}
} catch {}
return packages;
}
public async getPackagesOutsideWorkspaceRoot(): Promise<Packages> {
return await this.getPackages((pkg) => !pkg.manifest_path.startsWith(this.root));
}
public async getWorkspaceMembers(): Promise<Packages> {
return await this.getPackages((_) => true, "--no-deps");
}
}
export interface PackageDefinition {

tests/Cargo.lock (1411 changed lines)

File diff suppressed because it is too large


@@ -6,12 +6,5 @@ authors = ["Arpad Borsos <arpad.borsos@googlemail.com>"]
edition = "2021"
[dependencies]
reqwest = "0.12.1"
jsonpath_lib_polars_vendor = "0.0.1"
watto = { git = "https://github.com/getsentry/watto", rev = "39ccb9add289c1f23c89f40506f4a80b2f4011b9", features = ["strings"] }
[dev-dependencies]
trybuild = "1"
[target.'cfg(not(target_env = "msvc"))'.dependencies]
tikv-jemallocator = "0.6.0"
reqwest = "0.11.11"
watto = { git = "https://github.com/getsentry/watto", rev = "d71c8218506bddba102a124a460d64da25e303dc", features = ["strings"] }


@@ -1,2 +0,0 @@
the `rust-toolchain` directory will be globbed,
and should not lead to any errors down the road


@@ -1,10 +1,3 @@
#[cfg(not(target_env = "msvc"))]
use tikv_jemallocator::Jemalloc;
#[cfg(not(target_env = "msvc"))]
#[global_allocator]
static GLOBAL: Jemalloc = Jemalloc;
fn main() {
println!("Hello, world!");
}


@@ -1,6 +0,0 @@
#[test]
fn test_trybuild() {
let t = trybuild::TestCases::new();
t.pass("tests/trybuild/empty_main.rs");
t.compile_fail("tests/trybuild/fail_to_compile.rs");
}


@@ -1 +0,0 @@
fn main() {}


@@ -1,3 +0,0 @@
fn main() {
"foobar".foobar();
}


@@ -1,5 +0,0 @@
error[E0599]: no method named `foobar` found for reference `&'static str` in the current scope
--> tests/trybuild/fail_to_compile.rs:2:14
|
2 | "foobar".foobar();
| ^^^^^^ method not found in `&'static str`

File diff suppressed because it is too large


@@ -1,5 +1,4 @@
[workspace]
resolver = "2"
members = [
"crates/one",
"crates/two",


@@ -5,7 +5,7 @@ version = "0.1.0"
edition = "2021"
[dependencies]
reqwest = "0.12"
reqwest = "0.11.0"
async-std = "1"
tracing = "0.1"
tracing-futures = "0.2"


@@ -5,4 +5,4 @@ edition = "2021"
edition = "2021"
[dependencies]
clap = "4"
clap = "3"