Compare commits


No commits in common. "master" and "v2.4.0" have entirely different histories.

38 changed files with 70917 additions and 237333 deletions

View File

@@ -1,50 +0,0 @@
# https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/optimizing-pr-creation-version-updates#setting-up-a-cooldown-period-for-dependency-updates
version: 2
updates:
- package-ecosystem: cargo
directories:
- tests
- tests/wasm-workspace
schedule:
interval: weekly
# https://docs.github.com/en/code-security/dependabot/working-with-dependabot/dependabot-options-reference#groups--
# 1 PR per week per group
groups:
cargo-major:
update-types: ["major"]
cargo-minor:
update-types: ["minor"]
cargo-patch:
update-types: ["patch"]
- package-ecosystem: github-actions
directory: /
schedule:
interval: weekly
groups:
actions:
# Combine all updates from the last week
patterns: ["*"]
- package-ecosystem: npm
directory: /
schedule:
interval: weekly
groups:
prd-major:
dependency-type: "production"
update-types: ["major"]
prd-minor:
dependency-type: "production"
update-types: ["minor"]
prd-patch:
dependency-type: "production"
update-types: ["patch"]
dev-major:
dependency-type: "development"
update-types: ["major"]
dev-minor:
dependency-type: "development"
update-types: ["minor"]
dev-patch:
dependency-type: "development"
update-types: ["patch"]

View File

@@ -1,33 +0,0 @@
name: buildjet
on: [push, pull_request]
jobs:
buildjet:
if: github.repository == 'Swatinem/rust-cache'
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest, macos-latest, windows-latest]
name: Test buildjet provider on ${{ matrix.os }}
runs-on: ${{ matrix.os }}
env:
CARGO_TERM_COLOR: always
steps:
- uses: actions/checkout@v5
- run: rustup toolchain install stable --profile minimal --no-self-update
- uses: ./
with:
workspaces: tests
cache-provider: buildjet
- run: |
cargo check
cargo test
cargo build --release
working-directory: tests

View File

@@ -5,23 +5,22 @@ on:
branches:
- master
paths-ignore:
- "**.md"
- '**.md'
pull_request:
paths-ignore:
- "**.md"
- '**.md'
workflow_dispatch:
jobs:
check-dist:
if: github.repository == 'Swatinem/rust-cache'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v3
- name: Setup Node.js 20.x
uses: actions/setup-node@v4
- name: Setup Node.js 16.x
uses: actions/setup-node@v3
with:
node-version: 20.x
node-version: 16.x
cache: npm
- name: Install dependencies
@@ -39,7 +38,7 @@ jobs:
fi
id: diff
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3
if: ${{ failure() && steps.diff.conclusion == 'failure' }}
with:
name: dist

View File

@@ -4,7 +4,6 @@ on: [push, pull_request]
jobs:
coverage:
if: github.repository == 'Swatinem/rust-cache'
strategy:
fail-fast: false
matrix:
@@ -17,9 +16,13 @@ jobs:
CARGO_TERM_COLOR: always
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v3
- run: rustup toolchain install stable --profile minimal --component llvm-tools-preview --no-self-update
# When rustup is updated, it tries to replace its binary, which on Windows is somehow locked.
# This can result in CI failures; see: https://github.com/rust-lang/rustup/issues/3029
- run: |
rustup set auto-self-update disable
rustup toolchain install stable --profile minimal --component llvm-tools-preview
- uses: taiki-e/install-action@cargo-llvm-cov

View File

@@ -1,25 +0,0 @@
# https://docs.github.com/en/code-security/dependabot/working-with-dependabot/automating-dependabot-with-github-actions#enabling-automerge-on-a-pull-request
name: Dependabot Automation
on: pull_request
permissions:
contents: write
pull-requests: write
jobs:
automerge:
runs-on: ubuntu-latest
if: github.event.pull_request.user.login == 'dependabot[bot]' && github.repository == 'Swatinem/rust-cache'
steps:
- name: Fetch metadata
id: metadata
uses: dependabot/fetch-metadata@v2
with:
github-token: "${{ secrets.GITHUB_TOKEN }}"
- name: Auto-merge Patch PRs
if: steps.metadata.outputs.update-type == 'version-update:semver-patch'
run: gh pr merge --auto --merge "$PR_URL"
env:
PR_URL: ${{github.event.pull_request.html_url}}
GH_TOKEN: ${{secrets.GITHUB_TOKEN}}

View File

@@ -1,32 +0,0 @@
name: git-registry
on: [push, pull_request]
jobs:
git-registry:
if: github.repository == 'Swatinem/rust-cache'
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest, macos-latest, windows-latest]
name: Test cargo "git" registry on ${{ matrix.os }}
runs-on: ${{ matrix.os }}
env:
CARGO_TERM_COLOR: always
CARGO_REGISTRIES_CRATES_IO_PROTOCOL: git
steps:
- uses: actions/checkout@v5
- run: rustup toolchain install stable --profile minimal --no-self-update
- uses: ./
with:
workspaces: tests
- run: |
cargo check
cargo test
working-directory: tests

View File

@@ -0,0 +1,23 @@
name: globbed-directories
on: [push, pull_request]
jobs:
simple:
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest]
name: Test `cargo check/test` on ${{ matrix.os }}
runs-on: ${{ matrix.os }}
env:
CARGO_TERM_COLOR: always
steps:
- uses: actions/checkout@v3
- name: Create folder that will be globbed
run: |
mkdir -p folder/rust-toolchain
- uses: ./

View File

@@ -4,7 +4,6 @@ on: [push, pull_request]
jobs:
install:
if: github.repository == 'Swatinem/rust-cache'
strategy:
fail-fast: false
matrix:
@@ -17,9 +16,13 @@ jobs:
CARGO_TERM_COLOR: always
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v3
- run: rustup toolchain install stable --profile minimal --no-self-update
# When rustup is updated, it tries to replace its binary, which on Windows is somehow locked.
# This can result in CI failures; see: https://github.com/rust-lang/rustup/issues/3029
- run: |
rustup set auto-self-update disable
rustup toolchain install stable --profile minimal
- uses: ./

View File

@@ -4,22 +4,25 @@ on: [push, pull_request]
jobs:
simple:
if: github.repository == 'Swatinem/rust-cache'
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest, macos-latest, windows-latest]
name: Test `cargo check/test/build` on ${{ matrix.os }}
name: Test `cargo check/test` on ${{ matrix.os }}
runs-on: ${{ matrix.os }}
env:
CARGO_TERM_COLOR: always
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v3
- run: rustup toolchain install stable --profile minimal --no-self-update
# When rustup is updated, it tries to replace its binary, which on Windows is somehow locked.
# This can result in CI failures; see: https://github.com/rust-lang/rustup/issues/3029
- run: |
rustup set auto-self-update disable
rustup toolchain install stable --profile minimal
- uses: ./
with:
@@ -28,5 +31,4 @@ jobs:
- run: |
cargo check
cargo test
cargo build --release
working-directory: tests

36
.github/workflows/sparse-registry.yml vendored Normal file
View File

@@ -0,0 +1,36 @@
name: sparse-registry
on: [push, pull_request]
jobs:
sparse-registry:
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest, macos-latest, windows-latest]
name: Test `cargo check/test` with sparse registry on ${{ matrix.os }}
runs-on: ${{ matrix.os }}
env:
CARGO_TERM_COLOR: always
CARGO_UNSTABLE_SPARSE_REGISTRY: true
steps:
- uses: actions/checkout@v3
# When rustup is updated, it tries to replace its binary, which on Windows is somehow locked.
# This can result in CI failures; see: https://github.com/rust-lang/rustup/issues/3029
- run: |
rustup set auto-self-update disable
rustup toolchain install nightly --profile minimal
rustup default nightly
- uses: ./
with:
workspaces: tests
- run: |
cargo check
cargo test
working-directory: tests

View File

@@ -4,7 +4,6 @@ on: [push, pull_request]
jobs:
target-dir:
if: github.repository == 'Swatinem/rust-cache'
strategy:
fail-fast: false
matrix:
@@ -17,9 +16,13 @@ jobs:
CARGO_TERM_COLOR: always
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v3
- run: rustup toolchain install stable --profile minimal --no-self-update
# When rustup is updated, it tries to replace its binary, which on Windows is somehow locked.
# This can result in CI failures; see: https://github.com/rust-lang/rustup/issues/3029
- run: |
rustup set auto-self-update disable
rustup toolchain install stable --profile minimal
# the `workspaces` option has the format `$workspace -> $target-dir`
# and the `$target-dir` is relative to the `$workspace`.
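To make the `$workspace -> $target-dir` mapping described above concrete, here is a minimal TypeScript sketch of how such an input could be parsed. `parseWorkspaces` is a hypothetical helper for illustration only; the action's real parsing lives in `src/config.ts`.

```ts
import path from "path";

// Hypothetical helper: parse `$workspace -> $target-dir` lines. The target
// directory defaults to `target` and is resolved relative to the workspace.
function parseWorkspaces(input: string): Array<{ root: string; target: string }> {
  return input
    .trim()
    .split("\n")
    .map((line) => {
      const [root, target = "target"] = line.split("->").map((s) => s.trim());
      return { root, target: path.join(root, target) };
    });
}
```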

View File

@@ -4,7 +4,6 @@ on: [push, pull_request]
jobs:
workspaces:
if: github.repository == 'Swatinem/rust-cache'
strategy:
fail-fast: false
matrix:
@@ -17,9 +16,13 @@ jobs:
CARGO_TERM_COLOR: always
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v3
- run: rustup toolchain install stable --profile minimal --target wasm32-unknown-unknown --no-self-update
# When rustup is updated, it tries to replace its binary, which on Windows is somehow locked.
# This can result in CI failures; see: https://github.com/rust-lang/rustup/issues/3029
- run: |
rustup set auto-self-update disable
rustup toolchain install stable --profile minimal --target wasm32-unknown-unknown
- uses: ./
with:
@@ -27,10 +30,8 @@ jobs:
tests
tests/wasm-workspace
- name: cargo check (tests)
- run: cargo check
working-directory: tests
run: cargo check
- name: cargo check (tests/wasm-workspace)
- run: cargo check
working-directory: tests/wasm-workspace
run: cargo check

View File

@@ -1,69 +1,5 @@
# Changelog
## 2.8.0
- Add support for `warpbuild` cache provider
- Add new `cache-workspace-crates` feature
## 2.7.8
- Include CPU arch in the cache key
## 2.7.7
- Also cache `cargo install` metadata
## 2.7.6
- Allow opting out of caching $CARGO_HOME/bin
- Add runner OS in cache key
- Adds an option to do lookup-only of the cache
## 2.7.5
- Support Cargo.lock format cargo-lock v4
- Only run macOsWorkaround() on macOS
## 2.7.3
- Work around upstream problem that causes cache saving to hang for minutes.
## 2.7.2
- Only key by `Cargo.toml` and `Cargo.lock` files of workspace members.
## 2.7.1
- Update toml parser to fix parsing errors.
## 2.7.0
- Properly cache `trybuild` tests.
## 2.6.2
- Fix `toml` parsing.
## 2.6.1
- Fix hash contributions of `Cargo.lock`/`Cargo.toml` files.
## 2.6.0
- Add "buildjet" as a second `cache-provider` backend.
- Clean up sparse registry index.
- Do not clean up src of `-sys` crates.
- Remove `.cargo/credentials.toml` before saving.
## 2.5.1
- Fix hash contribution of `Cargo.lock`.
## 2.5.0
- feat: Rm workspace crates version before caching.
- feat: Add hash of `.cargo/config.toml` to key.
## 2.4.0
- Fix cache key stability.

View File

@@ -6,7 +6,7 @@ sensible defaults.
## Example usage
```yaml
- uses: actions/checkout@v5
- uses: actions/checkout@v3
# selecting a toolchain either by action or manual `rustup` calls should happen
# before the plugin, as the cache uses the current rustc version as its cache key
@@ -60,35 +60,11 @@ sensible defaults.
# default: "false"
cache-all-crates: ""
# Similar to cache-all-crates.
# If `true` the workspace crates will be cached.
# Useful if the workspace contains libraries that are only updated sporadically.
# default: "false"
cache-workspace-crates: ""
# Determines whether the cache should be saved.
# Determiners whether the cache should be saved.
# If `false`, the cache is only restored.
# Useful for jobs where the matrix is additive e.g. additional Cargo features,
# or when only runs from `master` should be saved to the cache.
# Useful for jobs where the matrix is additive e.g. additional Cargo features.
# default: "true"
save-if: ""
# To only cache runs from `master`:
save-if: ${{ github.ref == 'refs/heads/master' }}
# Determines whether the cache should be restored.
# If `true` the cache key will be checked and the `cache-hit` output will be set
# but the cache itself won't be restored
# default: "false"
lookup-only: ""
# Specifies what to use as the backend providing cache
# Can be set to "github", "buildjet", or "warpbuild"
# default: "github"
cache-provider: ""
# Determines whether to cache the ~/.cargo/bin directory.
# default: "true"
cache-bin: ""
```
Further examples are available in the [.github/workflows](./.github/workflows/) directory.
@@ -109,8 +85,7 @@ repositories with only a `Cargo.toml` file have limited benefits, as cargo will
_always_ use the most up-to-date dependency versions, which may not be cached.
Usage with Stable Rust is most effective, as a cache is tied to the Rust version.
Using it with Nightly Rust is less effective as it will throw away the cache every day,
unless a specific nightly build is being pinned.
Using it with Nightly Rust is less effective as it will throw away the cache every day.
## Cache Details
@@ -126,7 +101,6 @@ This cache is automatically keyed by:
- the value of some compiler-specific environment variables (eg. RUSTFLAGS, etc), and
- a hash of all `Cargo.lock` / `Cargo.toml` files found anywhere in the repository (if present).
- a hash of all `rust-toolchain` / `rust-toolchain.toml` files in the root of the repository (if present).
- a hash of all `.cargo/config.toml` files in the root of the repository (if present).
An additional input `key` can be provided if the builtin keys are not sufficient.
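As a rough illustration of the file-hash portion of that key, the following is a hedged sketch mirroring the streaming SHA-1 approach visible in `src/config.ts` further down this diff. `hashKeyFiles` is an assumed name, not part of the action's API.

```ts
import crypto from "crypto";
import fs from "fs";

// Assumed helper: stream every matched lockfile/manifest into one digest
// that becomes the final component of the cache key.
async function hashKeyFiles(files: string[]): Promise<string> {
  const hasher = crypto.createHash("sha1");
  for (const file of files.sort()) {
    // hash file contents in streaming fashion to avoid loading large files
    for await (const chunk of fs.createReadStream(file)) {
      hasher.update(chunk);
    }
  }
  return hasher.digest("hex");
}
```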
@@ -185,7 +159,4 @@ to see those details as well as further details related to caching operations.
## Known issues
- The cache cleaning process currently removes all the files from `~/.cargo/bin`
that were present before the action ran (for example `rustc`), by default.
This can be an issue on long-running self-hosted runners, where such state
is expected to be preserved across runs. You can work around this by setting
`cache-bin: "false"`.
that were present before the action ran (for example `rustc`).
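A minimal sketch of the cleanup behavior this known issue describes, assuming a helper shaped like the `cleanBin` in `src/cleanup.ts` (the exact signature here is an assumption): binaries recorded before the action ran are deleted again before saving, so only binaries installed during the job end up in the cache.

```ts
import fs from "fs/promises";
import path from "path";

// Assumed shape: `oldBins` lists the names found in ~/.cargo/bin on restore.
async function cleanBin(binDir: string, oldBins: string[]) {
  const preExisting = new Set(oldBins);
  for (const name of await fs.readdir(binDir)) {
    // delete pre-existing binaries (e.g. rustup shims) so they are not cached
    if (preExisting.has(name)) {
      await fs.rm(path.join(binDir, name), { recursive: true, force: true });
    }
  }
}
```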

View File

@@ -1,3 +1,4 @@
- better .cargo/bin handling:
- get a list of all the files on "pre"/"restore"
- move the files out of the way on "post"/"save" and move them back afterwards
- properly clean sparse registry

View File

@@ -32,31 +32,15 @@ inputs:
description: "Determines which crates are cached. If `true` all crates will be cached, otherwise only dependent crates will be cached."
required: false
default: "false"
cache-workspace-crates:
description: "Similar to cache-all-crates. If `true` the workspace crates will be cached."
required: false
default: "false"
save-if:
description: "Determiners whether the cache should be saved. If `false`, the cache is only restored."
required: false
default: "true"
cache-provider:
description: "Determines which provider to use for caching. Options are github, buildjet, or warpbuild. Defaults to github."
required: false
default: "github"
cache-bin:
description: "Determines whether to cache ${CARGO_HOME}/bin."
required: false
default: "true"
lookup-only:
description: "Check if a cache entry exists without downloading the cache"
required: false
default: "false"
outputs:
cache-hit:
description: "A boolean value that indicates an exact match was found."
runs:
using: "node20"
using: "node16"
main: "dist/restore/index.js"
post: "dist/save/index.js"
post-if: "success() || env.CACHE_ON_FAILURE == 'true'"

150870
dist/restore/index.js vendored

File diff suppressed because one or more lines are too long

150880
dist/save/index.js vendored

File diff suppressed because one or more lines are too long

1943
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,7 +1,7 @@
{
"private": true,
"name": "rust-cache",
"version": "2.8.0",
"version": "2.4.0",
"description": "A GitHub Action that implements smart caching for rust/cargo projects with sensible defaults.",
"keywords": [
"actions",
@@ -22,20 +22,16 @@
},
"homepage": "https://github.com/Swatinem/rust-cache#readme",
"dependencies": {
"@actions/buildjet-cache": "npm:github-actions.cache-buildjet@0.2.0",
"@actions/warpbuild-cache": "npm:github-actions.warp-cache@1.4.7",
"@actions/cache": "^4.0.5",
"@actions/core": "^1.11.1",
"@actions/cache": "^3.2.1",
"@actions/core": "^1.10.0",
"@actions/exec": "^1.1.1",
"@actions/glob": "^0.5.0",
"@actions/io": "^1.1.3",
"smol-toml": "^1.4.2"
"@actions/glob": "^0.4.0",
"@actions/io": "^1.1.3"
},
"devDependencies": {
"@types/node": "^22.16.0",
"@vercel/ncc": "^0.38.3",
"@vercel/ncc": "^0.36.1",
"linefix": "^0.1.1",
"typescript": "5.8.3"
"typescript": "5.0.4"
},
"scripts": {
"prepare": "ncc build --target es2020 -o dist/restore src/restore.ts && ncc build --target es2020 -o dist/save src/save.ts && linefix dist"

View File

@@ -4,7 +4,6 @@ import fs from "fs";
import path from "path";
import { CARGO_HOME } from "./config";
import { exists } from "./utils";
import { Packages } from "./workspace";
export async function cleanTargetDir(targetDir: string, packages: Packages, checkTimestamp = false) {
@@ -35,26 +34,6 @@ export async function cleanTargetDir(targetDir: string, packages: Packages, chec
async function cleanProfileTarget(profileDir: string, packages: Packages, checkTimestamp = false) {
core.debug(`cleaning profile directory "${profileDir}"`);
// Quite a few testing utility crates store compilation artifacts as nested
// workspaces under `target/tests`. Notably, `target/tests/target` and
// `target/tests/trybuild`.
if (path.basename(profileDir) === "tests") {
try {
// https://github.com/vertexclique/kaos/blob/9876f6c890339741cc5be4b7cb9df72baa5a6d79/src/cargo.rs#L25
// https://github.com/eupn/macrotest/blob/c4151a5f9f545942f4971980b5d264ebcd0b1d11/src/cargo.rs#L27
cleanTargetDir(path.join(profileDir, "target"), packages, checkTimestamp);
} catch {}
try {
// https://github.com/dtolnay/trybuild/blob/eec8ca6cb9b8f53d0caf1aa499d99df52cae8b40/src/cargo.rs#L50
cleanTargetDir(path.join(profileDir, "trybuild"), packages, checkTimestamp);
} catch {}
// Delete everything else.
await rmExcept(profileDir, new Set(["target", "trybuild"]), checkTimestamp);
return;
}
let keepProfile = new Set(["build", ".fingerprint", "deps"]);
await rmExcept(profileDir, keepProfile);
@@ -112,15 +91,11 @@ export async function cleanBin(oldBins: Array<string>) {
}
export async function cleanRegistry(packages: Packages, crates = true) {
// remove `.cargo/credentials.toml`
try {
const credentials = path.join(CARGO_HOME, ".cargo", "credentials.toml");
core.debug(`deleting "${credentials}"`);
await fs.promises.unlink(credentials);
} catch {}
// `.cargo/registry/src`
// we can remove this completely, as cargo will recreate this from `cache`
await rmRF(path.join(CARGO_HOME, "registry", "src"));
// `.cargo/registry/index`
let pkgSet = new Set(packages.map((p) => p.name));
const indexDir = await fs.promises.opendir(path.join(CARGO_HOME, "registry", "index"));
for await (const dirent of indexDir) {
if (dirent.isDirectory()) {
@@ -131,38 +106,19 @@ export async function cleanRegistry(packages: Packages, crates = true) {
// for a git registry, we can remove `.cache`, as cargo will recreate it from git
if (await exists(path.join(dirPath, ".git"))) {
await rmRF(path.join(dirPath, ".cache"));
} else {
await cleanRegistryIndexCache(dirPath, pkgSet);
}
// TODO: else, clean `.cache` based on the `packages`
}
}
if (!crates) {
core.debug("skipping registry cache and src cleanup");
core.debug(`skipping crate cleanup`);
return;
}
// `.cargo/registry/src`
// Cargo usually re-creates these from the `.crate` cache below,
// but for some reason that does not work for `-sys` crates that check timestamps
// to decide if rebuilds are necessary.
pkgSet = new Set(packages.filter((p) => p.name.endsWith("-sys")).map((p) => `${p.name}-${p.version}`));
const srcDir = await fs.promises.opendir(path.join(CARGO_HOME, "registry", "src"));
for await (const dirent of srcDir) {
if (dirent.isDirectory()) {
// eg `.cargo/registry/src/github.com-1ecc6299db9ec823`
// or `.cargo/registry/src/index.crates.io-e139d0d48fed7772`
const dir = await fs.promises.opendir(path.join(srcDir.path, dirent.name));
for await (const dirent of dir) {
if (dirent.isDirectory() && !pkgSet.has(dirent.name)) {
await rmRF(path.join(dir.path, dirent.name));
}
}
}
}
const pkgSet = new Set(packages.map((p) => `${p.name}-${p.version}.crate`));
// `.cargo/registry/cache`
pkgSet = new Set(packages.map((p) => `${p.name}-${p.version}.crate`));
const cacheDir = await fs.promises.opendir(path.join(CARGO_HOME, "registry", "cache"));
for await (const dirent of cacheDir) {
if (dirent.isDirectory()) {
@@ -179,28 +135,6 @@ export async function cleanRegistry(packages: Packages, crates = true) {
}
}
/// Recursively walks and cleans the index `.cache`
async function cleanRegistryIndexCache(dirName: string, keepPkg: Set<string>) {
let dirIsEmpty = true;
const cacheDir = await fs.promises.opendir(dirName);
for await (const dirent of cacheDir) {
if (dirent.isDirectory()) {
if (await cleanRegistryIndexCache(path.join(dirName, dirent.name), keepPkg)) {
await rm(dirName, dirent);
} else {
dirIsEmpty &&= false;
}
} else {
if (keepPkg.has(dirent.name)) {
dirIsEmpty &&= false;
} else {
await rm(dirName, dirent);
}
}
}
return dirIsEmpty;
}
export async function cleanGit(packages: Packages) {
const coPath = path.join(CARGO_HOME, "git", "checkouts");
const dbPath = path.join(CARGO_HOME, "git", "db");
@@ -309,3 +243,12 @@ async function rmRF(dirName: string) {
core.debug(`deleting "${dirName}"`);
await io.rmRF(dirName);
}
async function exists(path: string) {
try {
await fs.promises.access(path);
return true;
} catch {
return false;
}
}
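The `rmExcept` helper called in this file is outside the shown hunks; the following is a hedged sketch inferred from its call sites. The timestamp-based retention implied by the `checkTimestamp` argument is omitted here.

```ts
// Sketch: remove every entry of `dirName` whose name is not in `keepPrefix`,
// leaving e.g. `build`, `.fingerprint` and `deps` in place. `rm` is the
// helper used elsewhere in this file; the real implementation also honors
// `checkTimestamp`, which this sketch ignores.
async function rmExcept(dirName: string, keepPrefix: Set<string>, checkTimestamp = false) {
  const dir = await fs.promises.opendir(dirName);
  for await (const dirent of dir) {
    if (!keepPrefix.has(dirent.name)) {
      await rm(dirName, dirent);
    }
  }
}
```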

View File

@@ -2,14 +2,12 @@ import * as core from "@actions/core";
import * as glob from "@actions/glob";
import crypto from "crypto";
import fs from "fs";
import fs_promises from "fs/promises";
import os from "os";
import path from "path";
import * as toml from "smol-toml";
import { getCargoBins } from "./cleanup";
import { CacheProvider, exists, getCmdOutput } from "./utils";
import { getCmdOutput } from "./utils";
import { Workspace } from "./workspace";
import { getCargoBins } from "./cleanup";
const HOME = os.homedir();
export const CARGO_HOME = process.env.CARGO_HOME || path.join(HOME, ".cargo");
@@ -25,9 +23,6 @@ export class CacheConfig {
/** The secondary (restore) key that only contains the prefix and environment */
public restoreKey = "";
/** Whether to cache CARGO_HOME/.bin */
public cacheBin: boolean = true;
/** The workspace configurations */
public workspaces: Array<Workspace> = [];
@@ -74,11 +69,6 @@ }
}
}
// Add runner OS and CPU architecture to the key to avoid cross-contamination of cache
const runnerOS = os.type();
const runnerArch = os.arch();
key += `-${runnerOS}-${runnerArch}`;
self.keyPrefix = key;
// Construct environment portion of the key:
@@ -124,8 +114,6 @@
// This considers all the files found via globbing for various manifests
// and lockfiles.
self.cacheBin = core.getInput("cache-bin").toLowerCase() == "true";
// Constructs the workspace config and paths to restore:
// The workspaces are given using a `$workspace -> $target` syntax.
@@ -139,122 +127,32 @@ }
}
self.workspaces = workspaces;
let keyFiles = await globFiles(".cargo/config.toml\nrust-toolchain\nrust-toolchain.toml");
const parsedKeyFiles = []; // keyFiles that are parsed, pre-processed and hashed
hasher = crypto.createHash("sha1");
let keyFiles = await globFiles("rust-toolchain\nrust-toolchain.toml");
for (const workspace of workspaces) {
const root = workspace.root;
keyFiles.push(
...(await globFiles(
`${root}/**/.cargo/config.toml\n${root}/**/rust-toolchain\n${root}/**/rust-toolchain.toml`,
`${root}/**/Cargo.toml\n${root}/**/Cargo.lock\n${root}/**/rust-toolchain\n${root}/**/rust-toolchain.toml`,
)),
);
const workspaceMembers = await workspace.getWorkspaceMembers();
const cargo_manifests = sort_and_uniq(workspaceMembers.map((member) => path.join(member.path, "Cargo.toml")));
for (const cargo_manifest of cargo_manifests) {
try {
const content = await fs_promises.readFile(cargo_manifest, { encoding: "utf8" });
// Use any since TomlPrimitive is not exposed
const parsed = toml.parse(content) as { [key: string]: any };
if ("package" in parsed) {
const pack = parsed.package;
if ("version" in pack) {
pack["version"] = "0.0.0";
}
}
keyFiles = keyFiles.filter(file => !fs.statSync(file).isDirectory());
keyFiles.sort((a, b) => a.localeCompare(b));
for (const prefix of ["", "build-", "dev-"]) {
const section_name = `${prefix}dependencies`;
if (!(section_name in parsed)) {
continue;
}
const deps = parsed[section_name];
for (const key of Object.keys(deps)) {
const dep = deps[key];
try {
if ("path" in dep) {
dep.version = "0.0.0";
dep.path = "";
}
} catch (_e) {
// Not an object, probably a string (version),
// continue.
continue;
}
}
}
hasher.update(JSON.stringify(parsed));
parsedKeyFiles.push(cargo_manifest);
} catch (e) {
// Fallback to caching them as regular file
core.warning(`Error parsing Cargo.toml manifest, fallback to caching entire file: ${e}`);
keyFiles.push(cargo_manifest);
}
}
const cargo_lock = path.join(workspace.root, "Cargo.lock");
if (await exists(cargo_lock)) {
try {
const content = await fs_promises.readFile(cargo_lock, { encoding: "utf8" });
const parsed = toml.parse(content);
if ((parsed.version !== 3 && parsed.version !== 4) || !("package" in parsed)) {
// Fallback to caching them as regular file since this action
// can only handle Cargo.lock format versions 3 and 4
core.warning("Unsupported Cargo.lock format, fallback to caching entire file");
keyFiles.push(cargo_lock);
continue;
}
// Packages without `[[package]].source` and `[[package]].checksum`
// are the ones with `path = "..."` pointing to crates within the workspace.
const packages = (parsed.package as any[]).filter((p: any) => "source" in p || "checksum" in p);
hasher.update(JSON.stringify(packages));
parsedKeyFiles.push(cargo_lock);
} catch (e) {
// Fallback to caching them as regular file
core.warning(`Error parsing Cargo.lock manifest, fallback to caching entire file: ${e}`);
keyFiles.push(cargo_lock);
}
}
}
keyFiles = sort_and_uniq(keyFiles);
hasher = crypto.createHash("sha1");
for (const file of keyFiles) {
for await (const chunk of fs.createReadStream(file)) {
hasher.update(chunk);
}
}
let lockHash = digest(hasher);
keyFiles.push(...parsedKeyFiles);
self.keyFiles = sort_and_uniq(keyFiles);
self.keyFiles = keyFiles;
key += `-${lockHash}`;
self.cacheKey = key;
self.cachePaths = [path.join(CARGO_HOME, "registry"), path.join(CARGO_HOME, "git")];
if (self.cacheBin) {
self.cachePaths = [
path.join(CARGO_HOME, "bin"),
path.join(CARGO_HOME, ".crates.toml"),
path.join(CARGO_HOME, ".crates2.json"),
...self.cachePaths,
];
}
self.cachePaths = [CARGO_HOME];
const cacheTargets = core.getInput("cache-targets").toLowerCase() || "true";
if (cacheTargets === "true") {
self.cachePaths.push(...workspaces.map((ws) => ws.target));
@@ -287,7 +185,8 @@
const self = new CacheConfig();
Object.assign(self, JSON.parse(source));
self.workspaces = self.workspaces.map((w: any) => new Workspace(w.root, w.target));
self.workspaces = self.workspaces
.map((w: any) => new Workspace(w.root, w.target));
return self;
}
@@ -295,10 +194,8 @@
/**
* Prints the configuration to the action log.
*/
printInfo(cacheProvider: CacheProvider) {
printInfo() {
core.startGroup("Cache Configuration");
core.info(`Cache Provider:`);
core.info(` ${cacheProvider.name}`);
core.info(`Workspaces:`);
for (const workspace of this.workspaces) {
core.info(` ${workspace.root}`);
@@ -373,25 +270,5 @@ async function globFiles(pattern: string): Promise<string[]> {
const globber = await glob.create(pattern, {
followSymbolicLinks: false,
});
// fs.statSync resolves the symbolic link and returns the stat for the
// file it points to, so isFile makes sure the resolved
// file is actually a regular file.
return (await globber.glob()).filter((file) => fs.statSync(file).isFile());
}
function sort_and_uniq(a: string[]) {
return a
.sort((a, b) => a.localeCompare(b))
.reduce((accumulator: string[], currentValue: string) => {
const len = accumulator.length;
// If accumulator is empty or its last element != currentValue
// Since array is already sorted, elements with the same value
// are grouped together to be contiguous in space.
//
// If currentValue != last element, then it must be unique.
if (len == 0 || accumulator[len - 1].localeCompare(currentValue) != 0) {
accumulator.push(currentValue);
}
return accumulator;
}, []);
return await globber.glob();
}
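The `digest` helper used above to produce `lockHash` is outside the shown hunks; a plausible implementation (the truncation length is an assumption) would be:

```ts
import crypto from "crypto";

// Assumed implementation: finalize the hash and keep a short hex prefix so
// the overall cache key stays compact.
function digest(hasher: crypto.Hash): string {
  return hasher.digest("hex").substring(0, 20);
}
```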

View File

@@ -1,8 +1,8 @@
import * as cache from "@actions/cache";
import * as core from "@actions/core";
import { cleanTargetDir } from "./cleanup";
import { CacheConfig } from "./config";
import { getCacheProvider, reportError } from "./utils";
process.on("uncaughtException", (e) => {
core.error(e.message);
@@ -12,9 +12,7 @@ process.on("uncaughtException", (e) => {
});
async function run() {
const cacheProvider = getCacheProvider();
if (!cacheProvider.cache.isFeatureAvailable()) {
if (!cache.isFeatureAvailable()) {
setCacheHitOutput(false);
return;
}
@@ -24,26 +22,22 @@ async function run() {
if (cacheOnFailure !== "true") {
cacheOnFailure = "false";
}
var lookupOnly = core.getInput("lookup-only").toLowerCase() === "true";
core.exportVariable("CACHE_ON_FAILURE", cacheOnFailure);
core.exportVariable("CARGO_INCREMENTAL", 0);
const config = await CacheConfig.new();
config.printInfo(cacheProvider);
config.printInfo();
core.info("");
core.info(`... ${lookupOnly ? "Checking" : "Restoring"} cache ...`);
core.info(`... Restoring cache ...`);
const key = config.cacheKey;
// Pass a copy of cachePaths to avoid mutating the original array as reported by:
// https://github.com/actions/toolkit/pull/1378
// TODO: remove this once the underlying bug is fixed.
const restoreKey = await cacheProvider.cache.restoreCache(config.cachePaths.slice(), key, [config.restoreKey], {
lookupOnly,
});
const restoreKey = await cache.restoreCache(config.cachePaths.slice(), key, [config.restoreKey]);
if (restoreKey) {
const match = restoreKey === key;
core.info(`${lookupOnly ? "Found" : "Restored from"} cache key "${restoreKey}" full match: ${match}.`);
core.info(`Restored from cache key "${restoreKey}" full match: ${match}.`);
if (!match) {
// pre-clean the target directory on cache mismatch
for (const workspace of config.workspaces) {
@@ -66,9 +60,8 @@ async function run() {
} catch (e) {
setCacheHitOutput(false);
reportError(e);
core.error(`${(e as any).stack}`);
}
process.exit();
}
function setCacheHitOutput(cacheHit: boolean): void {

View File

@@ -1,9 +1,9 @@
import * as cache from "@actions/cache";
import * as core from "@actions/core";
import * as exec from "@actions/exec";
import { cleanBin, cleanGit, cleanRegistry, cleanTargetDir } from "./cleanup";
import { CacheConfig, isCacheUpToDate } from "./config";
import { getCacheProvider, reportError } from "./utils";
process.on("uncaughtException", (e) => {
core.error(e.message);
@@ -13,11 +13,9 @@ process.on("uncaughtException", (e) => {
});
async function run() {
const cacheProvider = getCacheProvider();
const save = core.getInput("save-if").toLowerCase() || "true";
if (!(cacheProvider.cache.isFeatureAvailable() && save === "true")) {
if (!(cache.isFeatureAvailable() && save === "true")) {
return;
}
@@ -28,64 +26,54 @@ }
}
const config = CacheConfig.fromState();
config.printInfo(cacheProvider);
config.printInfo();
core.info("");
// TODO: remove this once https://github.com/actions/toolkit/pull/553 lands
if (process.env["RUNNER_OS"] == "macOS") {
await macOsWorkaround();
}
const workspaceCrates = core.getInput("cache-workspace-crates").toLowerCase() || "false";
const allPackages = [];
for (const workspace of config.workspaces) {
const packages = await workspace.getPackagesOutsideWorkspaceRoot();
if (workspaceCrates === "true") {
const wsMembers = await workspace.getWorkspaceMembers();
packages.push(...wsMembers);
}
const packages = await workspace.getPackages();
allPackages.push(...packages);
try {
core.info(`... Cleaning ${workspace.target} ...`);
await cleanTargetDir(workspace.target, packages);
} catch (e) {
core.debug(`${(e as any).stack}`);
core.error(`${(e as any).stack}`);
}
}
try {
const crates = core.getInput("cache-all-crates").toLowerCase() || "false";
core.info(`... Cleaning cargo registry (cache-all-crates: ${crates}) ...`);
const crates = core.getInput("cache-all-crates").toLowerCase() || "false"
core.info(`... Cleaning cargo registry cache-all-crates: ${crates} ...`);
await cleanRegistry(allPackages, crates !== "true");
} catch (e) {
core.debug(`${(e as any).stack}`);
core.error(`${(e as any).stack}`);
}
if (config.cacheBin) {
try {
core.info(`... Cleaning cargo/bin ...`);
await cleanBin(config.cargoBins);
} catch (e) {
core.debug(`${(e as any).stack}`);
}
core.error(`${(e as any).stack}`);
}
try {
core.info(`... Cleaning cargo git cache ...`);
await cleanGit(allPackages);
} catch (e) {
core.debug(`${(e as any).stack}`);
core.error(`${(e as any).stack}`);
}
core.info(`... Saving cache ...`);
// Pass a copy of cachePaths to avoid mutating the original array as reported by:
// https://github.com/actions/toolkit/pull/1378
// TODO: remove this once the underlying bug is fixed.
await cacheProvider.cache.saveCache(config.cachePaths.slice(), config.cacheKey);
await cache.saveCache(config.cachePaths.slice(), config.cacheKey);
} catch (e) {
reportError(e);
core.error(`${(e as any).stack}`);
}
process.exit();
}
run();
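The `macOsWorkaround()` called above is outside the shown hunks. Based on the linked actions/toolkit issue, a plausible body is the following; this is an assumption, not confirmed by this diff.

```ts
// Assumed body: flush the filesystem cache so the save step's tar invocation
// does not hang on in-flight writes; `purge` requires sudo on the runner.
async function macOsWorkaround() {
  try {
    await exec.exec("sudo", ["/usr/sbin/purge"], { silent: true });
  } catch {}
}
```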

View File

@@ -1,19 +1,5 @@
import * as core from "@actions/core";
import * as exec from "@actions/exec";
import * as buildjetCache from "@actions/buildjet-cache";
import * as warpbuildCache from "@actions/warpbuild-cache";
import * as ghCache from "@actions/cache";
import fs from "fs";
export function reportError(e: any) {
const { commandFailed } = e;
if (commandFailed) {
core.error(`Command failed: ${commandFailed.command}`);
core.error(commandFailed.stderr);
} else {
core.error(`${e.stack}`);
}
}
export async function getCmdOutput(
cmd: string,
@@ -36,54 +22,9 @@ export async function getCmdOutput(
...options,
});
} catch (e) {
(e as any).commandFailed = {
command: `${cmd} ${args.join(" ")}`,
stderr,
};
core.error(`Command failed: ${cmd} ${args.join(" ")}`);
core.error(stderr);
throw e;
}
return stdout;
}
export interface GhCache {
isFeatureAvailable: typeof ghCache.isFeatureAvailable;
restoreCache: typeof ghCache.restoreCache;
saveCache: (paths: string[], key: string) => Promise<string | number>;
}
export interface CacheProvider {
name: string;
cache: GhCache;
}
export function getCacheProvider(): CacheProvider {
const cacheProvider = core.getInput("cache-provider");
let cache: GhCache;
switch (cacheProvider) {
case "github":
cache = ghCache;
break;
case "buildjet":
cache = buildjetCache;
break;
case "warpbuild":
cache = warpbuildCache;
break;
default:
throw new Error(`The \`cache-provider\` \`${cacheProvider}\` is not valid.`);
}
return {
name: cacheProvider,
cache: cache,
};
}
export async function exists(path: string) {
try {
await fs.promises.access(path);
return true;
} catch {
return false;
}
}
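A short usage sketch of `getCacheProvider()` as `src/restore.ts` drives it, simplified for illustration; `paths`, `key`, and `restoreKey` are assumed to come from a resolved `CacheConfig`.

```ts
// Simplified mirror of the restore flow shown earlier in src/restore.ts.
async function restoreWithProvider(paths: string[], key: string, restoreKey: string) {
  const provider = getCacheProvider();
  if (!provider.cache.isFeatureAvailable()) {
    return;
  }
  // pass a copy of the paths array, matching the workaround noted in restore.ts
  const restored = await provider.cache.restoreCache(paths.slice(), key, [restoreKey]);
  core.info(`restored cache key: ${restored ?? "<none>"}`);
}
```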

View File

@@ -8,34 +8,26 @@ const SAVE_TARGETS = new Set(["lib", "proc-macro"]);
export class Workspace {
constructor(public root: string, public target: string) {}
async getPackages(filter: (p: Meta["packages"][0]) => boolean, ...extraArgs: string[]): Promise<Packages> {
public async getPackages(): Promise<Packages> {
let packages: Packages = [];
try {
core.debug(`collecting metadata for "${this.root}"`);
const meta: Meta = JSON.parse(
await getCmdOutput("cargo", ["metadata", "--all-features", "--format-version", "1", ...extraArgs], {
await getCmdOutput("cargo", ["metadata", "--all-features", "--format-version", "1"], {
cwd: this.root,
env: { "CARGO_ENCODED_RUSTFLAGS": "" },
}),
);
core.debug(`workspace "${this.root}" has ${meta.packages.length} packages`);
for (const pkg of meta.packages.filter(filter)) {
for (const pkg of meta.packages) {
if (pkg.manifest_path.startsWith(this.root)) {
continue;
}
const targets = pkg.targets.filter((t) => t.kind.some((kind) => SAVE_TARGETS.has(kind))).map((t) => t.name);
packages.push({ name: pkg.name, version: pkg.version, targets, path: path.dirname(pkg.manifest_path) });
}
} catch (err) {
console.error(err);
}
} catch {}
return packages;
}
public async getPackagesOutsideWorkspaceRoot(): Promise<Packages> {
return await this.getPackages((pkg) => !pkg.manifest_path.startsWith(this.root));
}
public async getWorkspaceMembers(): Promise<Packages> {
return await this.getPackages((_) => true, "--no-deps");
}
}
export interface PackageDefinition {

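For reference, a hedged sketch of the `Meta` shape that `getPackages` above consumes, limited to the fields the shown code actually reads (`cargo metadata` emits many more):

```ts
// Minimal subset of `cargo metadata --format-version 1` output used above.
interface Meta {
  packages: Array<{
    name: string;
    version: string;
    manifest_path: string;
    targets: Array<{ kind: Array<string>; name: string }>;
  }>;
}
```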
1411
tests/Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -6,12 +6,5 @@ authors = ["Arpad Borsos <arpad.borsos@googlemail.com>"]
edition = "2021"
[dependencies]
reqwest = "0.12.1"
jsonpath_lib_polars_vendor = "0.0.1"
watto = { git = "https://github.com/getsentry/watto", rev = "39ccb9add289c1f23c89f40506f4a80b2f4011b9", features = ["strings"] }
[dev-dependencies]
trybuild = "1"
[target.'cfg(not(target_env = "msvc"))'.dependencies]
tikv-jemallocator = "0.6.0"
reqwest = "0.11.11"
watto = { git = "https://github.com/getsentry/watto", rev = "d71c8218506bddba102a124a460d64da25e303dc", features = ["strings"] }

View File

@@ -1,2 +0,0 @@
the `rust-toolchain` directory will be globbed,
and should not lead to any errors down the road

View File

@@ -1,10 +1,3 @@
#[cfg(not(target_env = "msvc"))]
use tikv_jemallocator::Jemalloc;
#[cfg(not(target_env = "msvc"))]
#[global_allocator]
static GLOBAL: Jemalloc = Jemalloc;
fn main() {
println!("Hello, world!");
}

View File

@@ -1,6 +0,0 @@
#[test]
fn test_trybuild() {
let t = trybuild::TestCases::new();
t.pass("tests/trybuild/empty_main.rs");
t.compile_fail("tests/trybuild/fail_to_compile.rs");
}

View File

@@ -1 +0,0 @@
fn main() {}

View File

@@ -1,3 +0,0 @@
fn main() {
"foobar".foobar();
}

View File

@@ -1,5 +0,0 @@
error[E0599]: no method named `foobar` found for reference `&'static str` in the current scope
--> tests/trybuild/fail_to_compile.rs:2:14
|
2 | "foobar".foobar();
| ^^^^^^ method not found in `&'static str`

File diff suppressed because it is too large Load Diff

View File

@@ -1,5 +1,4 @@
[workspace]
resolver = "2"
members = [
"crates/one",
"crates/two",

View File

@@ -5,7 +5,7 @@ version = "0.1.0"
edition = "2021"
[dependencies]
reqwest = "0.12"
reqwest = "0.11.0"
async-std = "1"
tracing = "0.1"
tracing-futures = "0.2"

View File

@@ -5,4 +5,4 @@ version = "0.1.0"
edition = "2021"
[dependencies]
clap = "4"
clap = "3"