Mirror of https://github.com/Swatinem/rust-cache.git
Synced 2025-08-14 20:55:13 +00:00

Compare commits (175 commits)
The compare view lists the following 175 commit SHA1s (author and date columns are not preserved in this mirror):

267a8a94c9, 46cb408fba, 203c9eb82a, a8b36e3f74, 7e1e2d0a10, 98c8021b55, 14d3bc39c4,
52ea1434f8, eaa85be6b1, 901019c0f8, 9d47c6ad4b, 27b8ea9368, f0deed1e0e, 008623fb83,
720f7e45cc, 4b1f006ad2, e8e63cdbf2, 9a2e0d3212, c00f3025ca, 68b3cb7503, 82a92a6e8f,
598fe25fa1, 8f842c2d45, 96a8d65dba, 9bdad043e8, f7a52f6914, 2bceda3912, 640a22190e,
1582741630, 23bce251a8, 378c8285a4, a226033982, d30f1144e8, b1db5f9d5f, 3cf7f8cc28,
e03705e031, b86d1c6caa, f27990c89a, a95ba19544, 82c8487d00, 67c46e7159, 44b6087283,
e207df5d26, decb69d790, ab6b2769d1, 578b235f6e, 5113490c3f, c0e052c18c, 4e0f4b19dd,
b919e1427f, b8a6852b4f, 80c47cc945, 5ec9842c14, 3312b3ab47, f6987ea139, e97a782690,
b00faf5858, 9de8f90afb, fd201ad913, cf3f88254c, 4fedae9bcf, 1e604afb09, dd05243424,
65dbc54a5d, be7377e68e, 2656b87321, 715970feed, 3d4000164d, 988c164c3d, bb80d0f127,
ad97570a01, 060bda31e0, 865fd1f6db, 7c7e41ab01, 68aeeba167, def0926359, 827c240e23,
5e9fae966f, 127a0e9568, 99229f978c, 6fd3edff69, a1c019f71a, 664ce0090f, 359a70e43a,
ecee04e7b3, b894d59a8d, e78327dd9e, ccdddcc049, b5ec9edd91, 3f2513fdf4, 19c46583c5,
b8e72aae83, 22c9328bcb, d4d463bd9b, c4652c677c, 76686c56f2, 1b43d2f2c3, 20b9201e8a,
0d72e5f9a0, 86531941c2, be4be3720d, 213334cd98, b8d4fa4eaf, 90429b3dea, 6720f05bc4,
5733786579, 622616010e, 0497f9301f, 7b8626742a, 911d8e9e55, 875be5ce2d, 07a2ee71bc,
7c190ef171, fffd6895b2, 86bae2494f, 827b33fbd0, 36af5cb1ae, cb43a50800, f6e3aa3e12,
11be10514b, e1846a926d, 6ed4c28a7c, 5df06440c6, 3fedbbb115, fc84c9d9a3, 260a713186,
fa61956921, 81d053bdb0, 5040f39404, 2055a01dcd, cb2cf0cc7c, 74e8e24b6d, f8f67b7515,
5b2b053862, 3bb3a9a087, d127014599, 801365cd81, c5ed9ba6b7, 536c94f32c, 842ef286ff,
1b344a0a23, 31c41a926e, ebd95456c3, 3b8bbcb11d, f82d41bcc2, 063471b9dd, ce325b6065,
da42bbe56d, a9bca6b5a6, b17d52110e, b495963495, 83aad8d470, 958028d559, 27793b3b80,
be44a3e6ff, 2639a56bb8, cbcc887094, ae893481e8, d7bda0e369, 9c05405335, 08d3994b7a,
9e10a44ea3, fb2efae33d, da5df52d2f, 0eea7b85d4, 645c6972a6, 6ccf2463db, 9cc357c650,
9de90d2338, 292ef23e77, 5f6034beb8, b740ae5d3a, e8e3c57b3b, f77cb1be47, 2bcc375de8
.github/dependabot.yaml (vendored, new file, 50 lines)
@@ -0,0 +1,50 @@
# https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/optimizing-pr-creation-version-updates#setting-up-a-cooldown-period-for-dependency-updates

version: 2
updates:
  - package-ecosystem: cargo
    directories:
      - tests
      - tests/wasm-workspace
    schedule:
      interval: weekly
    # https://docs.github.com/en/code-security/dependabot/working-with-dependabot/dependabot-options-reference#groups--
    # 1 PR per week and group
    groups:
      cargo-major:
        update-types: ["major"]
      cargo-minor:
        update-types: ["minor"]
      cargo-patch:
        update-types: ["patch"]

  - package-ecosystem: github-actions
    directory: /
    schedule:
      interval: weekly
    groups:
      actions:
        # Combine all images of the last week
        patterns: ["*"]

  - package-ecosystem: npm
    directory: /
    schedule:
      interval: weekly
    groups:
      prd-major:
        dependency-type: "production"
        update-types: ["major"]
      prd-minor:
        dependency-type: "production"
        update-types: ["minor"]
      prd-patch:
        dependency-type: "production"
        update-types: ["patch"]
      dev-major:
        dependency-type: "development"
        update-types: ["major"]
      dev-minor:
        dependency-type: "development"
        update-types: ["minor"]
      dev-patch:
        dependency-type: "development"
        update-types: ["patch"]
.github/workflows/buildjet.yml (vendored, new file, 33 lines)
@@ -0,0 +1,33 @@
name: buildjet

on: [push, pull_request]

jobs:
  buildjet:
    if: github.repository == 'Swatinem/rust-cache'
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-latest, macos-latest, windows-latest]

    name: Test buildjet provider on ${{ matrix.os }}
    runs-on: ${{ matrix.os }}

    env:
      CARGO_TERM_COLOR: always

    steps:
      - uses: actions/checkout@v5

      - run: rustup toolchain install stable --profile minimal --no-self-update

      - uses: ./
        with:
          workspaces: tests
          cache-provider: buildjet

      - run: |
          cargo check
          cargo test
          cargo build --release
        working-directory: tests
.github/workflows/check-dist.yml (vendored, new file, 46 lines)
@@ -0,0 +1,46 @@
name: check dist/

on:
  push:
    branches:
      - master
    paths-ignore:
      - "**.md"
  pull_request:
    paths-ignore:
      - "**.md"
  workflow_dispatch:

jobs:
  check-dist:
    if: github.repository == 'Swatinem/rust-cache'
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v5

      - name: Setup Node.js 20.x
        uses: actions/setup-node@v4
        with:
          node-version: 20.x
          cache: npm

      - name: Install dependencies
        run: npm ci

      - name: Rebuild the dist/ directory
        run: npm run prepare

      - name: Compare the expected and actual dist/ directories
        run: |
          if [ "$(git diff dist/ | wc -l)" -gt "0" ]; then
            echo "Detected uncommitted changes after build. See status below:"
            git diff
            exit 1
          fi
        id: diff

      - uses: actions/upload-artifact@v4
        if: ${{ failure() && steps.diff.conclusion == 'failure' }}
        with:
          name: dist
          path: dist/
.github/workflows/coverage.yml (vendored, new file, 31 lines)
@@ -0,0 +1,31 @@
name: coverage

on: [push, pull_request]

jobs:
  coverage:
    if: github.repository == 'Swatinem/rust-cache'
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-latest, macos-latest, windows-latest]

    name: Test `cargo-llvm-cov` on ${{ matrix.os }}
    runs-on: ${{ matrix.os }}

    env:
      CARGO_TERM_COLOR: always

    steps:
      - uses: actions/checkout@v5

      - run: rustup toolchain install stable --profile minimal --component llvm-tools-preview --no-self-update

      - uses: taiki-e/install-action@cargo-llvm-cov

      - uses: ./
        with:
          workspaces: tests

      - run: cargo llvm-cov --all-features --workspace
        working-directory: tests
.github/workflows/dependabot.yml (vendored, new file, 25 lines)
@@ -0,0 +1,25 @@
# https://docs.github.com/en/code-security/dependabot/working-with-dependabot/automating-dependabot-with-github-actions#enabling-automerge-on-a-pull-request

name: Dependabot Automation
on: pull_request

permissions:
  contents: write
  pull-requests: write

jobs:
  automerge:
    runs-on: ubuntu-latest
    if: github.event.pull_request.user.login == 'dependabot[bot]' && github.repository == 'Swatinem/rust-cache'
    steps:
      - name: Fetch metadata
        id: metadata
        uses: dependabot/fetch-metadata@v2
        with:
          github-token: "${{ secrets.GITHUB_TOKEN }}"
      - name: Auto-merge Patch PRs
        if: steps.metadata.outputs.update-type == 'version-update:semver-patch'
        run: gh pr merge --auto --merge "$PR_URL"
        env:
          PR_URL: ${{github.event.pull_request.html_url}}
          GH_TOKEN: ${{secrets.GITHUB_TOKEN}}
.github/workflows/git-registry.yml (vendored, new file, 32 lines)
@@ -0,0 +1,32 @@
name: git-registry

on: [push, pull_request]

jobs:
  git-registry:
    if: github.repository == 'Swatinem/rust-cache'
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-latest, macos-latest, windows-latest]

    name: Test cargo "git" registry on ${{ matrix.os }}
    runs-on: ${{ matrix.os }}

    env:
      CARGO_TERM_COLOR: always
      CARGO_REGISTRIES_CRATES_IO_PROTOCOL: git

    steps:
      - uses: actions/checkout@v5

      - run: rustup toolchain install stable --profile minimal --no-self-update

      - uses: ./
        with:
          workspaces: tests

      - run: |
          cargo check
          cargo test
        working-directory: tests
.github/workflows/install.yml (vendored, new file, 26 lines)
@@ -0,0 +1,26 @@
name: install

on: [push, pull_request]

jobs:
  install:
    if: github.repository == 'Swatinem/rust-cache'
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-latest, macos-latest, windows-latest]

    name: Test `cargo install` on ${{ matrix.os }}
    runs-on: ${{ matrix.os }}

    env:
      CARGO_TERM_COLOR: always

    steps:
      - uses: actions/checkout@v5

      - run: rustup toolchain install stable --profile minimal --no-self-update

      - uses: ./

      - run: cargo install cargo-deny --locked
.github/workflows/simple.yml (vendored, new file, 32 lines)
@@ -0,0 +1,32 @@
name: simple

on: [push, pull_request]

jobs:
  simple:
    if: github.repository == 'Swatinem/rust-cache'
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-latest, macos-latest, windows-latest]

    name: Test `cargo check/test/build` on ${{ matrix.os }}
    runs-on: ${{ matrix.os }}

    env:
      CARGO_TERM_COLOR: always

    steps:
      - uses: actions/checkout@v5

      - run: rustup toolchain install stable --profile minimal --no-self-update

      - uses: ./
        with:
          workspaces: tests

      - run: |
          cargo check
          cargo test
          cargo build --release
        working-directory: tests
.github/workflows/target-dir.yml (vendored, new file, 30 lines)
@@ -0,0 +1,30 @@
name: target-dir

on: [push, pull_request]

jobs:
  target-dir:
    if: github.repository == 'Swatinem/rust-cache'
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-latest, macos-latest, windows-latest]

    name: Test custom target-dir on ${{ matrix.os }}
    runs-on: ${{ matrix.os }}

    env:
      CARGO_TERM_COLOR: always

    steps:
      - uses: actions/checkout@v5

      - run: rustup toolchain install stable --profile minimal --no-self-update

      # the `workspaces` option has the format `$workspace -> $target-dir`
      # and the `$target-dir` is relative to the `$workspace`.
      - uses: ./
        with:
          workspaces: tests -> ../custom-target-dir

      - run: cargo test --manifest-path tests/Cargo.toml --target-dir custom-target-dir
.github/workflows/workspaces.yml (vendored, new file, 36 lines)
@@ -0,0 +1,36 @@
name: workspaces

on: [push, pull_request]

jobs:
  workspaces:
    if: github.repository == 'Swatinem/rust-cache'
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-latest, macos-latest, windows-latest]

    name: Test multiple workspaces on ${{ matrix.os }}
    runs-on: ${{ matrix.os }}

    env:
      CARGO_TERM_COLOR: always

    steps:
      - uses: actions/checkout@v5

      - run: rustup toolchain install stable --profile minimal --target wasm32-unknown-unknown --no-self-update

      - uses: ./
        with:
          workspaces: |
            tests
            tests/wasm-workspace

      - name: cargo check (tests)
        working-directory: tests
        run: cargo check

      - name: cargo check (tests/wasm-workspace)
        working-directory: tests/wasm-workspace
        run: cargo check
.gitignore (vendored, 9 changed lines)
@@ -1 +1,8 @@
-node_modules
+node_modules/
+target/
+
+# Editors
+.idea/
+
+# Mac
+.DS_Store
CHANGELOG.md (130 changed lines)
@@ -1,5 +1,135 @@
 # Changelog
+
+## 2.8.0
+
+- Add support for `warpbuild` cache provider
+- Add new `cache-workspace-crates` feature
+
+## 2.7.8
+
+- Include CPU arch in the cache key
+
+## 2.7.7
+
+- Also cache `cargo install` metadata
+
+## 2.7.6
+
+- Allow opting out of caching $CARGO_HOME/bin
+- Add runner OS in cache key
+- Add an option to do lookup-only of the cache
+
+## 2.7.5
+
+- Support Cargo.lock format cargo-lock v4
+- Only run macOsWorkaround() on macOS
+
+## 2.7.3
+
+- Work around upstream problem that causes cache saving to hang for minutes.
+
+## 2.7.2
+
+- Only key by `Cargo.toml` and `Cargo.lock` files of workspace members.
+
+## 2.7.1
+
+- Update toml parser to fix parsing errors.
+
+## 2.7.0
+
+- Properly cache `trybuild` tests.
+
+## 2.6.2
+
+- Fix `toml` parsing.
+
+## 2.6.1
+
+- Fix hash contributions of `Cargo.lock`/`Cargo.toml` files.
+
+## 2.6.0
+
+- Add "buildjet" as a second `cache-provider` backend.
+- Clean up sparse registry index.
+- Do not clean up src of `-sys` crates.
+- Remove `.cargo/credentials.toml` before saving.
+
+## 2.5.1
+
+- Fix hash contribution of `Cargo.lock`.
+
+## 2.5.0
+
+- feat: Rm workspace crates version before caching.
+- feat: Add hash of `.cargo/config.toml` to key.
+
+## 2.4.0
+
+- Fix cache key stability.
+- Use 8 character hash components to reduce the key length, making it more readable.
+
+## 2.3.0
+
+- Add `cache-all-crates` option, which enables caching of crates installed by workflows.
+- Add installed packages to cache key, so changes to workflows that install rust tools are detected and cached properly.
+- Fix cache restore failures due to upstream bug.
+- Fix `EISDIR` error due to globbed directories.
+- Update runtime `@actions/cache`, `@actions/io` and dev `typescript` dependencies.
+- Update `npm run prepare` so it creates distribution files with the right line endings.
+
+## 2.2.1
+
+- Update `@actions/cache` dependency to fix usage of `zstd` compression.
+
+## 2.2.0
+
+- Add new `save-if` option to always restore, but only conditionally save the cache.
+
+## 2.1.0
+
+- Only hash `Cargo.{lock,toml}` files in the configured workspace directories.
+
+## 2.0.2
+
+- Avoid calling `cargo metadata` on pre-cleanup.
+- Added `prefix-key`, `cache-directories` and `cache-targets` options.
+
+## 2.0.1
+
+- Primarily just updating dependencies to fix GitHub deprecation notices.
+
+## 2.0.0
+
+- The action code was refactored to allow for caching multiple workspaces and
+  different `target` directory layouts.
+- The `working-directory` and `target-dir` input options were replaced by a
+  single `workspaces` option that has the form of `$workspace -> $target`.
+- Support for considering `env-vars` as part of the cache key.
+- The `sharedKey` input option was renamed to `shared-key` for consistency.
+
+## 1.4.0
+
+- Clean both `debug` and `release` target directories.
+
+## 1.3.0
+
+- Use Rust toolchain file as additional cache key.
+- Allow for a configurable target-dir.
+
+## 1.2.0
+
+- Cache `~/.cargo/bin`.
+- Support for custom `$CARGO_HOME`.
+- Add a `cache-hit` output.
+- Add a new `sharedKey` option that overrides the automatic job-name based key.
+
+## 1.1.0
+
+- Add a new `working-directory` input.
+- Support caching git dependencies.
+- Lots of other improvements.
+
 ## 1.0.2

 - Don’t prune targets that have a different name from the crate, but do prune targets from the workspace.
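As an illustration of the 2.0.0 change above, here is a minimal before/after sketch of the `workspaces` migration; the `crates/app` path is a placeholder, not something prescribed by the action:

```yaml
# v1: a separate `working-directory` input (placeholder value)
- uses: Swatinem/rust-cache@v1
  with:
    working-directory: crates/app

# v2: a single `workspaces` entry of the form `$workspace -> $target`,
# where `$target` is relative to the workspace and defaults to "target"
- uses: Swatinem/rust-cache@v2
  with:
    workspaces: crates/app -> target
```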
LICENSE (new file, 165 lines)
@@ -0,0 +1,165 @@
                   GNU LESSER GENERAL PUBLIC LICENSE
                       Version 3, 29 June 2007

 Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
 Everyone is permitted to copy and distribute verbatim copies
 of this license document, but changing it is not allowed.


  This version of the GNU Lesser General Public License incorporates
the terms and conditions of version 3 of the GNU General Public
License, supplemented by the additional permissions listed below.

  0. Additional Definitions.

  As used herein, "this License" refers to version 3 of the GNU Lesser
General Public License, and the "GNU GPL" refers to version 3 of the GNU
General Public License.

  "The Library" refers to a covered work governed by this License,
other than an Application or a Combined Work as defined below.

  An "Application" is any work that makes use of an interface provided
by the Library, but which is not otherwise based on the Library.
Defining a subclass of a class defined by the Library is deemed a mode
of using an interface provided by the Library.

  A "Combined Work" is a work produced by combining or linking an
Application with the Library.  The particular version of the Library
with which the Combined Work was made is also called the "Linked
Version".

  The "Minimal Corresponding Source" for a Combined Work means the
Corresponding Source for the Combined Work, excluding any source code
for portions of the Combined Work that, considered in isolation, are
based on the Application, and not on the Linked Version.

  The "Corresponding Application Code" for a Combined Work means the
object code and/or source code for the Application, including any data
and utility programs needed for reproducing the Combined Work from the
Application, but excluding the System Libraries of the Combined Work.

  1. Exception to Section 3 of the GNU GPL.

  You may convey a covered work under sections 3 and 4 of this License
without being bound by section 3 of the GNU GPL.

  2. Conveying Modified Versions.

  If you modify a copy of the Library, and, in your modifications, a
facility refers to a function or data to be supplied by an Application
that uses the facility (other than as an argument passed when the
facility is invoked), then you may convey a copy of the modified
version:

   a) under this License, provided that you make a good faith effort to
   ensure that, in the event an Application does not supply the
   function or data, the facility still operates, and performs
   whatever part of its purpose remains meaningful, or

   b) under the GNU GPL, with none of the additional permissions of
   this License applicable to that copy.

  3. Object Code Incorporating Material from Library Header Files.

  The object code form of an Application may incorporate material from
a header file that is part of the Library.  You may convey such object
code under terms of your choice, provided that, if the incorporated
material is not limited to numerical parameters, data structure
layouts and accessors, or small macros, inline functions and templates
(ten or fewer lines in length), you do both of the following:

   a) Give prominent notice with each copy of the object code that the
   Library is used in it and that the Library and its use are
   covered by this License.

   b) Accompany the object code with a copy of the GNU GPL and this license
   document.

  4. Combined Works.

  You may convey a Combined Work under terms of your choice that,
taken together, effectively do not restrict modification of the
portions of the Library contained in the Combined Work and reverse
engineering for debugging such modifications, if you also do each of
the following:

   a) Give prominent notice with each copy of the Combined Work that
   the Library is used in it and that the Library and its use are
   covered by this License.

   b) Accompany the Combined Work with a copy of the GNU GPL and this license
   document.

   c) For a Combined Work that displays copyright notices during
   execution, include the copyright notice for the Library among
   these notices, as well as a reference directing the user to the
   copies of the GNU GPL and this license document.

   d) Do one of the following:

       0) Convey the Minimal Corresponding Source under the terms of this
       License, and the Corresponding Application Code in a form
       suitable for, and under terms that permit, the user to
       recombine or relink the Application with a modified version of
       the Linked Version to produce a modified Combined Work, in the
       manner specified by section 6 of the GNU GPL for conveying
       Corresponding Source.

       1) Use a suitable shared library mechanism for linking with the
       Library.  A suitable mechanism is one that (a) uses at run time
       a copy of the Library already present on the user's computer
       system, and (b) will operate properly with a modified version
       of the Library that is interface-compatible with the Linked
       Version.

   e) Provide Installation Information, but only if you would otherwise
   be required to provide such information under section 6 of the
   GNU GPL, and only to the extent that such information is
   necessary to install and execute a modified version of the
   Combined Work produced by recombining or relinking the
   Application with a modified version of the Linked Version.  (If
   you use option 4d0, the Installation Information must accompany
   the Minimal Corresponding Source and Corresponding Application
   Code.  If you use option 4d1, you must provide the Installation
   Information in the manner specified by section 6 of the GNU GPL
   for conveying Corresponding Source.)

  5. Combined Libraries.

  You may place library facilities that are a work based on the
Library side by side in a single library together with other library
facilities that are not Applications and are not covered by this
License, and convey such a combined library under terms of your
choice, if you do both of the following:

   a) Accompany the combined library with a copy of the same work based
   on the Library, uncombined with any other library facilities,
   conveyed under the terms of this License.

   b) Give prominent notice with the combined library that part of it
   is a work based on the Library, and explaining where to find the
   accompanying uncombined form of the same work.

  6. Revised Versions of the GNU Lesser General Public License.

  The Free Software Foundation may publish revised and/or new versions
of the GNU Lesser General Public License from time to time. Such new
versions will be similar in spirit to the present version, but may
differ in detail to address new problems or concerns.

  Each version is given a distinguishing version number. If the
Library as you received it specifies that a certain numbered version
of the GNU Lesser General Public License "or any later version"
applies to it, you have the option of following the terms and
conditions either of that published version or of any later version
published by the Free Software Foundation. If the Library as you
received it does not specify a version number of the GNU Lesser
General Public License, you may choose any version of the GNU Lesser
General Public License ever published by the Free Software Foundation.

  If the Library as you received it specifies that a proxy can decide
whether future versions of the GNU Lesser General Public License shall
apply, that proxy's public statement of acceptance of any version is
permanent authorization for you to choose that version for the
Library.
README.md (200 changed lines)
@@ -1,47 +1,191 @@
 # Rust Cache Action

-A GitHub Action that implements smart caching for rust/cargo projects
-
-## Inputs
-
-- `key` - An optional key for the `target` cache. This is useful in case you
-  have different jobs for test / check / clippy, etc
+A GitHub Action that implements smart caching for rust/cargo projects with
+sensible defaults.

 ## Example usage

 ```yaml
-- uses: Swatinem/rust-cache@v1
+- uses: actions/checkout@v5
+
+  # selecting a toolchain either by action or manual `rustup` calls should happen
+  # before the plugin, as the cache uses the current rustc version as its cache key
+- run: rustup toolchain install stable --profile minimal
+
+- uses: Swatinem/rust-cache@v2
   with:
-    key: test
+    # The prefix cache key, this can be changed to start a new cache manually.
+    # default: "v0-rust"
+    prefix-key: ""
+
+    # A cache key that is used instead of the automatic `job`-based key,
+    # and is stable over multiple jobs.
+    # default: empty
+    shared-key: ""
+
+    # An additional cache key that is added alongside the automatic `job`-based
+    # cache key and can be used to further differentiate jobs.
+    # default: empty
+    key: ""
+
+    # A whitespace-separated list of env-var *prefixes* whose value contributes
+    # to the environment cache key.
+    # The env-vars are matched by *prefix*, so the default `RUST` var will
+    # match all of `RUSTC`, `RUSTUP_*`, `RUSTFLAGS`, `RUSTDOC_*`, etc.
+    # default: "CARGO CC CFLAGS CXX CMAKE RUST"
+    env-vars: ""
+
+    # The cargo workspaces and target directory configuration.
+    # These entries are separated by newlines and have the form
+    # `$workspace -> $target`. The `$target` part is treated as a directory
+    # relative to the `$workspace` and defaults to "target" if not explicitly given.
+    # default: ". -> target"
+    workspaces: ""
+
+    # Additional non-workspace directories to be cached, separated by newlines.
+    cache-directories: ""
+
+    # Determines whether workspace `target` directories are cached.
+    # If `false`, only the cargo registry will be cached.
+    # default: "true"
+    cache-targets: ""
+
+    # Determines if the cache should be saved even when the workflow has failed.
+    # default: "false"
+    cache-on-failure: ""
+
+    # Determines which crates are cached.
+    # If `true` all crates will be cached, otherwise only dependent crates will be cached.
+    # Useful if additional crates are used for CI tooling.
+    # default: "false"
+    cache-all-crates: ""
+
+    # Similar to cache-all-crates.
+    # If `true` the workspace crates will be cached.
+    # Useful if the workspace contains libraries that are only updated sporadically.
+    # default: "false"
+    cache-workspace-crates: ""
+
+    # Determines whether the cache should be saved.
+    # If `false`, the cache is only restored.
+    # Useful for jobs where the matrix is additive e.g. additional Cargo features,
+    # or when only runs from `master` should be saved to the cache.
+    # default: "true"
+    save-if: ""
+    # To only cache runs from `master`:
+    save-if: ${{ github.ref == 'refs/heads/master' }}
+
+    # Determines whether the cache should be restored.
+    # If `true` the cache key will be checked and the `cache-hit` output will be set,
+    # but the cache itself won't be restored.
+    # default: "false"
+    lookup-only: ""
+
+    # Specifies what to use as the backend providing the cache.
+    # Can be set to "github", "buildjet", or "warpbuild".
+    # default: "github"
+    cache-provider: ""
+
+    # Determines whether to cache the ~/.cargo/bin directory.
+    # default: "true"
+    cache-bin: ""
 ```

-## Specifics
+Further examples are available in the [.github/workflows](./.github/workflows/) directory.

-This action tries to be better than just caching the following directories:
+## Outputs

-```
-~/.cargo/registry
-~/.cargo/git
-target
-```
+**`cache-hit`**

-It disables incremental compilation and only caches dependencies. The
-assumption is that we will likely recompile the own crate(s) anyway.
+This is a boolean flag that will be set to `true` when there was an exact cache hit.

-It also separates the cache into 4 groups, each treated differently:
+## Cache Effectiveness

-- Registry Index: `~/.cargo/registry/index/<registry>`:
-
-  This is always restored from its latest snapshot, and persisted based on the
-  most recent revision.
+This action only caches the _dependencies_ of a crate, so it is more effective if
+the dependency / own code ratio is higher.

-- Registry Cache: `~/.cargo/registry/cache/<registry>`:
-
-  Automatically keyed by the lockfile/toml hash, and is being pruned to only
-  persist the dependencies that are being used.
+It is also most effective for repositories with a `Cargo.lock` file. Library
+repositories with only a `Cargo.toml` file have limited benefits, as cargo will
+_always_ use the most up-to-date dependency versions, which may not be cached.

-- target: `./target`
-
-  Automatically keyed by the lockfile/toml hash, and is being pruned to only
-  persist the dependencies that are being used. This is especially throwing
-  away any intermediate artifacts.
+Usage with Stable Rust is most effective, as a cache is tied to the Rust version.
+Using it with Nightly Rust is less effective as it will throw away the cache every day,
+unless a specific nightly build is being pinned.
+
+## Cache Details
+
+This action currently caches the following files/directories:
+
+- `~/.cargo` (installed binaries, the cargo registry, cache, and git dependencies)
+- `./target` (build artifacts of dependencies)
+
+This cache is automatically keyed by:
+
+- the github [`job_id`](https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_id),
+- the rustc release / host / hash,
+- the value of some compiler-specific environment variables (eg. RUSTFLAGS, etc), and
+- a hash of all `Cargo.lock` / `Cargo.toml` files found anywhere in the repository (if present).
+- a hash of all `rust-toolchain` / `rust-toolchain.toml` files in the root of the repository (if present).
+- a hash of all `.cargo/config.toml` files in the root of the repository (if present).
+
+An additional input `key` can be provided if the builtin keys are not sufficient.
+
+Before being persisted, the cache is cleaned of:
+
+- Any files in `~/.cargo/bin` that were present before the action ran (for example `rustc`).
+- Dependencies that are no longer used.
+- Anything that is not a dependency.
+- Incremental build artifacts.
+- Any build artifacts with an `mtime` older than one week.
+
+In particular, the workspace crates themselves are not cached since doing so is
+[generally not effective](https://github.com/Swatinem/rust-cache/issues/37#issuecomment-944697938).
+For this reason, this action automatically sets `CARGO_INCREMENTAL=0` to disable
+incremental compilation, so that the Rust compiler doesn't waste time creating
+the additional artifacts required for incremental builds.
+
+The `~/.cargo/registry/src` directory is not cached since it is quicker for Cargo
+to recreate it from the compressed crate archives in `~/.cargo/registry/cache`.
+
+The action will try to restore from a previous `Cargo.lock` version as well, so
+lockfile updates should only re-build changed dependencies.
+
+The action invokes `cargo metadata` to determine the current set of dependencies.
+
+Additionally, the action automatically works around
+[cargo#8603](https://github.com/rust-lang/cargo/issues/8603) /
+[actions/cache#403](https://github.com/actions/cache/issues/403) which would
+otherwise corrupt the cache on macOS builds.
+
+## Cache Limits and Control
+
+This specialized cache action is built on top of the upstream cache action
+maintained by GitHub. The same restrictions and limits apply, which are
+documented here:
+[Caching dependencies to speed up workflows](https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows)
+
+In particular, caches are currently limited to 10 GB in total and exceeding that
+limit will cause eviction of older caches.
+
+Caches from base branches are available to PRs, but not across unrelated
+branches.
+
+The caches can be controlled using the [Cache API](https://docs.github.com/en/rest/actions/cache)
+which allows listing existing caches and manually removing entries.
+
+## Debugging
+
+The action prints detailed information about which information it considers
+for its cache key, and it outputs more debug-only information about which
+cleanup steps it performs before persisting the cache.
+
+You can read up on how to [enable debug logging](https://docs.github.com/en/actions/monitoring-and-troubleshooting-workflows/enabling-debug-logging)
+to see those details as well as further details related to caching operations.
+
+## Known issues
+
+- The cache cleaning process currently removes all the files from `~/.cargo/bin`
+  that were present before the action ran (for example `rustc`), by default.
+  This can be an issue on long-running self-hosted runners, where such state
+  is expected to be preserved across runs. You can work around this by setting
+  `cache-bin: "false"`.
TODO.md (new file, 3 lines)
@@ -0,0 +1,3 @@
- better .cargo/bin handling:
  - get a list of all the files on "pre"/"restore"
  - move the files out of the way on "post"/"save" and move them back afterwards
action.yml (62 changed lines)
@@ -1,15 +1,65 @@
 name: "Rust Cache"
-description: "A GitHub Action that implements smart caching for rust/cargo projects"
+description: "A GitHub Action that implements smart caching for rust/cargo projects with sensible defaults."
-author: "Arpad Borsos <arpad.borsos@googlemail.com>"
+author: "Arpad Borsos <swatinem@swatinem.de>"
 inputs:
-  key:
-    description: "An explicit key for restoring and saving the target cache"
+  prefix-key:
+    description: "The prefix cache key, this can be changed to start a new cache manually."
     required: false
+    default: "v0-rust"
+  shared-key:
+    description: "A cache key that is used instead of the automatic `job`-based key, and is stable over multiple jobs."
+    required: false
+  key:
+    description: "An additional cache key that is added alongside the automatic `job`-based cache key and can be used to further differentiate jobs."
+    required: false
+  env-vars:
+    description: "Additional environment variables to include in the cache key, separated by spaces."
+    required: false
+  workspaces:
+    description: "Paths to multiple Cargo workspaces and their target directories, separated by newlines."
+    required: false
+  cache-directories:
+    description: "Additional non-workspace directories to be cached, separated by newlines."
+    required: false
+  cache-targets:
+    description: "Determines whether workspace targets are cached. If `false`, only the cargo registry will be cached."
+    required: false
+    default: "true"
+  cache-on-failure:
+    description: "Cache even if the build fails. Defaults to false."
+    required: false
+  cache-all-crates:
+    description: "Determines which crates are cached. If `true` all crates will be cached, otherwise only dependent crates will be cached."
+    required: false
+    default: "false"
+  cache-workspace-crates:
+    description: "Similar to cache-all-crates. If `true` the workspace crates will be cached."
+    required: false
+    default: "false"
+  save-if:
+    description: "Determines whether the cache should be saved. If `false`, the cache is only restored."
+    required: false
+    default: "true"
+  cache-provider:
+    description: "Determines which provider to use for caching. Options are github, buildjet, or warpbuild. Defaults to github."
+    required: false
+    default: "github"
+  cache-bin:
+    description: "Determines whether to cache ${CARGO_HOME}/bin."
+    required: false
+    default: "true"
+  lookup-only:
+    description: "Check if a cache entry exists without downloading the cache."
+    required: false
+    default: "false"
+outputs:
+  cache-hit:
+    description: "A boolean value that indicates an exact match was found."
 runs:
-  using: "node12"
+  using: "node20"
   main: "dist/restore/index.js"
   post: "dist/save/index.js"
-  post-if: "success()"
+  post-if: "success() || env.CACHE_ON_FAILURE == 'true'"
 branding:
   icon: "archive"
   color: "gray-dark"
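The new `post-if` expression above is what the `cache-on-failure` input hooks into: the save step runs on `success()`, or when the `CACHE_ON_FAILURE` environment variable is `'true'` (which the restore step is assumed to export when the input is set). A minimal usage sketch:

```yaml
- uses: Swatinem/rust-cache@v2
  with:
    # save the cache even if a later build or test step fails
    cache-on-failure: "true"
```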
|
172791
dist/restore/index.js
vendored
172791
dist/restore/index.js
vendored
File diff suppressed because one or more lines are too long
172958
dist/save/index.js
vendored
172958
dist/save/index.js
vendored
File diff suppressed because one or more lines are too long
1990
package-lock.json
generated
1990
package-lock.json
generated
File diff suppressed because it is too large
Load Diff
package.json (29 changed lines)
@@ -1,8 +1,8 @@
 {
   "private": true,
   "name": "rust-cache",
-  "version": "1.0.2",
-  "description": "A GitHub Action that implements smart caching for rust/cargo projects",
+  "version": "2.8.0",
+  "description": "A GitHub Action that implements smart caching for rust/cargo projects with sensible defaults.",
   "keywords": [
     "actions",
     "rust",
@@ -17,20 +17,27 @@
   "bugs": {
     "url": "https://github.com/Swatinem/rust-cache/issues"
   },
-  "funding": "https://github.com/sponsors/Swatinem",
+  "funding": {
+    "url": "https://github.com/sponsors/Swatinem"
+  },
   "homepage": "https://github.com/Swatinem/rust-cache#readme",
   "dependencies": {
-    "@actions/cache": "^1.0.2",
-    "@actions/core": "^1.2.6",
-    "@actions/exec": "^1.0.4",
-    "@actions/glob": "^0.1.0",
-    "@actions/io": "^1.0.2"
+    "@actions/buildjet-cache": "npm:github-actions.cache-buildjet@0.2.0",
+    "@actions/warpbuild-cache": "npm:github-actions.warp-cache@1.4.7",
+    "@actions/cache": "^4.0.5",
+    "@actions/core": "^1.11.1",
+    "@actions/exec": "^1.1.1",
+    "@actions/glob": "^0.5.0",
+    "@actions/io": "^1.1.3",
+    "smol-toml": "^1.4.2"
   },
   "devDependencies": {
-    "@vercel/ncc": "^0.24.1",
-    "typescript": "^4.0.3"
+    "@types/node": "^22.16.0",
+    "@vercel/ncc": "^0.38.3",
+    "linefix": "^0.1.1",
+    "typescript": "5.8.3"
   },
   "scripts": {
-    "prepare": "ncc build -o dist/restore src/restore.ts && ncc build -o dist/save src/save.ts"
+    "prepare": "ncc build --target es2020 -o dist/restore src/restore.ts && ncc build --target es2020 -o dist/save src/save.ts && linefix dist"
   }
 }
311
src/cleanup.ts
Normal file
311
src/cleanup.ts
Normal file
@ -0,0 +1,311 @@
|
|||||||
|
import * as core from "@actions/core";
|
||||||
|
import * as io from "@actions/io";
|
||||||
|
import fs from "fs";
|
||||||
|
import path from "path";
|
||||||
|
|
||||||
|
import { CARGO_HOME } from "./config";
|
||||||
|
import { exists } from "./utils";
|
||||||
|
import { Packages } from "./workspace";
|
||||||
|
|
||||||
|
export async function cleanTargetDir(targetDir: string, packages: Packages, checkTimestamp = false) {
|
||||||
|
core.debug(`cleaning target directory "${targetDir}"`);
|
||||||
|
|
||||||
|
// remove all *files* from the profile directory
|
||||||
|
let dir = await fs.promises.opendir(targetDir);
|
||||||
|
for await (const dirent of dir) {
|
||||||
|
if (dirent.isDirectory()) {
|
||||||
|
let dirName = path.join(dir.path, dirent.name);
|
||||||
|
// is it a profile dir, or a nested target dir?
|
||||||
|
let isNestedTarget =
|
||||||
|
(await exists(path.join(dirName, "CACHEDIR.TAG"))) || (await exists(path.join(dirName, ".rustc_info.json")));
|
||||||
|
|
||||||
|
try {
|
||||||
|
if (isNestedTarget) {
|
||||||
|
await cleanTargetDir(dirName, packages, checkTimestamp);
|
||||||
|
} else {
|
||||||
|
await cleanProfileTarget(dirName, packages, checkTimestamp);
|
||||||
|
}
|
||||||
|
} catch {}
|
||||||
|
} else if (dirent.name !== "CACHEDIR.TAG") {
|
||||||
|
await rm(dir.path, dirent);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async function cleanProfileTarget(profileDir: string, packages: Packages, checkTimestamp = false) {
|
||||||
|
core.debug(`cleaning profile directory "${profileDir}"`);
|
||||||
|
|
||||||
|
// Quite a few testing utility crates store compilation artifacts as nested
|
||||||
|
// workspaces under `target/tests`. Notably, `target/tests/target` and
|
||||||
|
// `target/tests/trybuild`.
|
||||||
|
if (path.basename(profileDir) === "tests") {
|
||||||
|
try {
|
||||||
|
// https://github.com/vertexclique/kaos/blob/9876f6c890339741cc5be4b7cb9df72baa5a6d79/src/cargo.rs#L25
|
||||||
|
// https://github.com/eupn/macrotest/blob/c4151a5f9f545942f4971980b5d264ebcd0b1d11/src/cargo.rs#L27
|
||||||
|
cleanTargetDir(path.join(profileDir, "target"), packages, checkTimestamp);
|
||||||
|
} catch {}
|
||||||
|
try {
|
||||||
|
// https://github.com/dtolnay/trybuild/blob/eec8ca6cb9b8f53d0caf1aa499d99df52cae8b40/src/cargo.rs#L50
|
||||||
|
cleanTargetDir(path.join(profileDir, "trybuild"), packages, checkTimestamp);
|
||||||
|
} catch {}
|
||||||
|
|
||||||
|
// Delete everything else.
|
||||||
|
await rmExcept(profileDir, new Set(["target", "trybuild"]), checkTimestamp);
|
||||||
|
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
let keepProfile = new Set(["build", ".fingerprint", "deps"]);
|
||||||
|
await rmExcept(profileDir, keepProfile);
|
||||||
|
|
||||||
|
const keepPkg = new Set(packages.map((p) => p.name));
|
||||||
|
await rmExcept(path.join(profileDir, "build"), keepPkg, checkTimestamp);
|
||||||
|
await rmExcept(path.join(profileDir, ".fingerprint"), keepPkg, checkTimestamp);
|
||||||
|
|
||||||
|
const keepDeps = new Set(
|
||||||
|
packages.flatMap((p) => {
|
||||||
|
const names = [];
|
||||||
|
for (const n of [p.name, ...p.targets]) {
|
||||||
|
const name = n.replace(/-/g, "_");
|
||||||
|
names.push(name, `lib${name}`);
|
||||||
|
}
|
||||||
|
return names;
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
await rmExcept(path.join(profileDir, "deps"), keepDeps, checkTimestamp);
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function getCargoBins(): Promise<Set<string>> {
|
||||||
|
const bins = new Set<string>();
|
||||||
|
try {
|
||||||
|
const { installs }: { installs: { [key: string]: { bins: Array<string> } } } = JSON.parse(
|
||||||
|
await fs.promises.readFile(path.join(CARGO_HOME, ".crates2.json"), "utf8"),
|
||||||
|
);
|
||||||
|
for (const pkg of Object.values(installs)) {
|
||||||
|
for (const bin of pkg.bins) {
|
||||||
|
bins.add(bin);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch {}
|
||||||
|
return bins;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Clean the cargo bin directory, removing the binaries that existed
|
||||||
|
* when the action started, as they were not created by the build.
|
||||||
|
*
|
||||||
|
* @param oldBins The binaries that existed when the action started.
|
||||||
|
*/
|
||||||
|
export async function cleanBin(oldBins: Array<string>) {
|
||||||
|
const bins = await getCargoBins();
|
||||||
|
|
||||||
|
for (const bin of oldBins) {
|
||||||
|
bins.delete(bin);
|
||||||
|
}
|
||||||
|
|
||||||
|
const dir = await fs.promises.opendir(path.join(CARGO_HOME, "bin"));
|
||||||
|
for await (const dirent of dir) {
|
||||||
|
if (dirent.isFile() && !bins.has(dirent.name)) {
|
||||||
|
await rm(dir.path, dirent);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function cleanRegistry(packages: Packages, crates = true) {
|
||||||
|
// remove `.cargo/credentials.toml`
|
||||||
|
try {
|
||||||
|
const credentials = path.join(CARGO_HOME, ".cargo", "credentials.toml");
|
||||||
|
core.debug(`deleting "${credentials}"`);
|
||||||
|
await fs.promises.unlink(credentials);
|
||||||
|
} catch {}
|
||||||
|
|
||||||
|
// `.cargo/registry/index`
|
||||||
|
let pkgSet = new Set(packages.map((p) => p.name));
|
||||||
|
const indexDir = await fs.promises.opendir(path.join(CARGO_HOME, "registry", "index"));
|
||||||
|
for await (const dirent of indexDir) {
|
||||||
|
if (dirent.isDirectory()) {
|
||||||
|
// eg `.cargo/registry/index/github.com-1ecc6299db9ec823`
|
||||||
|
// or `.cargo/registry/index/index.crates.io-e139d0d48fed7772`
|
||||||
|
const dirPath = path.join(indexDir.path, dirent.name);
|
||||||
|
|
||||||
|
// for a git registry, we can remove `.cache`, as cargo will recreate it from git
|
||||||
|
if (await exists(path.join(dirPath, ".git"))) {
|
||||||
|
await rmRF(path.join(dirPath, ".cache"));
|
||||||
|
} else {
|
||||||
|
await cleanRegistryIndexCache(dirPath, pkgSet);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!crates) {
|
||||||
|
core.debug("skipping registry cache and src cleanup");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// `.cargo/registry/src`
|
||||||
|
// Cargo usually re-creates these from the `.crate` cache below,
|
||||||
|
// but for some reason that does not work for `-sys` crates that check timestamps
|
||||||
|
// to decide if rebuilds are necessary.
|
||||||
|
pkgSet = new Set(packages.filter((p) => p.name.endsWith("-sys")).map((p) => `${p.name}-${p.version}`));
|
||||||
|
  const srcDir = await fs.promises.opendir(path.join(CARGO_HOME, "registry", "src"));
  for await (const dirent of srcDir) {
    if (dirent.isDirectory()) {
      // eg `.cargo/registry/src/github.com-1ecc6299db9ec823`
      // or `.cargo/registry/src/index.crates.io-e139d0d48fed7772`
      const dir = await fs.promises.opendir(path.join(srcDir.path, dirent.name));
      for await (const dirent of dir) {
        if (dirent.isDirectory() && !pkgSet.has(dirent.name)) {
          await rmRF(path.join(dir.path, dirent.name));
        }
      }
    }
  }

  // `.cargo/registry/cache`
  pkgSet = new Set(packages.map((p) => `${p.name}-${p.version}.crate`));

  const cacheDir = await fs.promises.opendir(path.join(CARGO_HOME, "registry", "cache"));
  for await (const dirent of cacheDir) {
    if (dirent.isDirectory()) {
      // eg `.cargo/registry/cache/github.com-1ecc6299db9ec823`
      // or `.cargo/registry/cache/index.crates.io-e139d0d48fed7772`
      const dir = await fs.promises.opendir(path.join(cacheDir.path, dirent.name));
      for await (const dirent of dir) {
        // here we check that the downloaded `.crate` matches one from our dependencies
        if (dirent.isFile() && !pkgSet.has(dirent.name)) {
          await rm(dir.path, dirent);
        }
      }
    }
  }
}

/// Recursively walks and cleans the index `.cache`
async function cleanRegistryIndexCache(dirName: string, keepPkg: Set<string>) {
  let dirIsEmpty = true;
  const cacheDir = await fs.promises.opendir(dirName);
  for await (const dirent of cacheDir) {
    if (dirent.isDirectory()) {
      if (await cleanRegistryIndexCache(path.join(dirName, dirent.name), keepPkg)) {
        await rm(dirName, dirent);
      } else {
        dirIsEmpty &&= false;
      }
    } else {
      if (keepPkg.has(dirent.name)) {
        dirIsEmpty &&= false;
      } else {
        await rm(dirName, dirent);
      }
    }
  }
  return dirIsEmpty;
}

export async function cleanGit(packages: Packages) {
  const coPath = path.join(CARGO_HOME, "git", "checkouts");
  const dbPath = path.join(CARGO_HOME, "git", "db");
  const repos = new Map<string, Set<string>>();
  for (const p of packages) {
    if (!p.path.startsWith(coPath)) {
      continue;
    }
    const [repo, ref] = p.path.slice(coPath.length + 1).split(path.sep);
    const refs = repos.get(repo);
    if (refs) {
      refs.add(ref);
    } else {
      repos.set(repo, new Set([ref]));
    }
  }

  // we have to keep both the clone, and the checkout, removing either will
  // trigger a rebuild

  // clean the db
  try {
    let dir = await fs.promises.opendir(dbPath);
    for await (const dirent of dir) {
      if (!repos.has(dirent.name)) {
        await rm(dir.path, dirent);
      }
    }
  } catch {}

  // clean the checkouts
  try {
    let dir = await fs.promises.opendir(coPath);
    for await (const dirent of dir) {
      const refs = repos.get(dirent.name);
      if (!refs) {
        await rm(dir.path, dirent);
        continue;
      }
      if (!dirent.isDirectory()) {
        continue;
      }
      const refsDir = await fs.promises.opendir(path.join(dir.path, dirent.name));
      for await (const dirent of refsDir) {
        if (!refs.has(dirent.name)) {
          await rm(refsDir.path, dirent);
        }
      }
    }
  } catch {}
}

const ONE_WEEK = 7 * 24 * 3600 * 1000;

/**
 * Removes all files or directories in `dirName` matching some criteria.
 *
 * When the `checkTimestamp` flag is set, this will also remove anything older
 * than one week.
 *
 * Otherwise, it will remove everything that does not match any string in the
 * `keepPrefix` set.
 * The matching strips any trailing `-$hash` suffix.
 */
async function rmExcept(dirName: string, keepPrefix: Set<string>, checkTimestamp = false) {
  const dir = await fs.promises.opendir(dirName);
  for await (const dirent of dir) {
    if (checkTimestamp) {
      const fileName = path.join(dir.path, dirent.name);
      const { mtime } = await fs.promises.stat(fileName);
      const isOutdated = Date.now() - mtime.getTime() > ONE_WEEK;

      if (isOutdated) {
        await rm(dir.path, dirent);
      }
      return;
    }

    let name = dirent.name;

    // strip the trailing hash
    const idx = name.lastIndexOf("-");
    if (idx !== -1) {
      name = name.slice(0, idx);
    }

    if (!keepPrefix.has(name)) {
      await rm(dir.path, dirent);
    }
  }
}

async function rm(parent: string, dirent: fs.Dirent) {
  try {
    const fileName = path.join(parent, dirent.name);
    core.debug(`deleting "${fileName}"`);
    if (dirent.isFile()) {
      await fs.promises.unlink(fileName);
    } else if (dirent.isDirectory()) {
      await io.rmRF(fileName);
    }
  } catch {}
}

async function rmRF(dirName: string) {
  core.debug(`deleting "${dirName}"`);
  await io.rmRF(dirName);
}
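The `-$hash` matching used by `rmExcept` above compares only the part of an entry name before its last `-`, since cargo names its artifacts `<crate>-<hash>`. A minimal standalone sketch of that comparison; the entry and crate names below are made up for illustration:

    // Sketch: how rmExcept's prefix matching decides what to keep.
    const keep = new Set(["serde", "libserde"]);
    for (const entry of ["serde-1f2e3d4c", "rand-0a1b2c3d"]) {
      const idx = entry.lastIndexOf("-");
      const stem = idx !== -1 ? entry.slice(0, idx) : entry; // strip trailing `-$hash`
      console.log(entry, keep.has(stem) ? "keep" : "remove");
    }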
144 src/common.ts
@@ -1,144 +0,0 @@
import * as core from "@actions/core";
import * as exec from "@actions/exec";
import * as glob from "@actions/glob";
import crypto from "crypto";
import fs from "fs";
import os from "os";
import path from "path";

const home = os.homedir();
export const paths = {
  index: path.join(home, ".cargo/registry/index"),
  cache: path.join(home, ".cargo/registry/cache"),
  git: path.join(home, ".cargo/git/db"),
  target: "target",
};

interface CacheConfig {
  name: string;
  path: string;
  key: string;
  restoreKeys?: Array<string>;
}

interface Caches {
  index: CacheConfig;
  cache: CacheConfig;
  // git: CacheConfig;
  target: CacheConfig;
}

const RefKey = "GITHUB_REF";

export function isValidEvent(): boolean {
  return RefKey in process.env && Boolean(process.env[RefKey]);
}

export async function getCaches(): Promise<Caches> {
  const rustKey = await getRustKey();
  let lockHash = core.getState("lockHash");
  if (!lockHash) {
    lockHash = await getLockfileHash();
    core.saveState("lockHash", lockHash);
  }
  let targetKey = core.getInput("key");
  if (targetKey) {
    targetKey = `${targetKey}-`;
  }

  const registryIndex = `v0-registry-index`;
  const registryCache = `v0-registry-cache`;
  const target = `v0-target-${targetKey}${rustKey}`;
  return {
    index: {
      name: "Registry Index",
      path: paths.index,
      key: `${registryIndex}-`,
      restoreKeys: [registryIndex],
    },
    cache: {
      name: "Registry Cache",
      path: paths.cache,
      key: `${registryCache}-${lockHash}`,
      restoreKeys: [registryCache],
    },
    // git: {
    //   name: "Git Dependencies",
    //   path: paths.git,
    //   key: "git-db",
    // },
    target: {
      name: "Target",
      path: paths.target,
      key: `${target}-${lockHash}`,
      restoreKeys: [target],
    },
  };
}

async function getRustKey(): Promise<string> {
  const rustc = await getRustVersion();
  return `${rustc.release}-${rustc.host}-${rustc["commit-hash"]}`;
}

interface RustVersion {
  host: string;
  release: string;
  "commit-hash": string;
}

async function getRustVersion(): Promise<RustVersion> {
  const stdout = await getCmdOutput("rustc", ["-vV"]);
  let splits = stdout
    .split(/[\n\r]+/)
    .filter(Boolean)
    .map((s) => s.split(":").map((s) => s.trim()))
    .filter((s) => s.length === 2);
  return Object.fromEntries(splits);
}

export async function getCmdOutput(
  cmd: string,
  args: Array<string> = [],
  options: exec.ExecOptions = {},
): Promise<string> {
  let stdout = "";
  await exec.exec(cmd, args, {
    silent: true,
    listeners: {
      stdout(data) {
        stdout += data.toString();
      },
    },
    ...options,
  });
  return stdout;
}

export async function getRegistryName() {
  const globber = await glob.create(`${paths.index}/**/.last-updated`, { followSymbolicLinks: false });
  const files = await globber.glob();
  if (files.length > 1) {
    core.debug(`got multiple registries: "${files.join('", "')}"`);
  }

  const first = files.shift();
  if (!first) {
    return;
  }
  return path.basename(path.dirname(first));
}

async function getLockfileHash() {
  const globber = await glob.create("**/Cargo.toml\n**/Cargo.lock", { followSymbolicLinks: false });
  const files = await globber.glob();
  files.sort((a, b) => a.localeCompare(b));

  const hasher = crypto.createHash("sha1");
  for (const file of files) {
    for await (const chunk of fs.createReadStream(file)) {
      hasher.update(chunk);
    }
  }
  return hasher.digest("hex");
}
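Both the removed `getRustVersion` above and its successor in src/config.ts parse the key/value output of `rustc -vV` into an object. A minimal sketch of that parsing, run against a hard-coded sample string (abbreviated, not real rustc output):

    // Sketch: `rustc -vV` parsing as done by getRustVersion.
    const sample =
      "rustc 1.70.0 (90c5418 2023-05-31)\nrelease: 1.70.0\nhost: x86_64-unknown-linux-gnu\ncommit-hash: 90c5418";
    const entries = sample
      .split(/[\n\r]+/)
      .filter(Boolean)
      .map((s) => s.split(":").map((p) => p.trim()))
      .filter((s) => s.length === 2); // drops the banner line, which has no colon
    console.log(Object.fromEntries(entries));
    // -> { release: "1.70.0", host: "x86_64-unknown-linux-gnu", "commit-hash": "90c5418" }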
397 src/config.ts Normal file
@@ -0,0 +1,397 @@
import * as core from "@actions/core";
import * as glob from "@actions/glob";
import crypto from "crypto";
import fs from "fs";
import fs_promises from "fs/promises";
import os from "os";
import path from "path";
import * as toml from "smol-toml";

import { getCargoBins } from "./cleanup";
import { CacheProvider, exists, getCmdOutput } from "./utils";
import { Workspace } from "./workspace";

const HOME = os.homedir();
export const CARGO_HOME = process.env.CARGO_HOME || path.join(HOME, ".cargo");

const STATE_CONFIG = "RUST_CACHE_CONFIG";
const HASH_LENGTH = 8;

export class CacheConfig {
  /** All the paths we want to cache */
  public cachePaths: Array<string> = [];
  /** The primary cache key */
  public cacheKey = "";
  /** The secondary (restore) key that only contains the prefix and environment */
  public restoreKey = "";

  /** Whether to cache CARGO_HOME/.bin */
  public cacheBin: boolean = true;

  /** The workspace configurations */
  public workspaces: Array<Workspace> = [];

  /** The cargo binaries present during main step */
  public cargoBins: Array<string> = [];

  /** The prefix portion of the cache key */
  private keyPrefix = "";
  /** The rust version considered for the cache key */
  private keyRust = "";
  /** The environment variables considered for the cache key */
  private keyEnvs: Array<string> = [];
  /** The files considered for the cache key */
  private keyFiles: Array<string> = [];

  private constructor() {}

  /**
   * Constructs a [`CacheConfig`] with all the paths and keys.
   *
   * This will read the action `input`s, and read and persist `state` as necessary.
   */
  static async new(): Promise<CacheConfig> {
    const self = new CacheConfig();

    // Construct key prefix:
    // This uses either the `shared-key` input,
    // or the `key` input combined with the `job` key.

    let key = core.getInput("prefix-key") || "v0-rust";

    const sharedKey = core.getInput("shared-key");
    if (sharedKey) {
      key += `-${sharedKey}`;
    } else {
      const inputKey = core.getInput("key");
      if (inputKey) {
        key += `-${inputKey}`;
      }

      const job = process.env.GITHUB_JOB;
      if (job) {
        key += `-${job}`;
      }
    }

    // Add runner OS and CPU architecture to the key to avoid cross-contamination of cache
    const runnerOS = os.type();
    const runnerArch = os.arch();
    key += `-${runnerOS}-${runnerArch}`;

    self.keyPrefix = key;

    // Construct environment portion of the key:
    // This consists of a hash that considers the rust version
    // as well as all the environment variables as given by a default list
    // and the `env-vars` input.
    // The env vars are sorted, matched by prefix and hashed into the
    // resulting environment hash.

    let hasher = crypto.createHash("sha1");
    const rustVersion = await getRustVersion();

    let keyRust = `${rustVersion.release} ${rustVersion.host}`;
    hasher.update(keyRust);
    hasher.update(rustVersion["commit-hash"]);

    keyRust += ` (${rustVersion["commit-hash"]})`;
    self.keyRust = keyRust;

    // these prefixes should cover most of the compiler / rust / cargo keys
    const envPrefixes = ["CARGO", "CC", "CFLAGS", "CXX", "CMAKE", "RUST"];
    envPrefixes.push(...core.getInput("env-vars").split(/\s+/).filter(Boolean));

    // sort the available env vars so we have a more stable hash
    const keyEnvs = [];
    const envKeys = Object.keys(process.env);
    envKeys.sort((a, b) => a.localeCompare(b));
    for (const key of envKeys) {
      const value = process.env[key];
      if (envPrefixes.some((prefix) => key.startsWith(prefix)) && value) {
        hasher.update(`${key}=${value}`);
        keyEnvs.push(key);
      }
    }

    self.keyEnvs = keyEnvs;

    key += `-${digest(hasher)}`;

    self.restoreKey = key;

    // Construct the lockfiles portion of the key:
    // This considers all the files found via globbing for various manifests
    // and lockfiles.

    self.cacheBin = core.getInput("cache-bin").toLowerCase() == "true";

    // Constructs the workspace config and paths to restore:
    // The workspaces are given using a `$workspace -> $target` syntax.

    const workspaces: Array<Workspace> = [];
    const workspacesInput = core.getInput("workspaces") || ".";
    for (const workspace of workspacesInput.trim().split("\n")) {
      let [root, target = "target"] = workspace.split("->").map((s) => s.trim());
      root = path.resolve(root);
      target = path.join(root, target);
      workspaces.push(new Workspace(root, target));
    }
    self.workspaces = workspaces;

    let keyFiles = await globFiles(".cargo/config.toml\nrust-toolchain\nrust-toolchain.toml");
    const parsedKeyFiles = []; // keyFiles that are parsed, pre-processed and hashed

    hasher = crypto.createHash("sha1");

    for (const workspace of workspaces) {
      const root = workspace.root;
      keyFiles.push(
        ...(await globFiles(
          `${root}/**/.cargo/config.toml\n${root}/**/rust-toolchain\n${root}/**/rust-toolchain.toml`,
        )),
      );

      const workspaceMembers = await workspace.getWorkspaceMembers();

      const cargo_manifests = sort_and_uniq(workspaceMembers.map((member) => path.join(member.path, "Cargo.toml")));

      for (const cargo_manifest of cargo_manifests) {
        try {
          const content = await fs_promises.readFile(cargo_manifest, { encoding: "utf8" });
          // Use any since TomlPrimitive is not exposed
          const parsed = toml.parse(content) as { [key: string]: any };

          if ("package" in parsed) {
            const pack = parsed.package;
            if ("version" in pack) {
              pack["version"] = "0.0.0";
            }
          }

          for (const prefix of ["", "build-", "dev-"]) {
            const section_name = `${prefix}dependencies`;
            if (!(section_name in parsed)) {
              continue;
            }
            const deps = parsed[section_name];

            for (const key of Object.keys(deps)) {
              const dep = deps[key];

              try {
                if ("path" in dep) {
                  dep.version = "0.0.0";
                  dep.path = "";
                }
              } catch (_e) {
                // Not an object, probably a string (version),
                // continue.
                continue;
              }
            }
          }

          hasher.update(JSON.stringify(parsed));

          parsedKeyFiles.push(cargo_manifest);
        } catch (e) {
          // Fallback to caching them as regular file
          core.warning(`Error parsing Cargo.toml manifest, fallback to caching entire file: ${e}`);
          keyFiles.push(cargo_manifest);
        }
      }

      const cargo_lock = path.join(workspace.root, "Cargo.lock");
      if (await exists(cargo_lock)) {
        try {
          const content = await fs_promises.readFile(cargo_lock, { encoding: "utf8" });
          const parsed = toml.parse(content);

          if ((parsed.version !== 3 && parsed.version !== 4) || !("package" in parsed)) {
            // Fallback to caching them as regular file since this action
            // can only handle Cargo.lock format versions 3 and 4
            core.warning("Unsupported Cargo.lock format, fallback to caching entire file");
            keyFiles.push(cargo_lock);
            continue;
          }

          // Packages without `[[package]].source` and `[[package]].checksum`
          // are the ones with `path = "..."` to crates within the workspace.
          const packages = (parsed.package as any[]).filter((p: any) => "source" in p || "checksum" in p);

          hasher.update(JSON.stringify(packages));

          parsedKeyFiles.push(cargo_lock);
        } catch (e) {
          // Fallback to caching them as regular file
          core.warning(`Error parsing Cargo.lock manifest, fallback to caching entire file: ${e}`);
          keyFiles.push(cargo_lock);
        }
      }
    }
    keyFiles = sort_and_uniq(keyFiles);

    for (const file of keyFiles) {
      for await (const chunk of fs.createReadStream(file)) {
        hasher.update(chunk);
      }
    }

    let lockHash = digest(hasher);

    keyFiles.push(...parsedKeyFiles);
    self.keyFiles = sort_and_uniq(keyFiles);

    key += `-${lockHash}`;
    self.cacheKey = key;

    self.cachePaths = [path.join(CARGO_HOME, "registry"), path.join(CARGO_HOME, "git")];
    if (self.cacheBin) {
      self.cachePaths = [
        path.join(CARGO_HOME, "bin"),
        path.join(CARGO_HOME, ".crates.toml"),
        path.join(CARGO_HOME, ".crates2.json"),
        ...self.cachePaths,
      ];
    }
    const cacheTargets = core.getInput("cache-targets").toLowerCase() || "true";
    if (cacheTargets === "true") {
      self.cachePaths.push(...workspaces.map((ws) => ws.target));
    }

    const cacheDirectories = core.getInput("cache-directories");
    for (const dir of cacheDirectories.trim().split(/\s+/).filter(Boolean)) {
      self.cachePaths.push(dir);
    }

    const bins = await getCargoBins();
    self.cargoBins = Array.from(bins.values());

    return self;
  }

  /**
   * Reads and returns the cache config from the action `state`.
   *
   * @throws {Error} if the state is not present.
   * @returns {CacheConfig} the configuration.
   * @see {@link CacheConfig#saveState}
   * @see {@link CacheConfig#new}
   */
  static fromState(): CacheConfig {
    const source = core.getState(STATE_CONFIG);
    if (!source) {
      throw new Error("Cache configuration not found in state");
    }

    const self = new CacheConfig();
    Object.assign(self, JSON.parse(source));
    self.workspaces = self.workspaces.map((w: any) => new Workspace(w.root, w.target));

    return self;
  }

  /**
   * Prints the configuration to the action log.
   */
  printInfo(cacheProvider: CacheProvider) {
    core.startGroup("Cache Configuration");
    core.info(`Cache Provider:`);
    core.info(`    ${cacheProvider.name}`);
    core.info(`Workspaces:`);
    for (const workspace of this.workspaces) {
      core.info(`    ${workspace.root}`);
    }
    core.info(`Cache Paths:`);
    for (const path of this.cachePaths) {
      core.info(`    ${path}`);
    }
    core.info(`Restore Key:`);
    core.info(`    ${this.restoreKey}`);
    core.info(`Cache Key:`);
    core.info(`    ${this.cacheKey}`);
    core.info(`.. Prefix:`);
    core.info(`  - ${this.keyPrefix}`);
    core.info(`.. Environment considered:`);
    core.info(`  - Rust Version: ${this.keyRust}`);
    for (const env of this.keyEnvs) {
      core.info(`  - ${env}`);
    }
    core.info(`.. Lockfiles considered:`);
    for (const file of this.keyFiles) {
      core.info(`  - ${file}`);
    }
    core.endGroup();
  }

  /**
   * Saves the configuration to the state store.
   * This is used to restore the configuration in the post action.
   */
  saveState() {
    core.saveState(STATE_CONFIG, this);
  }
}

/**
 * Checks if the cache is up to date.
 *
 * @returns `true` if the cache is up to date, `false` otherwise.
 */
export function isCacheUpToDate(): boolean {
  return core.getState(STATE_CONFIG) === "";
}

/**
 * Returns a hex digest of the given hasher truncated to `HASH_LENGTH`.
 *
 * @param hasher The hasher to digest.
 * @returns The hex digest.
 */
function digest(hasher: crypto.Hash): string {
  return hasher.digest("hex").substring(0, HASH_LENGTH);
}

interface RustVersion {
  host: string;
  release: string;
  "commit-hash": string;
}

async function getRustVersion(): Promise<RustVersion> {
  const stdout = await getCmdOutput("rustc", ["-vV"]);
  let splits = stdout
    .split(/[\n\r]+/)
    .filter(Boolean)
    .map((s) => s.split(":").map((s) => s.trim()))
    .filter((s) => s.length === 2);
  return Object.fromEntries(splits);
}

async function globFiles(pattern: string): Promise<string[]> {
  const globber = await glob.create(pattern, {
    followSymbolicLinks: false,
  });
  // fs.statSync resolves the symbolic link and returns stat for the
  // file it points to, so isFile makes sure the resolved
  // file is actually a regular file.
  return (await globber.glob()).filter((file) => fs.statSync(file).isFile());
}

function sort_and_uniq(a: string[]) {
  return a
    .sort((a, b) => a.localeCompare(b))
    .reduce((accumulator: string[], currentValue: string) => {
      const len = accumulator.length;
      // Push currentValue if the accumulator is empty or its last
      // element != currentValue. Since the array is already sorted,
      // elements with the same value are grouped together and
      // contiguous in space.
      //
      // If currentValue != last element, then it must be unique.
      if (len == 0 || accumulator[len - 1].localeCompare(currentValue) != 0) {
        accumulator.push(currentValue);
      }
      return accumulator;
    }, []);
}
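Putting the pieces of CacheConfig.new() together: the final key is the prefix, then the environment hash, then the lockfile hash. A sketch of the resulting shape; all concrete values below are invented for illustration:

    // Sketch: the key shape CacheConfig.new() builds above.
    const keyPrefix = "v0-rust-shared-Linux-x64"; // prefix-key + shared-key (or key/job) + OS + arch
    const envHash = "1a2b3c4d"; // truncated sha1 over rust version + matched env vars
    const lockHash = "5e6f7a8b"; // truncated sha1 over manifests and lockfiles
    const restoreKey = `${keyPrefix}-${envHash}`; // secondary key, prefix-matches older caches
    const cacheKey = `${restoreKey}-${lockHash}`; // primary key, exact match only
    console.log(restoreKey, cacheKey);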
@@ -1,41 +1,78 @@
-import * as cache from "@actions/cache";
 import * as core from "@actions/core";
-import { getCaches, isValidEvent } from "./common";
+
+import { cleanTargetDir } from "./cleanup";
+import { CacheConfig } from "./config";
+import { getCacheProvider, reportError } from "./utils";
+
+process.on("uncaughtException", (e) => {
+  core.error(e.message);
+  if (e.stack) {
+    core.error(e.stack);
+  }
+});
 
 async function run() {
-  if (!isValidEvent()) {
+  const cacheProvider = getCacheProvider();
+
+  if (!cacheProvider.cache.isFeatureAvailable()) {
+    setCacheHitOutput(false);
     return;
   }
 
   try {
+    var cacheOnFailure = core.getInput("cache-on-failure").toLowerCase();
+    if (cacheOnFailure !== "true") {
+      cacheOnFailure = "false";
+    }
+    var lookupOnly = core.getInput("lookup-only").toLowerCase() === "true";
+
+    core.exportVariable("CACHE_ON_FAILURE", cacheOnFailure);
     core.exportVariable("CARGO_INCREMENTAL", 0);
 
-    const caches = await getCaches();
-    for (const [type, { name, path, key, restoreKeys }] of Object.entries(caches)) {
-      const start = Date.now();
-      core.startGroup(`Restoring ${name}…`);
-      core.info(`Restoring to path "${path}".`);
-      core.info(`Using keys:\n    ${[key, ...restoreKeys].join("\n    ")}`);
-      try {
-        const restoreKey = await cache.restoreCache([path], key, restoreKeys);
-        if (restoreKey) {
-          core.info(`Restored from cache key "${restoreKey}".`);
-          core.saveState(type, restoreKey);
-        } else {
-          core.info("No cache found.");
-        }
-      } catch (e) {
-        core.info(`[warning] ${e.message}`);
-      }
-      const duration = Math.round((Date.now() - start) / 1000);
-      if (duration) {
-        core.info(`Took ${duration}s.`);
-      }
-      core.endGroup();
-    }
+    const config = await CacheConfig.new();
+    config.printInfo(cacheProvider);
+    core.info("");
+
+    core.info(`... ${lookupOnly ? "Checking" : "Restoring"} cache ...`);
+    const key = config.cacheKey;
+    // Pass a copy of cachePaths to avoid mutating the original array as reported by:
+    // https://github.com/actions/toolkit/pull/1378
+    // TODO: remove this once the underlying bug is fixed.
+    const restoreKey = await cacheProvider.cache.restoreCache(config.cachePaths.slice(), key, [config.restoreKey], {
+      lookupOnly,
+    });
+    if (restoreKey) {
+      const match = restoreKey === key;
+      core.info(`${lookupOnly ? "Found" : "Restored from"} cache key "${restoreKey}" full match: ${match}.`);
+      if (!match) {
+        // pre-clean the target directory on cache mismatch
+        for (const workspace of config.workspaces) {
+          try {
+            await cleanTargetDir(workspace.target, [], true);
+          } catch {}
+        }
+
+        // We restored the cache but it is not a full match.
+        config.saveState();
+      }
+
+      setCacheHitOutput(match);
+    } else {
+      core.info("No cache found.");
+      config.saveState();
+
+      setCacheHitOutput(false);
+    }
   } catch (e) {
-    core.info(`[warning] ${e.message}`);
+    setCacheHitOutput(false);
+
+    reportError(e);
   }
+  process.exit();
+}
+
+function setCacheHitOutput(cacheHit: boolean): void {
+  core.setOutput("cache-hit", cacheHit.toString());
 }
 
 run();
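The restore step above treats a restored key equal to the primary key as a full hit; anything that only matched `config.restoreKey` is a partial hit, in which case the target directories are pre-cleaned and the state is saved so the post step re-saves. A minimal sketch of that decision, assuming restoreCache resolves to the matched key or undefined on a miss:

    // Sketch: the hit/partial/miss classification made in run() above.
    function classify(restored: string | undefined, primary: string): "full" | "partial" | "miss" {
      if (!restored) return "miss"; // nothing matched; the post step will save a fresh cache
      return restored === primary ? "full" : "partial"; // partial => target dirs get pre-cleaned
    }
    console.log(classify(undefined, "k"), classify("k", "k"), classify("k-prefix-match", "k"));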
210 src/save.ts
@@ -1,167 +1,95 @@
-import * as cache from "@actions/cache";
 import * as core from "@actions/core";
 import * as exec from "@actions/exec";
-import * as io from "@actions/io";
-import fs from "fs";
-import path from "path";
-import { getCaches, getCmdOutput, getRegistryName, isValidEvent, paths } from "./common";
+
+import { cleanBin, cleanGit, cleanRegistry, cleanTargetDir } from "./cleanup";
+import { CacheConfig, isCacheUpToDate } from "./config";
+import { getCacheProvider, reportError } from "./utils";
+
+process.on("uncaughtException", (e) => {
+  core.error(e.message);
+  if (e.stack) {
+    core.error(e.stack);
+  }
+});
 
 async function run() {
-  if (!isValidEvent()) {
+  const cacheProvider = getCacheProvider();
+
+  const save = core.getInput("save-if").toLowerCase() || "true";
+
+  if (!(cacheProvider.cache.isFeatureAvailable() && save === "true")) {
     return;
   }
 
   try {
-    const caches = await getCaches();
-    const registryName = await getRegistryName();
-    const packages = await getPackages();
+    if (isCacheUpToDate()) {
+      core.info(`Cache up-to-date.`);
+      return;
+    }
+
+    const config = CacheConfig.fromState();
+    config.printInfo(cacheProvider);
+    core.info("");
 
     // TODO: remove this once https://github.com/actions/toolkit/pull/553 lands
-    await macOsWorkaround();
+    if (process.env["RUNNER_OS"] == "macOS") {
+      await macOsWorkaround();
+    }
 
-    await pruneTarget(packages);
-
-    if (registryName) {
-      // save the index based on its revision
-      const indexRef = await getIndexRef(registryName);
-      caches.index.key += indexRef;
-      await io.rmRF(path.join(paths.index, registryName, ".cache"));
-
-      await pruneRegistryCache(registryName, packages);
-    } else {
-      delete (caches as any).index;
-      delete (caches as any).cache;
-    }
-
-    for (const [type, { name, path, key }] of Object.entries(caches)) {
-      if (core.getState(type) === key) {
-        core.info(`${name} up-to-date.`);
-        continue;
-      }
-      const start = Date.now();
-      core.startGroup(`Saving ${name}…`);
-      core.info(`Saving path "${path}".`);
-      core.info(`Using key "${key}".`);
-      try {
-        await cache.saveCache([path], key);
-      } catch (e) {
-        core.info(`[warning] ${e.message}`);
-      }
-      const duration = Math.round((Date.now() - start) / 1000);
-      if (duration) {
-        core.info(`Took ${duration}s.`);
-      }
-      core.endGroup();
-    }
+    const workspaceCrates = core.getInput("cache-workspace-crates").toLowerCase() || "false";
+    const allPackages = [];
+    for (const workspace of config.workspaces) {
+      const packages = await workspace.getPackagesOutsideWorkspaceRoot();
+      if (workspaceCrates === "true") {
+        const wsMembers = await workspace.getWorkspaceMembers();
+        packages.push(...wsMembers);
+      }
+      allPackages.push(...packages);
+      try {
+        core.info(`... Cleaning ${workspace.target} ...`);
+        await cleanTargetDir(workspace.target, packages);
+      } catch (e) {
+        core.debug(`${(e as any).stack}`);
+      }
+    }
+
+    try {
+      const crates = core.getInput("cache-all-crates").toLowerCase() || "false";
+      core.info(`... Cleaning cargo registry (cache-all-crates: ${crates}) ...`);
+      await cleanRegistry(allPackages, crates !== "true");
+    } catch (e) {
+      core.debug(`${(e as any).stack}`);
+    }
+
+    if (config.cacheBin) {
+      try {
+        core.info(`... Cleaning cargo/bin ...`);
+        await cleanBin(config.cargoBins);
+      } catch (e) {
+        core.debug(`${(e as any).stack}`);
+      }
+    }
+
+    try {
+      core.info(`... Cleaning cargo git cache ...`);
+      await cleanGit(allPackages);
+    } catch (e) {
+      core.debug(`${(e as any).stack}`);
+    }
+
+    core.info(`... Saving cache ...`);
+    // Pass a copy of cachePaths to avoid mutating the original array as reported by:
+    // https://github.com/actions/toolkit/pull/1378
+    // TODO: remove this once the underlying bug is fixed.
+    await cacheProvider.cache.saveCache(config.cachePaths.slice(), config.cacheKey);
   } catch (e) {
-    core.info(`[warning] ${e.message}`);
+    reportError(e);
  }
+  process.exit();
 }
 
 run();
 
-async function getIndexRef(registryName: string) {
-  const cwd = path.join(paths.index, registryName);
-  return (await getCmdOutput("git", ["rev-parse", "--short", "origin/master"], { cwd })).trim();
-}
-
-interface PackageDefinition {
-  name: string;
-  version: string;
-  targets: Array<string>;
-}
-
-type Packages = Array<PackageDefinition>;
-
-interface Meta {
-  packages: Array<{
-    name: string;
-    version: string;
-    manifest_path: string;
-    targets: Array<{ kind: Array<string>; name: string }>;
-  }>;
-}
-
-async function getPackages(): Promise<Packages> {
-  const cwd = process.cwd();
-  const meta: Meta = JSON.parse(await getCmdOutput("cargo", ["metadata", "--all-features", "--format-version", "1"]));
-
-  return meta.packages
-    .filter((p) => !p.manifest_path.startsWith(cwd))
-    .map((p) => {
-      const targets = p.targets.filter((t) => t.kind[0] === "lib").map((t) => t.name);
-      return { name: p.name, version: p.version, targets };
-    });
-}
-
-async function pruneRegistryCache(registryName: string, packages: Packages) {
-  const pkgSet = new Set(packages.map((p) => `${p.name}-${p.version}.crate`));
-
-  const dir = await fs.promises.opendir(path.join(paths.cache, registryName));
-  for await (const dirent of dir) {
-    if (dirent.isFile() && !pkgSet.has(dirent.name)) {
-      const fileName = path.join(dir.path, dirent.name);
-      await fs.promises.unlink(fileName);
-      core.debug(`deleting "${fileName}"`);
-    }
-  }
-}
-
-async function pruneTarget(packages: Packages) {
-  await fs.promises.unlink("./target/.rustc_info.json");
-  await io.rmRF("./target/debug/examples");
-  await io.rmRF("./target/debug/incremental");
-  let dir: fs.Dir;
-
-  // remove all *files* from debug
-  dir = await fs.promises.opendir("./target/debug");
-  for await (const dirent of dir) {
-    if (dirent.isFile()) {
-      const fileName = path.join(dir.path, dirent.name);
-      await fs.promises.unlink(fileName);
-    }
-  }
-
-  const keepPkg = new Set(packages.map((p) => p.name));
-  await rmExcept("./target/debug/build", keepPkg);
-  await rmExcept("./target/debug/.fingerprint", keepPkg);
-
-  const keepDeps = new Set(
-    packages.flatMap((p) => {
-      const names = [];
-      for (const n of [p.name, ...p.targets]) {
-        const name = n.replace(/-/g, "_");
-        names.push(name, `lib${name}`);
-      }
-      return names;
-    }),
-  );
-  await rmExcept("./target/debug/deps", keepDeps);
-}
-
-const twoWeeks = 14 * 24 * 3600 * 1000;
-
-async function rmExcept(dirName: string, keepPrefix: Set<string>) {
-  const dir = await fs.promises.opendir(dirName);
-  for await (const dirent of dir) {
-    let name = dirent.name;
-    const idx = name.lastIndexOf("-");
-    if (idx !== -1) {
-      name = name.slice(0, idx);
-    }
-    const fileName = path.join(dir.path, dirent.name);
-    const { mtime } = await fs.promises.stat(fileName);
-    if (!keepPrefix.has(name) || Date.now() - mtime.getTime() > twoWeeks) {
-      core.debug(`deleting "${fileName}"`);
-      if (dirent.isFile()) {
-        await fs.promises.unlink(fileName);
-      } else if (dirent.isDirectory()) {
-        await io.rmRF(fileName);
-      }
-    }
-  }
-}
-
 async function macOsWorkaround() {
   try {
     // Workaround for https://github.com/actions/cache/issues/403
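One nuance in the registry cleaning above: the second argument to `cleanRegistry` controls pruning, and it is derived by negating the `cache-all-crates` input. A small sketch of that mapping, with the input value hard-coded for illustration:

    // Sketch: how `cache-all-crates` gates registry pruning in the save step above.
    const cacheAllCrates = "false"; // core.getInput("cache-all-crates"), defaulting to "false"
    const shouldPrune = cacheAllCrates !== "true"; // prune unused crates unless keeping all of them
    console.log(shouldPrune ? "pruning registry to used crates" : "keeping all downloaded crates");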
89 src/utils.ts Normal file
@@ -0,0 +1,89 @@
import * as core from "@actions/core";
import * as exec from "@actions/exec";
import * as buildjetCache from "@actions/buildjet-cache";
import * as warpbuildCache from "@actions/warpbuild-cache";
import * as ghCache from "@actions/cache";
import fs from "fs";

export function reportError(e: any) {
  const { commandFailed } = e;
  if (commandFailed) {
    core.error(`Command failed: ${commandFailed.command}`);
    core.error(commandFailed.stderr);
  } else {
    core.error(`${e.stack}`);
  }
}

export async function getCmdOutput(
  cmd: string,
  args: Array<string> = [],
  options: exec.ExecOptions = {},
): Promise<string> {
  let stdout = "";
  let stderr = "";
  try {
    await exec.exec(cmd, args, {
      silent: true,
      listeners: {
        stdout(data) {
          stdout += data.toString();
        },
        stderr(data) {
          stderr += data.toString();
        },
      },
      ...options,
    });
  } catch (e) {
    (e as any).commandFailed = {
      command: `${cmd} ${args.join(" ")}`,
      stderr,
    };
    throw e;
  }
  return stdout;
}

export interface GhCache {
  isFeatureAvailable: typeof ghCache.isFeatureAvailable;
  restoreCache: typeof ghCache.restoreCache;
  saveCache: (paths: string[], key: string) => Promise<string | number>;
}

export interface CacheProvider {
  name: string;
  cache: GhCache;
}

export function getCacheProvider(): CacheProvider {
  const cacheProvider = core.getInput("cache-provider");
  let cache: GhCache;
  switch (cacheProvider) {
    case "github":
      cache = ghCache;
      break;
    case "buildjet":
      cache = buildjetCache;
      break;
    case "warpbuild":
      cache = warpbuildCache;
      break;
    default:
      throw new Error(`The \`cache-provider\` \`${cacheProvider}\` is not valid.`);
  }

  return {
    name: cacheProvider,
    cache: cache,
  };
}

export async function exists(path: string) {
  try {
    await fs.promises.access(path);
    return true;
  } catch {
    return false;
  }
}
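getCmdOutput above attaches the failing command and its captured stderr to the thrown error, and reportError prints exactly those fields. A minimal usage sketch; the command is illustrative, and any non-zero exit takes the catch path:

    // Sketch: how getCmdOutput and reportError cooperate on failure.
    async function example() {
      try {
        const out = await getCmdOutput("cargo", ["metadata", "--format-version", "1"]);
        console.log(`got ${out.length} bytes of metadata`);
      } catch (e) {
        reportError(e); // logs `Command failed: cargo metadata ...` plus the captured stderr
      }
    }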
57 src/workspace.ts Normal file
@@ -0,0 +1,57 @@
import * as core from "@actions/core";
import path from "path";

import { getCmdOutput } from "./utils";

const SAVE_TARGETS = new Set(["lib", "proc-macro"]);

export class Workspace {
  constructor(public root: string, public target: string) {}

  async getPackages(filter: (p: Meta["packages"][0]) => boolean, ...extraArgs: string[]): Promise<Packages> {
    let packages: Packages = [];
    try {
      core.debug(`collecting metadata for "${this.root}"`);
      const meta: Meta = JSON.parse(
        await getCmdOutput("cargo", ["metadata", "--all-features", "--format-version", "1", ...extraArgs], {
          cwd: this.root,
          env: { "CARGO_ENCODED_RUSTFLAGS": "" },
        }),
      );
      core.debug(`workspace "${this.root}" has ${meta.packages.length} packages`);
      for (const pkg of meta.packages.filter(filter)) {
        const targets = pkg.targets.filter((t) => t.kind.some((kind) => SAVE_TARGETS.has(kind))).map((t) => t.name);
        packages.push({ name: pkg.name, version: pkg.version, targets, path: path.dirname(pkg.manifest_path) });
      }
    } catch (err) {
      console.error(err);
    }
    return packages;
  }

  public async getPackagesOutsideWorkspaceRoot(): Promise<Packages> {
    return await this.getPackages((pkg) => !pkg.manifest_path.startsWith(this.root));
  }

  public async getWorkspaceMembers(): Promise<Packages> {
    return await this.getPackages((_) => true, "--no-deps");
  }
}

export interface PackageDefinition {
  name: string;
  version: string;
  path: string;
  targets: Array<string>;
}

export type Packages = Array<PackageDefinition>;

interface Meta {
  packages: Array<{
    name: string;
    version: string;
    manifest_path: string;
    targets: Array<{ kind: Array<string>; name: string }>;
  }>;
}
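For context, a sketch of how src/save.ts drives the Workspace class above; the paths here are illustrative:

    // Sketch: typical Workspace usage (hypothetical repo layout).
    async function example() {
      const ws = new Workspace("/repo", "/repo/target");
      const external = await ws.getPackagesOutsideWorkspaceRoot(); // dependencies checked out elsewhere
      const members = await ws.getWorkspaceMembers(); // `--no-deps`: workspace crates only
      console.log(external.length, members.length);
    }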
1780 tests/Cargo.lock generated Normal file
File diff suppressed because it is too large.
17 tests/Cargo.toml Normal file
@@ -0,0 +1,17 @@
[package]
publish = false
name = "rust-cache"
version = "0.1.0"
authors = ["Arpad Borsos <arpad.borsos@googlemail.com>"]
edition = "2021"

[dependencies]
reqwest = "0.12.1"
jsonpath_lib_polars_vendor = "0.0.1"
watto = { git = "https://github.com/getsentry/watto", rev = "39ccb9add289c1f23c89f40506f4a80b2f4011b9", features = ["strings"] }

[dev-dependencies]
trybuild = "1"

[target.'cfg(not(target_env = "msvc"))'.dependencies]
tikv-jemallocator = "0.6.0"
2 tests/rust-toolchain/.keep Normal file
@@ -0,0 +1,2 @@
the `rust-toolchain` directory will be globbed,
and should not lead to any errors down the road
25 tests/src/main.rs Normal file
@@ -0,0 +1,25 @@
#[cfg(not(target_env = "msvc"))]
use tikv_jemallocator::Jemalloc;

#[cfg(not(target_env = "msvc"))]
#[global_allocator]
static GLOBAL: Jemalloc = Jemalloc;

fn main() {
    println!("Hello, world!");
}

#[cfg(test)]
fn some_fn(input: bool) -> usize {
    if input {
        2 + 4
    } else {
        3_usize.saturating_add(5)
    }
}

#[test]
fn some_test() {
    assert_eq!(some_fn(true), 6);
    assert_eq!(some_fn(false), 8);
}
6 tests/tests/trybuild.rs Normal file
@@ -0,0 +1,6 @@
#[test]
fn test_trybuild() {
    let t = trybuild::TestCases::new();
    t.pass("tests/trybuild/empty_main.rs");
    t.compile_fail("tests/trybuild/fail_to_compile.rs");
}
1 tests/tests/trybuild/empty_main.rs Normal file
@@ -0,0 +1 @@
fn main() {}
3 tests/tests/trybuild/fail_to_compile.rs Normal file
@@ -0,0 +1,3 @@
fn main() {
    "foobar".foobar();
}
5 tests/tests/trybuild/fail_to_compile.stderr Normal file
@@ -0,0 +1,5 @@
error[E0599]: no method named `foobar` found for reference `&'static str` in the current scope
 --> tests/trybuild/fail_to_compile.rs:2:14
  |
2 |     "foobar".foobar();
  |              ^^^^^^ method not found in `&'static str`
2 tests/wasm-workspace/.cargo/config.toml Normal file
@@ -0,0 +1,2 @@
[build]
target = "wasm32-unknown-unknown"
2007 tests/wasm-workspace/Cargo.lock generated Normal file
File diff suppressed because it is too large.
6 tests/wasm-workspace/Cargo.toml Normal file
@@ -0,0 +1,6 @@
[workspace]
resolver = "2"
members = [
  "crates/one",
  "crates/two",
]
13 tests/wasm-workspace/crates/one/Cargo.toml Normal file
@@ -0,0 +1,13 @@
[package]
publish = false
name = "wasm-one"
version = "0.1.0"
edition = "2021"

[dependencies]
reqwest = "0.12"
async-std = "1"
tracing = "0.1"
tracing-futures = "0.2"
serde = "1"
serde_json = "1"
3 tests/wasm-workspace/crates/one/src/main.rs Normal file
@@ -0,0 +1,3 @@
fn main() {
    println!("Hello, world!");
}
8 tests/wasm-workspace/crates/two/Cargo.toml Normal file
@@ -0,0 +1,8 @@
[package]
publish = false
name = "wasm-two"
version = "0.1.0"
edition = "2021"

[dependencies]
clap = "4"
3 tests/wasm-workspace/crates/two/src/main.rs Normal file
@@ -0,0 +1,3 @@
fn main() {
    println!("Hello, world!");
}
@@ -4,7 +4,7 @@
     "diagnostics": true,
     "lib": ["esnext"],
-    "target": "es2017",
+    "target": "es2020",
     "resolveJsonModule": true,
     "moduleResolution": "node",