mirror of
				https://github.com/Swatinem/rust-cache.git
				synced 2025-10-31 23:43:47 +00:00 
			
		
		
		
	cause save state if no incremental-restore.json
This commit is contained in:
		
							parent
							
								
									0381560ff9
								
							
						
					
					
						commit
						36c8b73442
					
				
							
								
								
									
										114
									
								
								dist/restore/index.js
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										114
									
								
								dist/restore/index.js
									
									
									
									
										vendored
									
									
								
							| @ -86691,6 +86691,59 @@ class Workspace { | |||||||
|     } |     } | ||||||
| } | } | ||||||
| 
 | 
 | ||||||
|  | ;// CONCATENATED MODULE: ./src/incremental.ts
 | ||||||
|  | 
 | ||||||
|  | // import * as io from "@actions/io";
 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | // import { CARGO_HOME } from "./config";
 | ||||||
|  | 
 | ||||||
|  | // import { Packages } from "./workspace";
 | ||||||
|  | let incremental_missing = false; | ||||||
|  | function isIncrementalMissing() { | ||||||
|  |     return incremental_missing; | ||||||
|  | } | ||||||
|  | async function restoreIncremental(targetDir) { | ||||||
|  |     lib_core.debug(`restoring incremental directory "${targetDir}"`); | ||||||
|  |     let dir = await external_fs_default().promises.opendir(targetDir); | ||||||
|  |     for await (const dirent of dir) { | ||||||
|  |         if (dirent.isDirectory()) { | ||||||
|  |             let dirName = external_path_default().join(dir.path, dirent.name); | ||||||
|  |             // is it a profile dir, or a nested target dir?
 | ||||||
|  |             let isNestedTarget = (await utils_exists(external_path_default().join(dirName, "CACHEDIR.TAG"))) || (await utils_exists(external_path_default().join(dirName, ".rustc_info.json"))); | ||||||
|  |             try { | ||||||
|  |                 if (isNestedTarget) { | ||||||
|  |                     await restoreIncremental(dirName); | ||||||
|  |                 } | ||||||
|  |                 else { | ||||||
|  |                     await restoreIncrementalProfile(dirName); | ||||||
|  |                 } | ||||||
|  |                 restoreIncrementalProfile; | ||||||
|  |             } | ||||||
|  |             catch { } | ||||||
|  |         } | ||||||
|  |     } | ||||||
|  | } | ||||||
|  | async function restoreIncrementalProfile(dirName) { | ||||||
|  |     lib_core.debug(`restoring incremental profile directory "${dirName}"`); | ||||||
|  |     const incrementalJson = external_path_default().join(dirName, "incremental-restore.json"); | ||||||
|  |     if (await utils_exists(incrementalJson)) { | ||||||
|  |         const contents = await external_fs_default().promises.readFile(incrementalJson, "utf8"); | ||||||
|  |         const { modifiedTimes } = JSON.parse(contents); | ||||||
|  |         lib_core.debug(`restoring incremental profile directory "${dirName}" with ${modifiedTimes} files`); | ||||||
|  |         // Write the mtimes to all the files in the profile directory
 | ||||||
|  |         for (const fileName of Object.keys(modifiedTimes)) { | ||||||
|  |             const mtime = modifiedTimes[fileName]; | ||||||
|  |             const filePath = external_path_default().join(dirName, fileName); | ||||||
|  |             await external_fs_default().promises.utimes(filePath, new Date(mtime), new Date(mtime)); | ||||||
|  |         } | ||||||
|  |     } | ||||||
|  |     else { | ||||||
|  |         lib_core.debug(`incremental-restore.json not found for ${dirName}`); | ||||||
|  |         incremental_missing = true; | ||||||
|  |     } | ||||||
|  | } | ||||||
|  | 
 | ||||||
| ;// CONCATENATED MODULE: ./src/config.ts
 | ;// CONCATENATED MODULE: ./src/config.ts
 | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @ -86703,6 +86756,7 @@ class Workspace { | |||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
|  | 
 | ||||||
| const HOME = external_os_default().homedir(); | const HOME = external_os_default().homedir(); | ||||||
| const config_CARGO_HOME = process.env.CARGO_HOME || external_path_default().join(HOME, ".cargo"); | const config_CARGO_HOME = process.env.CARGO_HOME || external_path_default().join(HOME, ".cargo"); | ||||||
| const STATE_CONFIG = "RUST_CACHE_CONFIG"; | const STATE_CONFIG = "RUST_CACHE_CONFIG"; | ||||||
| @ -86911,6 +86965,13 @@ class CacheConfig { | |||||||
|         for (const dir of cacheDirectories.trim().split(/\s+/).filter(Boolean)) { |         for (const dir of cacheDirectories.trim().split(/\s+/).filter(Boolean)) { | ||||||
|             self.cachePaths.push(dir); |             self.cachePaths.push(dir); | ||||||
|         } |         } | ||||||
|  |         if (self.incremental) { | ||||||
|  |             if (cacheTargets === "true") { | ||||||
|  |                 for (const target of self.workspaces.map((ws) => ws.target)) { | ||||||
|  |                     self.cachePaths.push(external_path_default().join(target, "incremental")); | ||||||
|  |                 } | ||||||
|  |             } | ||||||
|  |         } | ||||||
|         const bins = await getCargoBins(); |         const bins = await getCargoBins(); | ||||||
|         self.cargoBins = Array.from(bins.values()); |         self.cargoBins = Array.from(bins.values()); | ||||||
|         return self; |         return self; | ||||||
| @ -86973,6 +87034,12 @@ class CacheConfig { | |||||||
|     saveState() { |     saveState() { | ||||||
|         lib_core.saveState(STATE_CONFIG, this); |         lib_core.saveState(STATE_CONFIG, this); | ||||||
|     } |     } | ||||||
|  |     isIncrementalMissing() { | ||||||
|  |         if (this.incremental) { | ||||||
|  |             return isIncrementalMissing(); | ||||||
|  |         } | ||||||
|  |         return false; | ||||||
|  |     } | ||||||
| } | } | ||||||
| /** | /** | ||||||
|  * Checks if the cache is up to date. |  * Checks if the cache is up to date. | ||||||
| @ -87340,51 +87407,6 @@ async function rmRF(dirName) { | |||||||
|     await io.rmRF(dirName); |     await io.rmRF(dirName); | ||||||
| } | } | ||||||
| 
 | 
 | ||||||
| ;// CONCATENATED MODULE: ./src/incremental.ts
 |  | ||||||
| 
 |  | ||||||
| // import * as io from "@actions/io";
 |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| // import { CARGO_HOME } from "./config";
 |  | ||||||
| 
 |  | ||||||
| // import { Packages } from "./workspace";
 |  | ||||||
| async function restoreIncremental(targetDir) { |  | ||||||
|     lib_core.debug(`restoring incremental directory "${targetDir}"`); |  | ||||||
|     let dir = await external_fs_default().promises.opendir(targetDir); |  | ||||||
|     for await (const dirent of dir) { |  | ||||||
|         if (dirent.isDirectory()) { |  | ||||||
|             let dirName = external_path_default().join(dir.path, dirent.name); |  | ||||||
|             // is it a profile dir, or a nested target dir?
 |  | ||||||
|             let isNestedTarget = (await utils_exists(external_path_default().join(dirName, "CACHEDIR.TAG"))) || (await utils_exists(external_path_default().join(dirName, ".rustc_info.json"))); |  | ||||||
|             try { |  | ||||||
|                 if (isNestedTarget) { |  | ||||||
|                     await restoreIncremental(dirName); |  | ||||||
|                 } |  | ||||||
|                 else { |  | ||||||
|                     await restoreIncrementalProfile(dirName); |  | ||||||
|                 } |  | ||||||
|                 restoreIncrementalProfile; |  | ||||||
|             } |  | ||||||
|             catch { } |  | ||||||
|         } |  | ||||||
|     } |  | ||||||
| } |  | ||||||
| async function restoreIncrementalProfile(dirName) { |  | ||||||
|     lib_core.debug(`restoring incremental profile directory "${dirName}"`); |  | ||||||
|     const incrementalJson = external_path_default().join(dirName, "incremental-restore.json"); |  | ||||||
|     if (await utils_exists(incrementalJson)) { |  | ||||||
|         const contents = await external_fs_default().promises.readFile(incrementalJson, "utf8"); |  | ||||||
|         const { modifiedTimes } = JSON.parse(contents); |  | ||||||
|         lib_core.debug(`restoring incremental profile directory "${dirName}" with ${modifiedTimes} files`); |  | ||||||
|         // Write the mtimes to all the files in the profile directory
 |  | ||||||
|         for (const fileName of Object.keys(modifiedTimes)) { |  | ||||||
|             const mtime = modifiedTimes[fileName]; |  | ||||||
|             const filePath = external_path_default().join(dirName, fileName); |  | ||||||
|             await external_fs_default().promises.utimes(filePath, new Date(mtime), new Date(mtime)); |  | ||||||
|         } |  | ||||||
|     } |  | ||||||
| } |  | ||||||
| 
 |  | ||||||
| ;// CONCATENATED MODULE: ./src/restore.ts
 | ;// CONCATENATED MODULE: ./src/restore.ts
 | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @ -87433,7 +87455,7 @@ async function run() { | |||||||
|                     await restoreIncremental(workspace.target); |                     await restoreIncremental(workspace.target); | ||||||
|                 } |                 } | ||||||
|             } |             } | ||||||
|             if (!match) { |             if (!match || config.isIncrementalMissing()) { | ||||||
|                 // pre-clean the target directory on cache mismatch
 |                 // pre-clean the target directory on cache mismatch
 | ||||||
|                 for (const workspace of config.workspaces) { |                 for (const workspace of config.workspaces) { | ||||||
|                     try { |                     try { | ||||||
|  | |||||||
							
								
								
									
										203
									
								
								dist/save/index.js
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										203
									
								
								dist/save/index.js
									
									
									
									
										vendored
									
									
								
							| @ -85430,7 +85430,7 @@ var __webpack_exports__ = {}; | |||||||
| "use strict"; | "use strict"; | ||||||
| 
 | 
 | ||||||
| // EXTERNAL MODULE: ./node_modules/@actions/core/lib/core.js
 | // EXTERNAL MODULE: ./node_modules/@actions/core/lib/core.js
 | ||||||
| var core = __nccwpck_require__(7484); | var lib_core = __nccwpck_require__(7484); | ||||||
| // EXTERNAL MODULE: ./node_modules/@actions/exec/lib/exec.js
 | // EXTERNAL MODULE: ./node_modules/@actions/exec/lib/exec.js
 | ||||||
| var exec = __nccwpck_require__(5236); | var exec = __nccwpck_require__(5236); | ||||||
| // EXTERNAL MODULE: ./node_modules/@actions/io/lib/io.js
 | // EXTERNAL MODULE: ./node_modules/@actions/io/lib/io.js
 | ||||||
| @ -86601,11 +86601,11 @@ var cache_lib_cache = __nccwpck_require__(5116); | |||||||
| function reportError(e) { | function reportError(e) { | ||||||
|     const { commandFailed } = e; |     const { commandFailed } = e; | ||||||
|     if (commandFailed) { |     if (commandFailed) { | ||||||
|         core.error(`Command failed: ${commandFailed.command}`); |         lib_core.error(`Command failed: ${commandFailed.command}`); | ||||||
|         core.error(commandFailed.stderr); |         lib_core.error(commandFailed.stderr); | ||||||
|     } |     } | ||||||
|     else { |     else { | ||||||
|         core.error(`${e.stack}`); |         lib_core.error(`${e.stack}`); | ||||||
|     } |     } | ||||||
| } | } | ||||||
| async function getCmdOutput(cmd, args = [], options = {}) { | async function getCmdOutput(cmd, args = [], options = {}) { | ||||||
| @ -86635,7 +86635,7 @@ async function getCmdOutput(cmd, args = [], options = {}) { | |||||||
|     return stdout; |     return stdout; | ||||||
| } | } | ||||||
| function getCacheProvider() { | function getCacheProvider() { | ||||||
|     const cacheProvider = core.getInput("cache-provider"); |     const cacheProvider = lib_core.getInput("cache-provider"); | ||||||
|     const cache = cacheProvider === "github" ? cache_lib_cache : cacheProvider === "buildjet" ? lib_cache : undefined; |     const cache = cacheProvider === "github" ? cache_lib_cache : cacheProvider === "buildjet" ? lib_cache : undefined; | ||||||
|     if (!cache) { |     if (!cache) { | ||||||
|         throw new Error(`The \`cache-provider\` \`{cacheProvider}\` is not valid.`); |         throw new Error(`The \`cache-provider\` \`{cacheProvider}\` is not valid.`); | ||||||
| @ -86645,7 +86645,7 @@ function getCacheProvider() { | |||||||
|         cache: cache, |         cache: cache, | ||||||
|     }; |     }; | ||||||
| } | } | ||||||
| async function exists(path) { | async function utils_exists(path) { | ||||||
|     try { |     try { | ||||||
|         await external_fs_default().promises.access(path); |         await external_fs_default().promises.access(path); | ||||||
|         return true; |         return true; | ||||||
| @ -86668,11 +86668,11 @@ class Workspace { | |||||||
|     async getPackages(filter, ...extraArgs) { |     async getPackages(filter, ...extraArgs) { | ||||||
|         let packages = []; |         let packages = []; | ||||||
|         try { |         try { | ||||||
|             core.debug(`collecting metadata for "${this.root}"`); |             lib_core.debug(`collecting metadata for "${this.root}"`); | ||||||
|             const meta = JSON.parse(await getCmdOutput("cargo", ["metadata", "--all-features", "--format-version", "1", ...extraArgs], { |             const meta = JSON.parse(await getCmdOutput("cargo", ["metadata", "--all-features", "--format-version", "1", ...extraArgs], { | ||||||
|                 cwd: this.root, |                 cwd: this.root, | ||||||
|             })); |             })); | ||||||
|             core.debug(`workspace "${this.root}" has ${meta.packages.length} packages`); |             lib_core.debug(`workspace "${this.root}" has ${meta.packages.length} packages`); | ||||||
|             for (const pkg of meta.packages.filter(filter)) { |             for (const pkg of meta.packages.filter(filter)) { | ||||||
|                 const targets = pkg.targets.filter((t) => t.kind.some((kind) => SAVE_TARGETS.has(kind))).map((t) => t.name); |                 const targets = pkg.targets.filter((t) => t.kind.some((kind) => SAVE_TARGETS.has(kind))).map((t) => t.name); | ||||||
|                 packages.push({ name: pkg.name, version: pkg.version, targets, path: external_path_default().dirname(pkg.manifest_path) }); |                 packages.push({ name: pkg.name, version: pkg.version, targets, path: external_path_default().dirname(pkg.manifest_path) }); | ||||||
| @ -86691,6 +86691,59 @@ class Workspace { | |||||||
|     } |     } | ||||||
| } | } | ||||||
| 
 | 
 | ||||||
|  | ;// CONCATENATED MODULE: ./src/incremental.ts
 | ||||||
|  | 
 | ||||||
|  | // import * as io from "@actions/io";
 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | // import { CARGO_HOME } from "./config";
 | ||||||
|  | 
 | ||||||
|  | // import { Packages } from "./workspace";
 | ||||||
|  | let incremental_missing = false; | ||||||
|  | function isIncrementalMissing() { | ||||||
|  |     return incremental_missing; | ||||||
|  | } | ||||||
|  | async function restoreIncremental(targetDir) { | ||||||
|  |     core.debug(`restoring incremental directory "${targetDir}"`); | ||||||
|  |     let dir = await fs.promises.opendir(targetDir); | ||||||
|  |     for await (const dirent of dir) { | ||||||
|  |         if (dirent.isDirectory()) { | ||||||
|  |             let dirName = path.join(dir.path, dirent.name); | ||||||
|  |             // is it a profile dir, or a nested target dir?
 | ||||||
|  |             let isNestedTarget = (await exists(path.join(dirName, "CACHEDIR.TAG"))) || (await exists(path.join(dirName, ".rustc_info.json"))); | ||||||
|  |             try { | ||||||
|  |                 if (isNestedTarget) { | ||||||
|  |                     await restoreIncremental(dirName); | ||||||
|  |                 } | ||||||
|  |                 else { | ||||||
|  |                     await restoreIncrementalProfile(dirName); | ||||||
|  |                 } | ||||||
|  |                 restoreIncrementalProfile; | ||||||
|  |             } | ||||||
|  |             catch { } | ||||||
|  |         } | ||||||
|  |     } | ||||||
|  | } | ||||||
|  | async function restoreIncrementalProfile(dirName) { | ||||||
|  |     core.debug(`restoring incremental profile directory "${dirName}"`); | ||||||
|  |     const incrementalJson = path.join(dirName, "incremental-restore.json"); | ||||||
|  |     if (await exists(incrementalJson)) { | ||||||
|  |         const contents = await fs.promises.readFile(incrementalJson, "utf8"); | ||||||
|  |         const { modifiedTimes } = JSON.parse(contents); | ||||||
|  |         core.debug(`restoring incremental profile directory "${dirName}" with ${modifiedTimes} files`); | ||||||
|  |         // Write the mtimes to all the files in the profile directory
 | ||||||
|  |         for (const fileName of Object.keys(modifiedTimes)) { | ||||||
|  |             const mtime = modifiedTimes[fileName]; | ||||||
|  |             const filePath = path.join(dirName, fileName); | ||||||
|  |             await fs.promises.utimes(filePath, new Date(mtime), new Date(mtime)); | ||||||
|  |         } | ||||||
|  |     } | ||||||
|  |     else { | ||||||
|  |         core.debug(`incremental-restore.json not found for ${dirName}`); | ||||||
|  |         incremental_missing = true; | ||||||
|  |     } | ||||||
|  | } | ||||||
|  | 
 | ||||||
| ;// CONCATENATED MODULE: ./src/config.ts
 | ;// CONCATENATED MODULE: ./src/config.ts
 | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @ -86703,6 +86756,7 @@ class Workspace { | |||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
|  | 
 | ||||||
| const HOME = external_os_default().homedir(); | const HOME = external_os_default().homedir(); | ||||||
| const CARGO_HOME = process.env.CARGO_HOME || external_path_default().join(HOME, ".cargo"); | const CARGO_HOME = process.env.CARGO_HOME || external_path_default().join(HOME, ".cargo"); | ||||||
| const STATE_CONFIG = "RUST_CACHE_CONFIG"; | const STATE_CONFIG = "RUST_CACHE_CONFIG"; | ||||||
| @ -86742,13 +86796,13 @@ class CacheConfig { | |||||||
|         // Construct key prefix:
 |         // Construct key prefix:
 | ||||||
|         // This uses either the `shared-key` input,
 |         // This uses either the `shared-key` input,
 | ||||||
|         // or the `key` input combined with the `job` key.
 |         // or the `key` input combined with the `job` key.
 | ||||||
|         let key = core.getInput("prefix-key") || "v0-rust"; |         let key = lib_core.getInput("prefix-key") || "v0-rust"; | ||||||
|         const sharedKey = core.getInput("shared-key"); |         const sharedKey = lib_core.getInput("shared-key"); | ||||||
|         if (sharedKey) { |         if (sharedKey) { | ||||||
|             key += `-${sharedKey}`; |             key += `-${sharedKey}`; | ||||||
|         } |         } | ||||||
|         else { |         else { | ||||||
|             const inputKey = core.getInput("key"); |             const inputKey = lib_core.getInput("key"); | ||||||
|             if (inputKey) { |             if (inputKey) { | ||||||
|                 key += `-${inputKey}`; |                 key += `-${inputKey}`; | ||||||
|             } |             } | ||||||
| @ -86776,7 +86830,7 @@ class CacheConfig { | |||||||
|         self.keyRust = keyRust; |         self.keyRust = keyRust; | ||||||
|         // these prefixes should cover most of the compiler / rust / cargo keys
 |         // these prefixes should cover most of the compiler / rust / cargo keys
 | ||||||
|         const envPrefixes = ["CARGO", "CC", "CFLAGS", "CXX", "CMAKE", "RUST"]; |         const envPrefixes = ["CARGO", "CC", "CFLAGS", "CXX", "CMAKE", "RUST"]; | ||||||
|         envPrefixes.push(...core.getInput("env-vars").split(/\s+/).filter(Boolean)); |         envPrefixes.push(...lib_core.getInput("env-vars").split(/\s+/).filter(Boolean)); | ||||||
|         // sort the available env vars so we have a more stable hash
 |         // sort the available env vars so we have a more stable hash
 | ||||||
|         const keyEnvs = []; |         const keyEnvs = []; | ||||||
|         const envKeys = Object.keys(process.env); |         const envKeys = Object.keys(process.env); | ||||||
| @ -86790,18 +86844,18 @@ class CacheConfig { | |||||||
|         } |         } | ||||||
|         self.keyEnvs = keyEnvs; |         self.keyEnvs = keyEnvs; | ||||||
|         // Make sure we consider incremental builds
 |         // Make sure we consider incremental builds
 | ||||||
|         self.incremental = core.getInput("incremental").toLowerCase() == "true"; |         self.incremental = lib_core.getInput("incremental").toLowerCase() == "true"; | ||||||
|         hasher.update(`incremental=${self.incremental}`); |         hasher.update(`incremental=${self.incremental}`); | ||||||
|         key += `-${digest(hasher)}`; |         key += `-${digest(hasher)}`; | ||||||
|         self.restoreKey = key; |         self.restoreKey = key; | ||||||
|         // Construct the lockfiles portion of the key:
 |         // Construct the lockfiles portion of the key:
 | ||||||
|         // This considers all the files found via globbing for various manifests
 |         // This considers all the files found via globbing for various manifests
 | ||||||
|         // and lockfiles.
 |         // and lockfiles.
 | ||||||
|         self.cacheBin = core.getInput("cache-bin").toLowerCase() == "true"; |         self.cacheBin = lib_core.getInput("cache-bin").toLowerCase() == "true"; | ||||||
|         // Constructs the workspace config and paths to restore:
 |         // Constructs the workspace config and paths to restore:
 | ||||||
|         // The workspaces are given using a `$workspace -> $target` syntax.
 |         // The workspaces are given using a `$workspace -> $target` syntax.
 | ||||||
|         const workspaces = []; |         const workspaces = []; | ||||||
|         const workspacesInput = core.getInput("workspaces") || "."; |         const workspacesInput = lib_core.getInput("workspaces") || "."; | ||||||
|         for (const workspace of workspacesInput.trim().split("\n")) { |         for (const workspace of workspacesInput.trim().split("\n")) { | ||||||
|             let [root, target = "target"] = workspace.split("->").map((s) => s.trim()); |             let [root, target = "target"] = workspace.split("->").map((s) => s.trim()); | ||||||
|             root = external_path_default().resolve(root); |             root = external_path_default().resolve(root); | ||||||
| @ -86854,19 +86908,19 @@ class CacheConfig { | |||||||
|                 } |                 } | ||||||
|                 catch (e) { |                 catch (e) { | ||||||
|                     // Fallback to caching them as regular file
 |                     // Fallback to caching them as regular file
 | ||||||
|                     core.warning(`Error parsing Cargo.toml manifest, fallback to caching entire file: ${e}`); |                     lib_core.warning(`Error parsing Cargo.toml manifest, fallback to caching entire file: ${e}`); | ||||||
|                     keyFiles.push(cargo_manifest); |                     keyFiles.push(cargo_manifest); | ||||||
|                 } |                 } | ||||||
|             } |             } | ||||||
|             const cargo_lock = external_path_default().join(workspace.root, "Cargo.lock"); |             const cargo_lock = external_path_default().join(workspace.root, "Cargo.lock"); | ||||||
|             if (await exists(cargo_lock)) { |             if (await utils_exists(cargo_lock)) { | ||||||
|                 try { |                 try { | ||||||
|                     const content = await promises_default().readFile(cargo_lock, { encoding: "utf8" }); |                     const content = await promises_default().readFile(cargo_lock, { encoding: "utf8" }); | ||||||
|                     const parsed = parse(content); |                     const parsed = parse(content); | ||||||
|                     if ((parsed.version !== 3 && parsed.version !== 4) || !("package" in parsed)) { |                     if ((parsed.version !== 3 && parsed.version !== 4) || !("package" in parsed)) { | ||||||
|                         // Fallback to caching them as regular file since this action
 |                         // Fallback to caching them as regular file since this action
 | ||||||
|                         // can only handle Cargo.lock format version 3
 |                         // can only handle Cargo.lock format version 3
 | ||||||
|                         core.warning("Unsupported Cargo.lock format, fallback to caching entire file"); |                         lib_core.warning("Unsupported Cargo.lock format, fallback to caching entire file"); | ||||||
|                         keyFiles.push(cargo_lock); |                         keyFiles.push(cargo_lock); | ||||||
|                         continue; |                         continue; | ||||||
|                     } |                     } | ||||||
| @ -86878,7 +86932,7 @@ class CacheConfig { | |||||||
|                 } |                 } | ||||||
|                 catch (e) { |                 catch (e) { | ||||||
|                     // Fallback to caching them as regular file
 |                     // Fallback to caching them as regular file
 | ||||||
|                     core.warning(`Error parsing Cargo.lock manifest, fallback to caching entire file: ${e}`); |                     lib_core.warning(`Error parsing Cargo.lock manifest, fallback to caching entire file: ${e}`); | ||||||
|                     keyFiles.push(cargo_lock); |                     keyFiles.push(cargo_lock); | ||||||
|                 } |                 } | ||||||
|             } |             } | ||||||
| @ -86903,14 +86957,21 @@ class CacheConfig { | |||||||
|                 ...self.cachePaths, |                 ...self.cachePaths, | ||||||
|             ]; |             ]; | ||||||
|         } |         } | ||||||
|         const cacheTargets = core.getInput("cache-targets").toLowerCase() || "true"; |         const cacheTargets = lib_core.getInput("cache-targets").toLowerCase() || "true"; | ||||||
|         if (cacheTargets === "true") { |         if (cacheTargets === "true") { | ||||||
|             self.cachePaths.push(...workspaces.map((ws) => ws.target)); |             self.cachePaths.push(...workspaces.map((ws) => ws.target)); | ||||||
|         } |         } | ||||||
|         const cacheDirectories = core.getInput("cache-directories"); |         const cacheDirectories = lib_core.getInput("cache-directories"); | ||||||
|         for (const dir of cacheDirectories.trim().split(/\s+/).filter(Boolean)) { |         for (const dir of cacheDirectories.trim().split(/\s+/).filter(Boolean)) { | ||||||
|             self.cachePaths.push(dir); |             self.cachePaths.push(dir); | ||||||
|         } |         } | ||||||
|  |         if (self.incremental) { | ||||||
|  |             if (cacheTargets === "true") { | ||||||
|  |                 for (const target of self.workspaces.map((ws) => ws.target)) { | ||||||
|  |                     self.cachePaths.push(external_path_default().join(target, "incremental")); | ||||||
|  |                 } | ||||||
|  |             } | ||||||
|  |         } | ||||||
|         const bins = await getCargoBins(); |         const bins = await getCargoBins(); | ||||||
|         self.cargoBins = Array.from(bins.values()); |         self.cargoBins = Array.from(bins.values()); | ||||||
|         return self; |         return self; | ||||||
| @ -86924,7 +86985,7 @@ class CacheConfig { | |||||||
|      * @see {@link CacheConfig#new} |      * @see {@link CacheConfig#new} | ||||||
|      */ |      */ | ||||||
|     static fromState() { |     static fromState() { | ||||||
|         const source = core.getState(STATE_CONFIG); |         const source = lib_core.getState(STATE_CONFIG); | ||||||
|         if (!source) { |         if (!source) { | ||||||
|             throw new Error("Cache configuration not found in state"); |             throw new Error("Cache configuration not found in state"); | ||||||
|         } |         } | ||||||
| @ -86937,41 +86998,47 @@ class CacheConfig { | |||||||
|      * Prints the configuration to the action log. |      * Prints the configuration to the action log. | ||||||
|      */ |      */ | ||||||
|     printInfo(cacheProvider) { |     printInfo(cacheProvider) { | ||||||
|         core.startGroup("Cache Configuration"); |         lib_core.startGroup("Cache Configuration"); | ||||||
|         core.info(`Cache Provider:`); |         lib_core.info(`Cache Provider:`); | ||||||
|         core.info(`    ${cacheProvider.name}`); |         lib_core.info(`    ${cacheProvider.name}`); | ||||||
|         core.info(`Workspaces:`); |         lib_core.info(`Workspaces:`); | ||||||
|         for (const workspace of this.workspaces) { |         for (const workspace of this.workspaces) { | ||||||
|             core.info(`    ${workspace.root}`); |             lib_core.info(`    ${workspace.root}`); | ||||||
|         } |         } | ||||||
|         core.info(`Cache Paths:`); |         lib_core.info(`Cache Paths:`); | ||||||
|         for (const path of this.cachePaths) { |         for (const path of this.cachePaths) { | ||||||
|             core.info(`    ${path}`); |             lib_core.info(`    ${path}`); | ||||||
|         } |         } | ||||||
|         core.info(`Restore Key:`); |         lib_core.info(`Restore Key:`); | ||||||
|         core.info(`    ${this.restoreKey}`); |         lib_core.info(`    ${this.restoreKey}`); | ||||||
|         core.info(`Cache Key:`); |         lib_core.info(`Cache Key:`); | ||||||
|         core.info(`    ${this.cacheKey}`); |         lib_core.info(`    ${this.cacheKey}`); | ||||||
|         core.info(`.. Prefix:`); |         lib_core.info(`.. Prefix:`); | ||||||
|         core.info(`  - ${this.keyPrefix}`); |         lib_core.info(`  - ${this.keyPrefix}`); | ||||||
|         core.info(`.. Environment considered:`); |         lib_core.info(`.. Environment considered:`); | ||||||
|         core.info(`  - Rust Version: ${this.keyRust}`); |         lib_core.info(`  - Rust Version: ${this.keyRust}`); | ||||||
|         for (const env of this.keyEnvs) { |         for (const env of this.keyEnvs) { | ||||||
|             core.info(`  - ${env}`); |             lib_core.info(`  - ${env}`); | ||||||
|         } |         } | ||||||
|         core.info(`.. Lockfiles considered:`); |         lib_core.info(`.. Lockfiles considered:`); | ||||||
|         for (const file of this.keyFiles) { |         for (const file of this.keyFiles) { | ||||||
|             core.info(`  - ${file}`); |             lib_core.info(`  - ${file}`); | ||||||
|         } |         } | ||||||
|         core.info(`.. Incremental: ${this.incremental}`); |         lib_core.info(`.. Incremental: ${this.incremental}`); | ||||||
|         core.endGroup(); |         lib_core.endGroup(); | ||||||
|     } |     } | ||||||
|     /** |     /** | ||||||
|      * Saves the configuration to the state store. |      * Saves the configuration to the state store. | ||||||
|      * This is used to restore the configuration in the post action. |      * This is used to restore the configuration in the post action. | ||||||
|      */ |      */ | ||||||
|     saveState() { |     saveState() { | ||||||
|         core.saveState(STATE_CONFIG, this); |         lib_core.saveState(STATE_CONFIG, this); | ||||||
|  |     } | ||||||
|  |     isIncrementalMissing() { | ||||||
|  |         if (this.incremental) { | ||||||
|  |             return isIncrementalMissing(); | ||||||
|  |         } | ||||||
|  |         return false; | ||||||
|     } |     } | ||||||
| } | } | ||||||
| /** | /** | ||||||
| @ -86980,7 +87047,7 @@ class CacheConfig { | |||||||
|  * @returns `true` if the cache is up to date, `false` otherwise. |  * @returns `true` if the cache is up to date, `false` otherwise. | ||||||
|  */ |  */ | ||||||
| function isCacheUpToDate() { | function isCacheUpToDate() { | ||||||
|     return core.getState(STATE_CONFIG) === ""; |     return lib_core.getState(STATE_CONFIG) === ""; | ||||||
| } | } | ||||||
| /** | /** | ||||||
|  * Returns a hex digest of the given hasher truncated to `HASH_LENGTH`. |  * Returns a hex digest of the given hasher truncated to `HASH_LENGTH`. | ||||||
| @ -87034,14 +87101,14 @@ function sort_and_uniq(a) { | |||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| async function cleanTargetDir(targetDir, packages, checkTimestamp, incremental) { | async function cleanTargetDir(targetDir, packages, checkTimestamp, incremental) { | ||||||
|     core.debug(`cleaning target directory "${targetDir}"`); |     lib_core.debug(`cleaning target directory "${targetDir}"`); | ||||||
|     // remove all *files* from the profile directory
 |     // remove all *files* from the profile directory
 | ||||||
|     let dir = await external_fs_default().promises.opendir(targetDir); |     let dir = await external_fs_default().promises.opendir(targetDir); | ||||||
|     for await (const dirent of dir) { |     for await (const dirent of dir) { | ||||||
|         if (dirent.isDirectory()) { |         if (dirent.isDirectory()) { | ||||||
|             let dirName = external_path_default().join(dir.path, dirent.name); |             let dirName = external_path_default().join(dir.path, dirent.name); | ||||||
|             // is it a profile dir, or a nested target dir?
 |             // is it a profile dir, or a nested target dir?
 | ||||||
|             let isNestedTarget = (await exists(external_path_default().join(dirName, "CACHEDIR.TAG"))) || (await exists(external_path_default().join(dirName, ".rustc_info.json"))); |             let isNestedTarget = (await utils_exists(external_path_default().join(dirName, "CACHEDIR.TAG"))) || (await utils_exists(external_path_default().join(dirName, ".rustc_info.json"))); | ||||||
|             try { |             try { | ||||||
|                 if (isNestedTarget) { |                 if (isNestedTarget) { | ||||||
|                     await cleanTargetDir(dirName, packages, checkTimestamp, incremental); |                     await cleanTargetDir(dirName, packages, checkTimestamp, incremental); | ||||||
| @ -87058,7 +87125,7 @@ async function cleanTargetDir(targetDir, packages, checkTimestamp, incremental) | |||||||
|     } |     } | ||||||
| } | } | ||||||
| async function cleanProfileTarget(profileDir, packages, checkTimestamp, incremental) { | async function cleanProfileTarget(profileDir, packages, checkTimestamp, incremental) { | ||||||
|     core.debug(`cleaning profile directory "${profileDir}"`); |     lib_core.debug(`cleaning profile directory "${profileDir}"`); | ||||||
|     // Quite a few testing utility crates store compilation artifacts as nested
 |     // Quite a few testing utility crates store compilation artifacts as nested
 | ||||||
|     // workspaces under `target/tests`. Notably, `target/tests/target` and
 |     // workspaces under `target/tests`. Notably, `target/tests/target` and
 | ||||||
|     // `target/tests/trybuild`.
 |     // `target/tests/trybuild`.
 | ||||||
| @ -87100,7 +87167,7 @@ async function cleanProfileTarget(profileDir, packages, checkTimestamp, incremen | |||||||
|         }; |         }; | ||||||
|         await fillModifiedTimes(incrementalDir); |         await fillModifiedTimes(incrementalDir); | ||||||
|         // Write the modified times to the incremental folder
 |         // Write the modified times to the incremental folder
 | ||||||
|         core.debug(`writing incremental-restore.json for ${incrementalDir} with ${modifiedTimes} files`); |         lib_core.debug(`writing incremental-restore.json for ${incrementalDir} with ${modifiedTimes} files`); | ||||||
|         const contents = JSON.stringify({ modifiedTimes }); |         const contents = JSON.stringify({ modifiedTimes }); | ||||||
|         await external_fs_default().promises.writeFile(external_path_default().join(incrementalDir, "incremental-restore.json"), contents); |         await external_fs_default().promises.writeFile(external_path_default().join(incrementalDir, "incremental-restore.json"), contents); | ||||||
|     } |     } | ||||||
| @ -87153,7 +87220,7 @@ async function cleanRegistry(packages, crates = true) { | |||||||
|     // remove `.cargo/credentials.toml`
 |     // remove `.cargo/credentials.toml`
 | ||||||
|     try { |     try { | ||||||
|         const credentials = external_path_default().join(CARGO_HOME, ".cargo", "credentials.toml"); |         const credentials = external_path_default().join(CARGO_HOME, ".cargo", "credentials.toml"); | ||||||
|         core.debug(`deleting "${credentials}"`); |         lib_core.debug(`deleting "${credentials}"`); | ||||||
|         await external_fs_default().promises.unlink(credentials); |         await external_fs_default().promises.unlink(credentials); | ||||||
|     } |     } | ||||||
|     catch { } |     catch { } | ||||||
| @ -87166,7 +87233,7 @@ async function cleanRegistry(packages, crates = true) { | |||||||
|             // or `.cargo/registry/index/index.crates.io-e139d0d48fed7772`
 |             // or `.cargo/registry/index/index.crates.io-e139d0d48fed7772`
 | ||||||
|             const dirPath = external_path_default().join(indexDir.path, dirent.name); |             const dirPath = external_path_default().join(indexDir.path, dirent.name); | ||||||
|             // for a git registry, we can remove `.cache`, as cargo will recreate it from git
 |             // for a git registry, we can remove `.cache`, as cargo will recreate it from git
 | ||||||
|             if (await exists(external_path_default().join(dirPath, ".git"))) { |             if (await utils_exists(external_path_default().join(dirPath, ".git"))) { | ||||||
|                 await rmRF(external_path_default().join(dirPath, ".cache")); |                 await rmRF(external_path_default().join(dirPath, ".cache")); | ||||||
|             } |             } | ||||||
|             else { |             else { | ||||||
| @ -87175,7 +87242,7 @@ async function cleanRegistry(packages, crates = true) { | |||||||
|         } |         } | ||||||
|     } |     } | ||||||
|     if (!crates) { |     if (!crates) { | ||||||
|         core.debug("skipping registry cache and src cleanup"); |         lib_core.debug("skipping registry cache and src cleanup"); | ||||||
|         return; |         return; | ||||||
|     } |     } | ||||||
|     // `.cargo/registry/src`
 |     // `.cargo/registry/src`
 | ||||||
| @ -87325,7 +87392,7 @@ async function rmExcept(dirName, keepPrefix, checkTimestamp = false) { | |||||||
| async function rm(parent, dirent) { | async function rm(parent, dirent) { | ||||||
|     try { |     try { | ||||||
|         const fileName = external_path_default().join(parent, dirent.name); |         const fileName = external_path_default().join(parent, dirent.name); | ||||||
|         core.debug(`deleting "${fileName}"`); |         lib_core.debug(`deleting "${fileName}"`); | ||||||
|         if (dirent.isFile()) { |         if (dirent.isFile()) { | ||||||
|             await external_fs_default().promises.unlink(fileName); |             await external_fs_default().promises.unlink(fileName); | ||||||
|         } |         } | ||||||
| @ -87336,7 +87403,7 @@ async function rm(parent, dirent) { | |||||||
|     catch { } |     catch { } | ||||||
| } | } | ||||||
| async function rmRF(dirName) { | async function rmRF(dirName) { | ||||||
|     core.debug(`deleting "${dirName}"`); |     lib_core.debug(`deleting "${dirName}"`); | ||||||
|     await io.rmRF(dirName); |     await io.rmRF(dirName); | ||||||
| } | } | ||||||
| 
 | 
 | ||||||
| @ -87347,25 +87414,25 @@ async function rmRF(dirName) { | |||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| process.on("uncaughtException", (e) => { | process.on("uncaughtException", (e) => { | ||||||
|     core.error(e.message); |     lib_core.error(e.message); | ||||||
|     if (e.stack) { |     if (e.stack) { | ||||||
|         core.error(e.stack); |         lib_core.error(e.stack); | ||||||
|     } |     } | ||||||
| }); | }); | ||||||
| async function run() { | async function run() { | ||||||
|     const cacheProvider = getCacheProvider(); |     const cacheProvider = getCacheProvider(); | ||||||
|     const save = core.getInput("save-if").toLowerCase() || "true"; |     const save = lib_core.getInput("save-if").toLowerCase() || "true"; | ||||||
|     if (!(cacheProvider.cache.isFeatureAvailable() && save === "true")) { |     if (!(cacheProvider.cache.isFeatureAvailable() && save === "true")) { | ||||||
|         return; |         return; | ||||||
|     } |     } | ||||||
|     try { |     try { | ||||||
|         if (isCacheUpToDate()) { |         if (isCacheUpToDate()) { | ||||||
|             core.info(`Cache up-to-date.`); |             lib_core.info(`Cache up-to-date.`); | ||||||
|             return; |             return; | ||||||
|         } |         } | ||||||
|         const config = CacheConfig.fromState(); |         const config = CacheConfig.fromState(); | ||||||
|         config.printInfo(cacheProvider); |         config.printInfo(cacheProvider); | ||||||
|         core.info(""); |         lib_core.info(""); | ||||||
|         // TODO: remove this once https://github.com/actions/toolkit/pull/553 lands
 |         // TODO: remove this once https://github.com/actions/toolkit/pull/553 lands
 | ||||||
|         if (process.env["RUNNER_OS"] == "macOS") { |         if (process.env["RUNNER_OS"] == "macOS") { | ||||||
|             await macOsWorkaround(); |             await macOsWorkaround(); | ||||||
| @ -87375,38 +87442,38 @@ async function run() { | |||||||
|             const packages = await workspace.getPackagesOutsideWorkspaceRoot(); |             const packages = await workspace.getPackagesOutsideWorkspaceRoot(); | ||||||
|             allPackages.push(...packages); |             allPackages.push(...packages); | ||||||
|             try { |             try { | ||||||
|                 core.info(`... Cleaning ${workspace.target} ...`); |                 lib_core.info(`... Cleaning ${workspace.target} ...`); | ||||||
|                 await cleanTargetDir(workspace.target, packages, false, config.incremental); |                 await cleanTargetDir(workspace.target, packages, false, config.incremental); | ||||||
|             } |             } | ||||||
|             catch (e) { |             catch (e) { | ||||||
|                 core.debug(`${e.stack}`); |                 lib_core.debug(`${e.stack}`); | ||||||
|             } |             } | ||||||
|         } |         } | ||||||
|         try { |         try { | ||||||
|             const crates = core.getInput("cache-all-crates").toLowerCase() || "false"; |             const crates = lib_core.getInput("cache-all-crates").toLowerCase() || "false"; | ||||||
|             core.info(`... Cleaning cargo registry (cache-all-crates: ${crates}) ...`); |             lib_core.info(`... Cleaning cargo registry (cache-all-crates: ${crates}) ...`); | ||||||
|             await cleanRegistry(allPackages, crates !== "true"); |             await cleanRegistry(allPackages, crates !== "true"); | ||||||
|         } |         } | ||||||
|         catch (e) { |         catch (e) { | ||||||
|             core.debug(`${e.stack}`); |             lib_core.debug(`${e.stack}`); | ||||||
|         } |         } | ||||||
|         if (config.cacheBin) { |         if (config.cacheBin) { | ||||||
|             try { |             try { | ||||||
|                 core.info(`... Cleaning cargo/bin ...`); |                 lib_core.info(`... Cleaning cargo/bin ...`); | ||||||
|                 await cleanBin(config.cargoBins); |                 await cleanBin(config.cargoBins); | ||||||
|             } |             } | ||||||
|             catch (e) { |             catch (e) { | ||||||
|                 core.debug(`${e.stack}`); |                 lib_core.debug(`${e.stack}`); | ||||||
|             } |             } | ||||||
|         } |         } | ||||||
|         try { |         try { | ||||||
|             core.info(`... Cleaning cargo git cache ...`); |             lib_core.info(`... Cleaning cargo git cache ...`); | ||||||
|             await cleanGit(allPackages); |             await cleanGit(allPackages); | ||||||
|         } |         } | ||||||
|         catch (e) { |         catch (e) { | ||||||
|             core.debug(`${e.stack}`); |             lib_core.debug(`${e.stack}`); | ||||||
|         } |         } | ||||||
|         core.info(`... Saving cache ...`); |         lib_core.info(`... Saving cache ...`); | ||||||
|         // Pass a copy of cachePaths to avoid mutating the original array as reported by:
 |         // Pass a copy of cachePaths to avoid mutating the original array as reported by:
 | ||||||
|         // https://github.com/actions/toolkit/pull/1378
 |         // https://github.com/actions/toolkit/pull/1378
 | ||||||
|         // TODO: remove this once the underlying bug is fixed.
 |         // TODO: remove this once the underlying bug is fixed.
 | ||||||
|  | |||||||
| @ -10,6 +10,7 @@ import * as toml from "smol-toml"; | |||||||
| import { getCargoBins } from "./cleanup"; | import { getCargoBins } from "./cleanup"; | ||||||
| import { CacheProvider, exists, getCmdOutput } from "./utils"; | import { CacheProvider, exists, getCmdOutput } from "./utils"; | ||||||
| import { Workspace } from "./workspace"; | import { Workspace } from "./workspace"; | ||||||
|  | import { isIncrementalMissing } from "./incremental"; | ||||||
| 
 | 
 | ||||||
| const HOME = os.homedir(); | const HOME = os.homedir(); | ||||||
| export const CARGO_HOME = process.env.CARGO_HOME || path.join(HOME, ".cargo"); | export const CARGO_HOME = process.env.CARGO_HOME || path.join(HOME, ".cargo"); | ||||||
| @ -62,8 +63,6 @@ export class CacheConfig { | |||||||
| 
 | 
 | ||||||
|     let key = core.getInput("prefix-key") || "v0-rust"; |     let key = core.getInput("prefix-key") || "v0-rust"; | ||||||
| 
 | 
 | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
|     const sharedKey = core.getInput("shared-key"); |     const sharedKey = core.getInput("shared-key"); | ||||||
|     if (sharedKey) { |     if (sharedKey) { | ||||||
|       key += `-${sharedKey}`; |       key += `-${sharedKey}`; | ||||||
| @ -120,7 +119,6 @@ export class CacheConfig { | |||||||
| 
 | 
 | ||||||
|     self.keyEnvs = keyEnvs; |     self.keyEnvs = keyEnvs; | ||||||
| 
 | 
 | ||||||
| 
 |  | ||||||
|     // Make sure we consider incremental builds
 |     // Make sure we consider incremental builds
 | ||||||
|     self.incremental = core.getInput("incremental").toLowerCase() == "true"; |     self.incremental = core.getInput("incremental").toLowerCase() == "true"; | ||||||
|     hasher.update(`incremental=${self.incremental}`); |     hasher.update(`incremental=${self.incremental}`); | ||||||
| @ -275,6 +273,14 @@ export class CacheConfig { | |||||||
|       self.cachePaths.push(dir); |       self.cachePaths.push(dir); | ||||||
|     } |     } | ||||||
| 
 | 
 | ||||||
|  |     if (self.incremental) { | ||||||
|  |       if (cacheTargets === "true") { | ||||||
|  |         for (const target of self.workspaces.map((ws) => ws.target)) { | ||||||
|  |           self.cachePaths.push(path.join(target, "incremental")); | ||||||
|  |         } | ||||||
|  |       } | ||||||
|  |     } | ||||||
|  | 
 | ||||||
|     const bins = await getCargoBins(); |     const bins = await getCargoBins(); | ||||||
|     self.cargoBins = Array.from(bins.values()); |     self.cargoBins = Array.from(bins.values()); | ||||||
| 
 | 
 | ||||||
| @ -343,6 +349,14 @@ export class CacheConfig { | |||||||
|   saveState() { |   saveState() { | ||||||
|     core.saveState(STATE_CONFIG, this); |     core.saveState(STATE_CONFIG, this); | ||||||
|   } |   } | ||||||
|  | 
 | ||||||
|  |   isIncrementalMissing(): boolean { | ||||||
|  |     if (this.incremental) { | ||||||
|  |       return isIncrementalMissing(); | ||||||
|  |     } | ||||||
|  | 
 | ||||||
|  |     return false; | ||||||
|  |   } | ||||||
| } | } | ||||||
| 
 | 
 | ||||||
| /** | /** | ||||||
|  | |||||||
| @ -7,11 +7,15 @@ import path from "path"; | |||||||
| import { exists } from "./utils"; | import { exists } from "./utils"; | ||||||
| // import { Packages } from "./workspace";
 | // import { Packages } from "./workspace";
 | ||||||
| 
 | 
 | ||||||
|  | let incremental_missing = false; | ||||||
|  | 
 | ||||||
|  | export function isIncrementalMissing(): boolean { | ||||||
|  |   return incremental_missing; | ||||||
|  | } | ||||||
| 
 | 
 | ||||||
| export async function restoreIncremental(targetDir: string) { | export async function restoreIncremental(targetDir: string) { | ||||||
|   core.debug(`restoring incremental directory "${targetDir}"`); |   core.debug(`restoring incremental directory "${targetDir}"`); | ||||||
| 
 | 
 | ||||||
| 
 |  | ||||||
|   let dir = await fs.promises.opendir(targetDir); |   let dir = await fs.promises.opendir(targetDir); | ||||||
|   for await (const dirent of dir) { |   for await (const dirent of dir) { | ||||||
|     if (dirent.isDirectory()) { |     if (dirent.isDirectory()) { | ||||||
| @ -46,5 +50,8 @@ async function restoreIncrementalProfile(dirName: string) { | |||||||
|       const filePath = path.join(dirName, fileName); |       const filePath = path.join(dirName, fileName); | ||||||
|       await fs.promises.utimes(filePath, new Date(mtime), new Date(mtime)); |       await fs.promises.utimes(filePath, new Date(mtime), new Date(mtime)); | ||||||
|     } |     } | ||||||
|  |   } else { | ||||||
|  |     core.debug(`incremental-restore.json not found for ${dirName}`); | ||||||
|  |     incremental_missing = true; | ||||||
|   } |   } | ||||||
| } | } | ||||||
|  | |||||||
| @ -56,7 +56,7 @@ async function run() { | |||||||
|         } |         } | ||||||
|       } |       } | ||||||
| 
 | 
 | ||||||
|       if (!match) { |       if (!match || config.isIncrementalMissing()) { | ||||||
|         // pre-clean the target directory on cache mismatch
 |         // pre-clean the target directory on cache mismatch
 | ||||||
|         for (const workspace of config.workspaces) { |         for (const workspace of config.workspaces) { | ||||||
|           try { |           try { | ||||||
|  | |||||||
		Loading…
	
	
			
			x
			
			
		
	
		Reference in New Issue
	
	Block a user