mirror of https://github.com/Swatinem/rust-cache.git (synced 2025-10-31 23:43:47 +00:00)

	Update dependencies and add changelog
commit decb69d790 (parent ab6b2769d1)

CHANGELOG.md
				| @ -1,5 +1,9 @@ | ||||
| # Changelog | ||||
| 
 | ||||
| ## 2.6.2 | ||||
| 
 | ||||
| - Fix `toml` parsing. | ||||
| 
 | ||||
| ## 2.6.1 | ||||
| 
 | ||||
| - Fix hash contributions of `Cargo.lock`/`Cargo.toml` files. | ||||
|  | ||||
							
								
								
									
dist/restore/index.js (vendored), 469 lines changed
							| @ -39,7 +39,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge | ||||
|     }); | ||||
| }; | ||||
| Object.defineProperty(exports, "__esModule", ({ value: true })); | ||||
| exports.saveCache = exports.restoreCache = exports.isFeatureAvailable = exports.ReserveCacheError = exports.ValidationError = void 0; | ||||
| exports.deleteCache = exports.saveCache = exports.restoreCache = exports.isFeatureAvailable = exports.ReserveCacheError = exports.ValidationError = void 0; | ||||
| const core = __importStar(__nccwpck_require__(2186)); | ||||
| const path = __importStar(__nccwpck_require__(1017)); | ||||
| const utils = __importStar(__nccwpck_require__(2552)); | ||||
| @ -260,6 +260,23 @@ function saveCache(paths, key, options, enableCrossOsArchive = false) { | ||||
|     }); | ||||
| } | ||||
| exports.saveCache = saveCache; | ||||
| /** | ||||
|  * Delete a list of caches with the specified keys | ||||
|  * @param keys a list of keys for deleting the cache | ||||
|  */ | ||||
| function deleteCache(keys) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         core.debug('Deleting Cache'); | ||||
|         core.debug(`Cache Keys: ${keys}`); | ||||
|         try { | ||||
|             yield cacheHttpClient.deleteCache(keys); | ||||
|         } | ||||
|         catch (error) { | ||||
|             core.warning(`Failed to delete: ${error.message}`); | ||||
|         } | ||||
|     }); | ||||
| } | ||||
| exports.deleteCache = deleteCache; | ||||
| //# sourceMappingURL=cache.js.map
 | ||||
| 
 | ||||
| /***/ }), | ||||
| @ -302,7 +319,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge | ||||
|     }); | ||||
| }; | ||||
| Object.defineProperty(exports, "__esModule", ({ value: true })); | ||||
| exports.saveCache = exports.reserveCache = exports.reportCacheRestore = exports.downloadCache = exports.getCacheEntry = exports.getCacheVersion = void 0; | ||||
| exports.deleteCache = exports.saveCache = exports.reserveCache = exports.reportCacheRestore = exports.downloadCache = exports.getCacheEntry = exports.getCacheVersion = void 0; | ||||
| const core = __importStar(__nccwpck_require__(2186)); | ||||
| const http_client_1 = __nccwpck_require__(6255); | ||||
| const auth_1 = __nccwpck_require__(5526); | ||||
| @ -548,6 +565,17 @@ function saveCache(key, version, uploadId, urls, archivePath, archiveTimeMs, opt | ||||
|     }); | ||||
| } | ||||
| exports.saveCache = saveCache; | ||||
| function deleteCache(keys) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         const httpClient = createHttpClient(); | ||||
|         const resource = `cache?keys=${encodeURIComponent(keys.join(','))}`; | ||||
|         const response = yield (0, requestUtils_1.retryHttpClientResponse)('deleteCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.del(getCacheApiUrl(resource)); })); | ||||
|         if (!(0, requestUtils_1.isSuccessStatusCode)(response.message.statusCode)) { | ||||
|             throw new Error(`Cache service responded with ${response.message.statusCode}`); | ||||
|         } | ||||
|     }); | ||||
| } | ||||
| exports.deleteCache = deleteCache; | ||||
| //# sourceMappingURL=cacheHttpClient.js.map
 | ||||
| 
 | ||||
| /***/ }), | ||||
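The deleteCache helper added above amounts to a single authenticated DELETE against the Actions cache service. The following is a minimal standalone sketch, not the vendored implementation: it assumes the ACTIONS_CACHE_URL and ACTIONS_RUNTIME_TOKEN environment variables that the runner provides, uses plain fetch (Node 18+) instead of @actions/http-client, and the _apis/artifactcache path and api-version header are taken from the @actions/cache client as assumptions.

// Illustrative sketch only; the vendored code wraps the call in retry logic.
async function deleteCacheSketch(keys: string[]): Promise<void> {
  const base = process.env['ACTIONS_CACHE_URL'];      // provided by the Actions runner (assumption)
  const token = process.env['ACTIONS_RUNTIME_TOKEN']; // provided by the Actions runner (assumption)
  if (!base || !token) throw new Error('not running inside a GitHub Actions job');

  // Same resource shape as the diff above: cache?keys=<comma-separated, URL-encoded>
  const resource = `cache?keys=${encodeURIComponent(keys.join(','))}`;
  const url = `${base.replace(/\/$/, '')}/_apis/artifactcache/${resource}`;

  const res = await fetch(url, {
    method: 'DELETE',
    headers: {
      Accept: 'application/json;api-version=6.0-preview.1', // version header used by @actions/cache (assumption)
      Authorization: `Bearer ${token}`
    }
  });
  if (!res.ok) {
    throw new Error(`Cache service responded with ${res.status}`);
  }
}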
| @ -1500,7 +1528,7 @@ exports.createTar = createTar; | ||||
| 
 | ||||
| Object.defineProperty(exports, "__esModule", ({ value: true })); | ||||
| exports.LIB_VERSION = void 0; | ||||
| exports.LIB_VERSION = "0.1.2"; | ||||
| exports.LIB_VERSION = "0.2.0"; | ||||
| //# sourceMappingURL=version.js.map
 | ||||
| 
 | ||||
| /***/ }), | ||||
| @ -3001,10 +3029,7 @@ function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false) | ||||
|     } | ||||
|     // Add salt to cache version to support breaking changes in cache entry
 | ||||
|     components.push(versionSalt); | ||||
|     return crypto | ||||
|         .createHash('sha256') | ||||
|         .update(components.join('|')) | ||||
|         .digest('hex'); | ||||
|     return crypto.createHash('sha256').update(components.join('|')).digest('hex'); | ||||
| } | ||||
| exports.getCacheVersion = getCacheVersion; | ||||
| function getCacheEntry(keys, paths, options) { | ||||
| @ -3057,13 +3082,21 @@ function downloadCache(archiveLocation, archivePath, options) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         const archiveUrl = new url_1.URL(archiveLocation); | ||||
|         const downloadOptions = (0, options_1.getDownloadOptions)(options); | ||||
|         if (downloadOptions.useAzureSdk && | ||||
|             archiveUrl.hostname.endsWith('.blob.core.windows.net')) { | ||||
|             // Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability.
 | ||||
|             yield (0, downloadUtils_1.downloadCacheStorageSDK)(archiveLocation, archivePath, downloadOptions); | ||||
|         if (archiveUrl.hostname.endsWith('.blob.core.windows.net')) { | ||||
|             if (downloadOptions.useAzureSdk) { | ||||
|                 // Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability.
 | ||||
|                 yield (0, downloadUtils_1.downloadCacheStorageSDK)(archiveLocation, archivePath, downloadOptions); | ||||
|             } | ||||
|             else if (downloadOptions.concurrentBlobDownloads) { | ||||
|                 // Use concurrent implementation with HttpClient to work around blob SDK issue
 | ||||
|                 yield (0, downloadUtils_1.downloadCacheHttpClientConcurrent)(archiveLocation, archivePath, downloadOptions); | ||||
|             } | ||||
|             else { | ||||
|                 // Otherwise, download using the Actions http-client.
 | ||||
|                 yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath); | ||||
|             } | ||||
|         } | ||||
|         else { | ||||
|             // Otherwise, download using the Actions http-client.
 | ||||
|             yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath); | ||||
|         } | ||||
|     }); | ||||
| @ -3096,9 +3129,7 @@ function getContentRange(start, end) { | ||||
| } | ||||
| function uploadChunk(httpClient, resourceUrl, openStream, start, end) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         core.debug(`Uploading chunk of size ${end - | ||||
|             start + | ||||
|             1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
 | ||||
|         core.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); | ||||
|         const additionalHeaders = { | ||||
|             'Content-Type': 'application/octet-stream', | ||||
|             'Content-Range': getContentRange(start, end) | ||||
| @ -3266,35 +3297,42 @@ function getArchiveFileSizeInBytes(filePath) { | ||||
| } | ||||
| exports.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes; | ||||
| function resolvePaths(patterns) { | ||||
|     var e_1, _a; | ||||
|     var _b; | ||||
|     var _a, e_1, _b, _c; | ||||
|     var _d; | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         const paths = []; | ||||
|         const workspace = (_b = process.env['GITHUB_WORKSPACE']) !== null && _b !== void 0 ? _b : process.cwd(); | ||||
|         const workspace = (_d = process.env['GITHUB_WORKSPACE']) !== null && _d !== void 0 ? _d : process.cwd(); | ||||
|         const globber = yield glob.create(patterns.join('\n'), { | ||||
|             implicitDescendants: false | ||||
|         }); | ||||
|         try { | ||||
|             for (var _c = __asyncValues(globber.globGenerator()), _d; _d = yield _c.next(), !_d.done;) { | ||||
|                 const file = _d.value; | ||||
|                 const relativeFile = path | ||||
|                     .relative(workspace, file) | ||||
|                     .replace(new RegExp(`\\${path.sep}`, 'g'), '/'); | ||||
|                 core.debug(`Matched: ${relativeFile}`); | ||||
|                 // Paths are made relative so the tar entries are all relative to the root of the workspace.
 | ||||
|                 if (relativeFile === '') { | ||||
|                     // path.relative returns empty string if workspace and file are equal
 | ||||
|                     paths.push('.'); | ||||
|             for (var _e = true, _f = __asyncValues(globber.globGenerator()), _g; _g = yield _f.next(), _a = _g.done, !_a;) { | ||||
|                 _c = _g.value; | ||||
|                 _e = false; | ||||
|                 try { | ||||
|                     const file = _c; | ||||
|                     const relativeFile = path | ||||
|                         .relative(workspace, file) | ||||
|                         .replace(new RegExp(`\\${path.sep}`, 'g'), '/'); | ||||
|                     core.debug(`Matched: ${relativeFile}`); | ||||
|                     // Paths are made relative so the tar entries are all relative to the root of the workspace.
 | ||||
|                     if (relativeFile === '') { | ||||
|                         // path.relative returns empty string if workspace and file are equal
 | ||||
|                         paths.push('.'); | ||||
|                     } | ||||
|                     else { | ||||
|                         paths.push(`${relativeFile}`); | ||||
|                     } | ||||
|                 } | ||||
|                 else { | ||||
|                     paths.push(`${relativeFile}`); | ||||
|                 finally { | ||||
|                     _e = true; | ||||
|                 } | ||||
|             } | ||||
|         } | ||||
|         catch (e_1_1) { e_1 = { error: e_1_1 }; } | ||||
|         finally { | ||||
|             try { | ||||
|                 if (_d && !_d.done && (_a = _c.return)) yield _a.call(_c); | ||||
|                 if (!_e && !_a && (_b = _f.return)) yield _b.call(_f); | ||||
|             } | ||||
|             finally { if (e_1) throw e_1.error; } | ||||
|         } | ||||
| @ -3459,7 +3497,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge | ||||
|     }); | ||||
| }; | ||||
| Object.defineProperty(exports, "__esModule", ({ value: true })); | ||||
| exports.downloadCacheStorageSDK = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0; | ||||
| exports.downloadCacheStorageSDK = exports.downloadCacheHttpClientConcurrent = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0; | ||||
| const core = __importStar(__nccwpck_require__(2186)); | ||||
| const http_client_1 = __nccwpck_require__(6255); | ||||
| const storage_blob_1 = __nccwpck_require__(4100); | ||||
| @ -3616,6 +3654,115 @@ function downloadCacheHttpClient(archiveLocation, archivePath) { | ||||
|     }); | ||||
| } | ||||
| exports.downloadCacheHttpClient = downloadCacheHttpClient; | ||||
| /** | ||||
|  * Download the cache using the Actions toolkit http-client concurrently | ||||
|  * | ||||
|  * @param archiveLocation the URL for the cache | ||||
|  * @param archivePath the local path where the cache is saved | ||||
|  */ | ||||
| function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) { | ||||
|     var _a; | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         const archiveDescriptor = yield fs.promises.open(archivePath, 'w'); | ||||
|         const httpClient = new http_client_1.HttpClient('actions/cache', undefined, { | ||||
|             socketTimeout: options.timeoutInMs, | ||||
|             keepAlive: true | ||||
|         }); | ||||
|         try { | ||||
|             const res = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCacheMetadata', () => __awaiter(this, void 0, void 0, function* () { return yield httpClient.request('HEAD', archiveLocation, null, {}); })); | ||||
|             const lengthHeader = res.message.headers['content-length']; | ||||
|             if (lengthHeader === undefined || lengthHeader === null) { | ||||
|                 throw new Error('Content-Length not found on blob response'); | ||||
|             } | ||||
|             const length = parseInt(lengthHeader); | ||||
|             if (Number.isNaN(length)) { | ||||
|                 throw new Error(`Could not interpret Content-Length: ${length}`); | ||||
|             } | ||||
|             const downloads = []; | ||||
|             const blockSize = 4 * 1024 * 1024; | ||||
|             for (let offset = 0; offset < length; offset += blockSize) { | ||||
|                 const count = Math.min(blockSize, length - offset); | ||||
|                 downloads.push({ | ||||
|                     offset, | ||||
|                     promiseGetter: () => __awaiter(this, void 0, void 0, function* () { | ||||
|                         return yield downloadSegmentRetry(httpClient, archiveLocation, offset, count); | ||||
|                     }) | ||||
|                 }); | ||||
|             } | ||||
|             // reverse to use .pop instead of .shift
 | ||||
|             downloads.reverse(); | ||||
|             let actives = 0; | ||||
|             let bytesDownloaded = 0; | ||||
|             const progress = new DownloadProgress(length); | ||||
|             progress.startDisplayTimer(); | ||||
|             const progressFn = progress.onProgress(); | ||||
|             const activeDownloads = []; | ||||
|             let nextDownload; | ||||
|             const waitAndWrite = () => __awaiter(this, void 0, void 0, function* () { | ||||
|                 const segment = yield Promise.race(Object.values(activeDownloads)); | ||||
|                 yield archiveDescriptor.write(segment.buffer, 0, segment.count, segment.offset); | ||||
|                 actives--; | ||||
|                 delete activeDownloads[segment.offset]; | ||||
|                 bytesDownloaded += segment.count; | ||||
|                 progressFn({ loadedBytes: bytesDownloaded }); | ||||
|             }); | ||||
|             while ((nextDownload = downloads.pop())) { | ||||
|                 activeDownloads[nextDownload.offset] = nextDownload.promiseGetter(); | ||||
|                 actives++; | ||||
|                 if (actives >= ((_a = options.downloadConcurrency) !== null && _a !== void 0 ? _a : 10)) { | ||||
|                     yield waitAndWrite(); | ||||
|                 } | ||||
|             } | ||||
|             while (actives > 0) { | ||||
|                 yield waitAndWrite(); | ||||
|             } | ||||
|         } | ||||
|         finally { | ||||
|             httpClient.dispose(); | ||||
|             yield archiveDescriptor.close(); | ||||
|         } | ||||
|     }); | ||||
| } | ||||
| exports.downloadCacheHttpClientConcurrent = downloadCacheHttpClientConcurrent; | ||||
| function downloadSegmentRetry(httpClient, archiveLocation, offset, count) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         const retries = 5; | ||||
|         let failures = 0; | ||||
|         while (true) { | ||||
|             try { | ||||
|                 const timeout = 30000; | ||||
|                 const result = yield promiseWithTimeout(timeout, downloadSegment(httpClient, archiveLocation, offset, count)); | ||||
|                 if (typeof result === 'string') { | ||||
|                     throw new Error('downloadSegmentRetry failed due to timeout'); | ||||
|                 } | ||||
|                 return result; | ||||
|             } | ||||
|             catch (err) { | ||||
|                 if (failures >= retries) { | ||||
|                     throw err; | ||||
|                 } | ||||
|                 failures++; | ||||
|             } | ||||
|         } | ||||
|     }); | ||||
| } | ||||
| function downloadSegment(httpClient, archiveLocation, offset, count) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         const partRes = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCachePart', () => __awaiter(this, void 0, void 0, function* () { | ||||
|             return yield httpClient.get(archiveLocation, { | ||||
|                 Range: `bytes=${offset}-${offset + count - 1}` | ||||
|             }); | ||||
|         })); | ||||
|         if (!partRes.readBodyBuffer) { | ||||
|             throw new Error('Expected HttpClientResponse to implement readBodyBuffer'); | ||||
|         } | ||||
|         return { | ||||
|             offset, | ||||
|             count, | ||||
|             buffer: yield partRes.readBodyBuffer() | ||||
|         }; | ||||
|     }); | ||||
| } | ||||
| /** | ||||
|  * Download the cache using the Azure Storage SDK.  Only call this method if the | ||||
|  * URL points to an Azure Storage endpoint. | ||||
| @ -4181,7 +4328,8 @@ exports.getUploadOptions = getUploadOptions; | ||||
|  */ | ||||
| function getDownloadOptions(copy) { | ||||
|     const result = { | ||||
|         useAzureSdk: true, | ||||
|         useAzureSdk: false, | ||||
|         concurrentBlobDownloads: true, | ||||
|         downloadConcurrency: 8, | ||||
|         timeoutInMs: 30000, | ||||
|         segmentTimeoutInMs: 600000, | ||||
| @ -4191,6 +4339,9 @@ function getDownloadOptions(copy) { | ||||
|         if (typeof copy.useAzureSdk === 'boolean') { | ||||
|             result.useAzureSdk = copy.useAzureSdk; | ||||
|         } | ||||
|         if (typeof copy.concurrentBlobDownloads === 'boolean') { | ||||
|             result.concurrentBlobDownloads = copy.concurrentBlobDownloads; | ||||
|         } | ||||
|         if (typeof copy.downloadConcurrency === 'number') { | ||||
|             result.downloadConcurrency = copy.downloadConcurrency; | ||||
|         } | ||||
| @ -9069,6 +9220,19 @@ class HttpClientResponse { | ||||
|             })); | ||||
|         }); | ||||
|     } | ||||
|     readBodyBuffer() { | ||||
|         return __awaiter(this, void 0, void 0, function* () { | ||||
|             return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () { | ||||
|                 const chunks = []; | ||||
|                 this.message.on('data', (chunk) => { | ||||
|                     chunks.push(chunk); | ||||
|                 }); | ||||
|                 this.message.on('end', () => { | ||||
|                     resolve(Buffer.concat(chunks)); | ||||
|                 }); | ||||
|             })); | ||||
|         }); | ||||
|     } | ||||
| } | ||||
| exports.HttpClientResponse = HttpClientResponse; | ||||
| function isHttps(requestUrl) { | ||||
| @ -9573,7 +9737,13 @@ function getProxyUrl(reqUrl) { | ||||
|         } | ||||
|     })(); | ||||
|     if (proxyVar) { | ||||
|         return new URL(proxyVar); | ||||
|         try { | ||||
|             return new URL(proxyVar); | ||||
|         } | ||||
|         catch (_a) { | ||||
|             if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://')) | ||||
|                 return new URL(`http://${proxyVar}`); | ||||
|         } | ||||
|     } | ||||
|     else { | ||||
|         return undefined; | ||||
| @ -10379,13 +10549,15 @@ exports.AbortSignal = AbortSignal; | ||||
| /***/ }), | ||||
| 
 | ||||
| /***/ 9645: | ||||
| /***/ ((__unused_webpack_module, exports) => { | ||||
| /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { | ||||
| 
 | ||||
| "use strict"; | ||||
| 
 | ||||
| 
 | ||||
| Object.defineProperty(exports, "__esModule", ({ value: true })); | ||||
| 
 | ||||
| var coreUtil = __nccwpck_require__(1333); | ||||
| 
 | ||||
| // Copyright (c) Microsoft Corporation.
 | ||||
| // Licensed under the MIT license.
 | ||||
| /** | ||||
| @ -10393,6 +10565,12 @@ Object.defineProperty(exports, "__esModule", ({ value: true })); | ||||
|  * the underlying key value. | ||||
|  */ | ||||
| class AzureKeyCredential { | ||||
|     /** | ||||
|      * The value of the key to be used in authentication | ||||
|      */ | ||||
|     get key() { | ||||
|         return this._key; | ||||
|     } | ||||
|     /** | ||||
|      * Create an instance of an AzureKeyCredential for use | ||||
|      * with a service client. | ||||
| @ -10405,12 +10583,6 @@ class AzureKeyCredential { | ||||
|         } | ||||
|         this._key = key; | ||||
|     } | ||||
|     /** | ||||
|      * The value of the key to be used in authentication | ||||
|      */ | ||||
|     get key() { | ||||
|         return this._key; | ||||
|     } | ||||
|     /** | ||||
|      * Change the value of the key. | ||||
|      * | ||||
| @ -10424,51 +10596,24 @@ class AzureKeyCredential { | ||||
|     } | ||||
| } | ||||
| 
 | ||||
| // Copyright (c) Microsoft Corporation.
 | ||||
| // Licensed under the MIT license.
 | ||||
| /** | ||||
|  * Helper TypeGuard that checks if something is defined or not. | ||||
|  * @param thing - Anything | ||||
|  * @internal | ||||
|  */ | ||||
| function isDefined(thing) { | ||||
|     return typeof thing !== "undefined" && thing !== null; | ||||
| } | ||||
| /** | ||||
|  * Helper TypeGuard that checks if the input is an object with the specified properties. | ||||
|  * Note: The properties may be inherited. | ||||
|  * @param thing - Anything. | ||||
|  * @param properties - The name of the properties that should appear in the object. | ||||
|  * @internal | ||||
|  */ | ||||
| function isObjectWithProperties(thing, properties) { | ||||
|     if (!isDefined(thing) || typeof thing !== "object") { | ||||
|         return false; | ||||
|     } | ||||
|     for (const property of properties) { | ||||
|         if (!objectHasProperty(thing, property)) { | ||||
|             return false; | ||||
|         } | ||||
|     } | ||||
|     return true; | ||||
| } | ||||
| /** | ||||
|  * Helper TypeGuard that checks if the input is an object with the specified property. | ||||
|  * Note: The property may be inherited. | ||||
|  * @param thing - Any object. | ||||
|  * @param property - The name of the property that should appear in the object. | ||||
|  * @internal | ||||
|  */ | ||||
| function objectHasProperty(thing, property) { | ||||
|     return typeof thing === "object" && property in thing; | ||||
| } | ||||
| 
 | ||||
| // Copyright (c) Microsoft Corporation.
 | ||||
| /** | ||||
|  * A static name/key-based credential that supports updating | ||||
|  * the underlying name and key values. | ||||
|  */ | ||||
| class AzureNamedKeyCredential { | ||||
|     /** | ||||
|      * The value of the key to be used in authentication. | ||||
|      */ | ||||
|     get key() { | ||||
|         return this._key; | ||||
|     } | ||||
|     /** | ||||
|      * The value of the name to be used in authentication. | ||||
|      */ | ||||
|     get name() { | ||||
|         return this._name; | ||||
|     } | ||||
|     /** | ||||
|      * Create an instance of an AzureNamedKeyCredential for use | ||||
|      * with a service client. | ||||
| @ -10483,18 +10628,6 @@ class AzureNamedKeyCredential { | ||||
|         this._name = name; | ||||
|         this._key = key; | ||||
|     } | ||||
|     /** | ||||
|      * The value of the key to be used in authentication. | ||||
|      */ | ||||
|     get key() { | ||||
|         return this._key; | ||||
|     } | ||||
|     /** | ||||
|      * The value of the name to be used in authentication. | ||||
|      */ | ||||
|     get name() { | ||||
|         return this._name; | ||||
|     } | ||||
|     /** | ||||
|      * Change the value of the key. | ||||
|      * | ||||
| @ -10518,7 +10651,7 @@ class AzureNamedKeyCredential { | ||||
|  * @param credential - The assumed NamedKeyCredential to be tested. | ||||
|  */ | ||||
| function isNamedKeyCredential(credential) { | ||||
|     return (isObjectWithProperties(credential, ["name", "key"]) && | ||||
|     return (coreUtil.isObjectWithProperties(credential, ["name", "key"]) && | ||||
|         typeof credential.key === "string" && | ||||
|         typeof credential.name === "string"); | ||||
| } | ||||
| @ -10529,6 +10662,12 @@ function isNamedKeyCredential(credential) { | ||||
|  * the underlying signature value. | ||||
|  */ | ||||
| class AzureSASCredential { | ||||
|     /** | ||||
|      * The value of the shared access signature to be used in authentication | ||||
|      */ | ||||
|     get signature() { | ||||
|         return this._signature; | ||||
|     } | ||||
|     /** | ||||
|      * Create an instance of an AzureSASCredential for use | ||||
|      * with a service client. | ||||
| @ -10541,12 +10680,6 @@ class AzureSASCredential { | ||||
|         } | ||||
|         this._signature = signature; | ||||
|     } | ||||
|     /** | ||||
|      * The value of the shared access signature to be used in authentication | ||||
|      */ | ||||
|     get signature() { | ||||
|         return this._signature; | ||||
|     } | ||||
|     /** | ||||
|      * Change the value of the signature. | ||||
|      * | ||||
| @ -10568,7 +10701,7 @@ class AzureSASCredential { | ||||
|  * @param credential - The assumed SASCredential to be tested. | ||||
|  */ | ||||
| function isSASCredential(credential) { | ||||
|     return (isObjectWithProperties(credential, ["signature"]) && typeof credential.signature === "string"); | ||||
|     return (coreUtil.isObjectWithProperties(credential, ["signature"]) && typeof credential.signature === "string"); | ||||
| } | ||||
| 
 | ||||
| // Copyright (c) Microsoft Corporation.
 | ||||
| @ -18771,14 +18904,6 @@ Object.defineProperty(exports, "__esModule", ({ value: true })); | ||||
| var abortController = __nccwpck_require__(978); | ||||
| var crypto = __nccwpck_require__(6113); | ||||
| 
 | ||||
| // Copyright (c) Microsoft Corporation.
 | ||||
| // Licensed under the MIT license.
 | ||||
| var _a$1; | ||||
| /** | ||||
|  * A constant that indicates whether the environment the code is running is Node.JS. | ||||
|  */ | ||||
| const isNode = typeof process !== "undefined" && Boolean(process.version) && Boolean((_a$1 = process.versions) === null || _a$1 === void 0 ? void 0 : _a$1.node); | ||||
| 
 | ||||
| // Copyright (c) Microsoft Corporation.
 | ||||
| /** | ||||
|  * Creates an abortable promise. | ||||
| @ -19005,9 +19130,9 @@ function generateUUID() { | ||||
| 
 | ||||
| // Copyright (c) Microsoft Corporation.
 | ||||
| // Licensed under the MIT license.
 | ||||
| var _a; | ||||
| var _a$1; | ||||
| // NOTE: This is a workaround until we can use `globalThis.crypto.randomUUID` in Node.js 19+.
 | ||||
| let uuidFunction = typeof ((_a = globalThis === null || globalThis === void 0 ? void 0 : globalThis.crypto) === null || _a === void 0 ? void 0 : _a.randomUUID) === "function" | ||||
| let uuidFunction = typeof ((_a$1 = globalThis === null || globalThis === void 0 ? void 0 : globalThis.crypto) === null || _a$1 === void 0 ? void 0 : _a$1.randomUUID) === "function" | ||||
|     ? globalThis.crypto.randomUUID.bind(globalThis.crypto) | ||||
|     : crypto.randomUUID; | ||||
| // Not defined in earlier versions of Node.js 14
 | ||||
| @ -19023,19 +19148,139 @@ function randomUUID() { | ||||
|     return uuidFunction(); | ||||
| } | ||||
| 
 | ||||
| // Copyright (c) Microsoft Corporation.
 | ||||
| // Licensed under the MIT license.
 | ||||
| var _a, _b, _c, _d; | ||||
| /** | ||||
|  * A constant that indicates whether the environment the code is running is a Web Browser. | ||||
|  */ | ||||
| // eslint-disable-next-line @azure/azure-sdk/ts-no-window
 | ||||
| const isBrowser = typeof window !== "undefined" && typeof window.document !== "undefined"; | ||||
| /** | ||||
|  * A constant that indicates whether the environment the code is running is a Web Worker. | ||||
|  */ | ||||
| const isWebWorker = typeof self === "object" && | ||||
|     typeof (self === null || self === void 0 ? void 0 : self.importScripts) === "function" && | ||||
|     (((_a = self.constructor) === null || _a === void 0 ? void 0 : _a.name) === "DedicatedWorkerGlobalScope" || | ||||
|         ((_b = self.constructor) === null || _b === void 0 ? void 0 : _b.name) === "ServiceWorkerGlobalScope" || | ||||
|         ((_c = self.constructor) === null || _c === void 0 ? void 0 : _c.name) === "SharedWorkerGlobalScope"); | ||||
| /** | ||||
|  * A constant that indicates whether the environment the code is running is Node.JS. | ||||
|  */ | ||||
| const isNode = typeof process !== "undefined" && Boolean(process.version) && Boolean((_d = process.versions) === null || _d === void 0 ? void 0 : _d.node); | ||||
| /** | ||||
|  * A constant that indicates whether the environment the code is running is Deno. | ||||
|  */ | ||||
| const isDeno = typeof Deno !== "undefined" && | ||||
|     typeof Deno.version !== "undefined" && | ||||
|     typeof Deno.version.deno !== "undefined"; | ||||
| /** | ||||
|  * A constant that indicates whether the environment the code is running is Bun.sh. | ||||
|  */ | ||||
| const isBun = typeof Bun !== "undefined" && typeof Bun.version !== "undefined"; | ||||
| /** | ||||
|  * A constant that indicates whether the environment the code is running is in React-Native. | ||||
|  */ | ||||
| // https://github.com/facebook/react-native/blob/main/packages/react-native/Libraries/Core/setUpNavigator.js
 | ||||
| const isReactNative = typeof navigator !== "undefined" && (navigator === null || navigator === void 0 ? void 0 : navigator.product) === "ReactNative"; | ||||
| 
 | ||||
| // Copyright (c) Microsoft Corporation.
 | ||||
| // Licensed under the MIT license.
 | ||||
| /** | ||||
|  * The helper that transforms bytes with specific character encoding into string | ||||
|  * @param bytes - the uint8array bytes | ||||
|  * @param format - the format we use to encode the byte | ||||
|  * @returns a string of the encoded string | ||||
|  */ | ||||
| function uint8ArrayToString(bytes, format) { | ||||
|     switch (format) { | ||||
|         case "utf-8": | ||||
|             return uint8ArrayToUtf8String(bytes); | ||||
|         case "base64": | ||||
|             return uint8ArrayToBase64(bytes); | ||||
|         case "base64url": | ||||
|             return uint8ArrayToBase64Url(bytes); | ||||
|     } | ||||
| } | ||||
| /** | ||||
|  * The helper that transforms string to specific character encoded bytes array. | ||||
|  * @param value - the string to be converted | ||||
|  * @param format - the format we use to decode the value | ||||
|  * @returns a uint8array | ||||
|  */ | ||||
| function stringToUint8Array(value, format) { | ||||
|     switch (format) { | ||||
|         case "utf-8": | ||||
|             return utf8StringToUint8Array(value); | ||||
|         case "base64": | ||||
|             return base64ToUint8Array(value); | ||||
|         case "base64url": | ||||
|             return base64UrlToUint8Array(value); | ||||
|     } | ||||
| } | ||||
| /** | ||||
|  * Decodes a Uint8Array into a Base64 string. | ||||
|  * @internal | ||||
|  */ | ||||
| function uint8ArrayToBase64(bytes) { | ||||
|     return Buffer.from(bytes).toString("base64"); | ||||
| } | ||||
| /** | ||||
|  * Decodes a Uint8Array into a Base64Url string. | ||||
|  * @internal | ||||
|  */ | ||||
| function uint8ArrayToBase64Url(bytes) { | ||||
|     return Buffer.from(bytes).toString("base64url"); | ||||
| } | ||||
| /** | ||||
|  * Decodes a Uint8Array into a javascript string. | ||||
|  * @internal | ||||
|  */ | ||||
| function uint8ArrayToUtf8String(bytes) { | ||||
|     return Buffer.from(bytes).toString("utf-8"); | ||||
| } | ||||
| /** | ||||
|  * Encodes a JavaScript string into a Uint8Array. | ||||
|  * @internal | ||||
|  */ | ||||
| function utf8StringToUint8Array(value) { | ||||
|     return Buffer.from(value); | ||||
| } | ||||
| /** | ||||
|  * Encodes a Base64 string into a Uint8Array. | ||||
|  * @internal | ||||
|  */ | ||||
| function base64ToUint8Array(value) { | ||||
|     return Buffer.from(value, "base64"); | ||||
| } | ||||
| /** | ||||
|  * Encodes a Base64Url string into a Uint8Array. | ||||
|  * @internal | ||||
|  */ | ||||
| function base64UrlToUint8Array(value) { | ||||
|     return Buffer.from(value, "base64url"); | ||||
| } | ||||
| 
 | ||||
| exports.computeSha256Hash = computeSha256Hash; | ||||
| exports.computeSha256Hmac = computeSha256Hmac; | ||||
| exports.createAbortablePromise = createAbortablePromise; | ||||
| exports.delay = delay; | ||||
| exports.getErrorMessage = getErrorMessage; | ||||
| exports.getRandomIntegerInclusive = getRandomIntegerInclusive; | ||||
| exports.isBrowser = isBrowser; | ||||
| exports.isBun = isBun; | ||||
| exports.isDefined = isDefined; | ||||
| exports.isDeno = isDeno; | ||||
| exports.isError = isError; | ||||
| exports.isNode = isNode; | ||||
| exports.isObject = isObject; | ||||
| exports.isObjectWithProperties = isObjectWithProperties; | ||||
| exports.isReactNative = isReactNative; | ||||
| exports.isWebWorker = isWebWorker; | ||||
| exports.objectHasProperty = objectHasProperty; | ||||
| exports.randomUUID = randomUUID; | ||||
| exports.stringToUint8Array = stringToUint8Array; | ||||
| exports.uint8ArrayToString = uint8ArrayToString; | ||||
| //# sourceMappingURL=index.js.map
 | ||||
| 
 | ||||
| 
 | ||||
| @ -50699,10 +50944,6 @@ function getNodeRequestOptions(request) { | ||||
| 		agent = agent(parsedURL); | ||||
| 	} | ||||
| 
 | ||||
| 	if (!headers.has('Connection') && !agent) { | ||||
| 		headers.set('Connection', 'close'); | ||||
| 	} | ||||
| 
 | ||||
| 	// HTTP-network fetch step 4.2
 | ||||
| 	// chunked encoding is handled by Node.js
 | ||||
| 
 | ||||
|  | ||||
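The concurrent download path added to both vendored bundles splits the archive into 4 MiB ranges, keeps a bounded number of range requests in flight, and writes each finished segment at its file offset. The sketch below is a simplification under stated assumptions (Node 18+ global fetch; the names downloadConcurrentSketch and fetchRange are illustrative); the real downloadCacheHttpClientConcurrent additionally retries each segment up to five times, applies a 30 s per-segment timeout, and reports progress.

import { open } from 'node:fs/promises';

// Simplified sketch of the concurrent-download loop shown in the diff above.
async function downloadConcurrentSketch(url: string, dest: string, concurrency = 8): Promise<void> {
  const head = await fetch(url, { method: 'HEAD' });
  const lengthHeader = head.headers.get('content-length');
  if (lengthHeader === null) throw new Error('Content-Length not found on blob response');
  const length = parseInt(lengthHeader, 10);
  if (Number.isNaN(length)) throw new Error(`Could not interpret Content-Length: ${lengthHeader}`);

  const blockSize = 4 * 1024 * 1024; // 4 MiB segments, as in the vendored code
  const file = await open(dest, 'w');
  try {
    // Keep up to `concurrency` range requests in flight; whichever settles first
    // (Promise.race) is written at its offset before the next one is started.
    const active = new Map<number, Promise<{ offset: number; count: number; buffer: Buffer }>>();
    const drainOne = async () => {
      const seg = await Promise.race(active.values());
      active.delete(seg.offset);
      await file.write(seg.buffer, 0, seg.count, seg.offset);
    };

    for (let offset = 0; offset < length; offset += blockSize) {
      const count = Math.min(blockSize, length - offset);
      active.set(offset, fetchRange(url, offset, count).then(buffer => ({ offset, count, buffer })));
      if (active.size >= concurrency) await drainOne();
    }
    while (active.size > 0) await drainOne();
  } finally {
    await file.close();
  }
}

// Hypothetical helper: one HTTP range request for bytes [offset, offset + count - 1].
async function fetchRange(url: string, offset: number, count: number): Promise<Buffer> {
  const res = await fetch(url, { headers: { Range: `bytes=${offset}-${offset + count - 1}` } });
  if (!res.ok) throw new Error(`range request failed with ${res.status}`);
  return Buffer.from(await res.arrayBuffer());
}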
							
								
								
									
dist/save/index.js (vendored), 469 lines changed
							| @ -39,7 +39,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge | ||||
|     }); | ||||
| }; | ||||
| Object.defineProperty(exports, "__esModule", ({ value: true })); | ||||
| exports.saveCache = exports.restoreCache = exports.isFeatureAvailable = exports.ReserveCacheError = exports.ValidationError = void 0; | ||||
| exports.deleteCache = exports.saveCache = exports.restoreCache = exports.isFeatureAvailable = exports.ReserveCacheError = exports.ValidationError = void 0; | ||||
| const core = __importStar(__nccwpck_require__(2186)); | ||||
| const path = __importStar(__nccwpck_require__(1017)); | ||||
| const utils = __importStar(__nccwpck_require__(2552)); | ||||
| @ -260,6 +260,23 @@ function saveCache(paths, key, options, enableCrossOsArchive = false) { | ||||
|     }); | ||||
| } | ||||
| exports.saveCache = saveCache; | ||||
| /** | ||||
|  * Delete a list of caches with the specified keys | ||||
|  * @param keys a list of keys for deleting the cache | ||||
|  */ | ||||
| function deleteCache(keys) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         core.debug('Deleting Cache'); | ||||
|         core.debug(`Cache Keys: ${keys}`); | ||||
|         try { | ||||
|             yield cacheHttpClient.deleteCache(keys); | ||||
|         } | ||||
|         catch (error) { | ||||
|             core.warning(`Failed to delete: ${error.message}`); | ||||
|         } | ||||
|     }); | ||||
| } | ||||
| exports.deleteCache = deleteCache; | ||||
| //# sourceMappingURL=cache.js.map
 | ||||
| 
 | ||||
| /***/ }), | ||||
| @ -302,7 +319,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge | ||||
|     }); | ||||
| }; | ||||
| Object.defineProperty(exports, "__esModule", ({ value: true })); | ||||
| exports.saveCache = exports.reserveCache = exports.reportCacheRestore = exports.downloadCache = exports.getCacheEntry = exports.getCacheVersion = void 0; | ||||
| exports.deleteCache = exports.saveCache = exports.reserveCache = exports.reportCacheRestore = exports.downloadCache = exports.getCacheEntry = exports.getCacheVersion = void 0; | ||||
| const core = __importStar(__nccwpck_require__(2186)); | ||||
| const http_client_1 = __nccwpck_require__(6255); | ||||
| const auth_1 = __nccwpck_require__(5526); | ||||
| @ -548,6 +565,17 @@ function saveCache(key, version, uploadId, urls, archivePath, archiveTimeMs, opt | ||||
|     }); | ||||
| } | ||||
| exports.saveCache = saveCache; | ||||
| function deleteCache(keys) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         const httpClient = createHttpClient(); | ||||
|         const resource = `cache?keys=${encodeURIComponent(keys.join(','))}`; | ||||
|         const response = yield (0, requestUtils_1.retryHttpClientResponse)('deleteCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.del(getCacheApiUrl(resource)); })); | ||||
|         if (!(0, requestUtils_1.isSuccessStatusCode)(response.message.statusCode)) { | ||||
|             throw new Error(`Cache service responded with ${response.message.statusCode}`); | ||||
|         } | ||||
|     }); | ||||
| } | ||||
| exports.deleteCache = deleteCache; | ||||
| //# sourceMappingURL=cacheHttpClient.js.map
 | ||||
| 
 | ||||
| /***/ }), | ||||
| @ -1500,7 +1528,7 @@ exports.createTar = createTar; | ||||
| 
 | ||||
| Object.defineProperty(exports, "__esModule", ({ value: true })); | ||||
| exports.LIB_VERSION = void 0; | ||||
| exports.LIB_VERSION = "0.1.2"; | ||||
| exports.LIB_VERSION = "0.2.0"; | ||||
| //# sourceMappingURL=version.js.map
 | ||||
| 
 | ||||
| /***/ }), | ||||
| @ -3001,10 +3029,7 @@ function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false) | ||||
|     } | ||||
|     // Add salt to cache version to support breaking changes in cache entry
 | ||||
|     components.push(versionSalt); | ||||
|     return crypto | ||||
|         .createHash('sha256') | ||||
|         .update(components.join('|')) | ||||
|         .digest('hex'); | ||||
|     return crypto.createHash('sha256').update(components.join('|')).digest('hex'); | ||||
| } | ||||
| exports.getCacheVersion = getCacheVersion; | ||||
| function getCacheEntry(keys, paths, options) { | ||||
| @ -3057,13 +3082,21 @@ function downloadCache(archiveLocation, archivePath, options) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         const archiveUrl = new url_1.URL(archiveLocation); | ||||
|         const downloadOptions = (0, options_1.getDownloadOptions)(options); | ||||
|         if (downloadOptions.useAzureSdk && | ||||
|             archiveUrl.hostname.endsWith('.blob.core.windows.net')) { | ||||
|             // Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability.
 | ||||
|             yield (0, downloadUtils_1.downloadCacheStorageSDK)(archiveLocation, archivePath, downloadOptions); | ||||
|         if (archiveUrl.hostname.endsWith('.blob.core.windows.net')) { | ||||
|             if (downloadOptions.useAzureSdk) { | ||||
|                 // Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability.
 | ||||
|                 yield (0, downloadUtils_1.downloadCacheStorageSDK)(archiveLocation, archivePath, downloadOptions); | ||||
|             } | ||||
|             else if (downloadOptions.concurrentBlobDownloads) { | ||||
|                 // Use concurrent implementation with HttpClient to work around blob SDK issue
 | ||||
|                 yield (0, downloadUtils_1.downloadCacheHttpClientConcurrent)(archiveLocation, archivePath, downloadOptions); | ||||
|             } | ||||
|             else { | ||||
|                 // Otherwise, download using the Actions http-client.
 | ||||
|                 yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath); | ||||
|             } | ||||
|         } | ||||
|         else { | ||||
|             // Otherwise, download using the Actions http-client.
 | ||||
|             yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath); | ||||
|         } | ||||
|     }); | ||||
| @ -3096,9 +3129,7 @@ function getContentRange(start, end) { | ||||
| } | ||||
| function uploadChunk(httpClient, resourceUrl, openStream, start, end) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         core.debug(`Uploading chunk of size ${end - | ||||
|             start + | ||||
|             1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
 | ||||
|         core.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); | ||||
|         const additionalHeaders = { | ||||
|             'Content-Type': 'application/octet-stream', | ||||
|             'Content-Range': getContentRange(start, end) | ||||
| @ -3266,35 +3297,42 @@ function getArchiveFileSizeInBytes(filePath) { | ||||
| } | ||||
| exports.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes; | ||||
| function resolvePaths(patterns) { | ||||
|     var e_1, _a; | ||||
|     var _b; | ||||
|     var _a, e_1, _b, _c; | ||||
|     var _d; | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         const paths = []; | ||||
|         const workspace = (_b = process.env['GITHUB_WORKSPACE']) !== null && _b !== void 0 ? _b : process.cwd(); | ||||
|         const workspace = (_d = process.env['GITHUB_WORKSPACE']) !== null && _d !== void 0 ? _d : process.cwd(); | ||||
|         const globber = yield glob.create(patterns.join('\n'), { | ||||
|             implicitDescendants: false | ||||
|         }); | ||||
|         try { | ||||
|             for (var _c = __asyncValues(globber.globGenerator()), _d; _d = yield _c.next(), !_d.done;) { | ||||
|                 const file = _d.value; | ||||
|                 const relativeFile = path | ||||
|                     .relative(workspace, file) | ||||
|                     .replace(new RegExp(`\\${path.sep}`, 'g'), '/'); | ||||
|                 core.debug(`Matched: ${relativeFile}`); | ||||
|                 // Paths are made relative so the tar entries are all relative to the root of the workspace.
 | ||||
|                 if (relativeFile === '') { | ||||
|                     // path.relative returns empty string if workspace and file are equal
 | ||||
|                     paths.push('.'); | ||||
|             for (var _e = true, _f = __asyncValues(globber.globGenerator()), _g; _g = yield _f.next(), _a = _g.done, !_a;) { | ||||
|                 _c = _g.value; | ||||
|                 _e = false; | ||||
|                 try { | ||||
|                     const file = _c; | ||||
|                     const relativeFile = path | ||||
|                         .relative(workspace, file) | ||||
|                         .replace(new RegExp(`\\${path.sep}`, 'g'), '/'); | ||||
|                     core.debug(`Matched: ${relativeFile}`); | ||||
|                     // Paths are made relative so the tar entries are all relative to the root of the workspace.
 | ||||
|                     if (relativeFile === '') { | ||||
|                         // path.relative returns empty string if workspace and file are equal
 | ||||
|                         paths.push('.'); | ||||
|                     } | ||||
|                     else { | ||||
|                         paths.push(`${relativeFile}`); | ||||
|                     } | ||||
|                 } | ||||
|                 else { | ||||
|                     paths.push(`${relativeFile}`); | ||||
|                 finally { | ||||
|                     _e = true; | ||||
|                 } | ||||
|             } | ||||
|         } | ||||
|         catch (e_1_1) { e_1 = { error: e_1_1 }; } | ||||
|         finally { | ||||
|             try { | ||||
|                 if (_d && !_d.done && (_a = _c.return)) yield _a.call(_c); | ||||
|                 if (!_e && !_a && (_b = _f.return)) yield _b.call(_f); | ||||
|             } | ||||
|             finally { if (e_1) throw e_1.error; } | ||||
|         } | ||||
| @ -3459,7 +3497,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge | ||||
|     }); | ||||
| }; | ||||
| Object.defineProperty(exports, "__esModule", ({ value: true })); | ||||
| exports.downloadCacheStorageSDK = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0; | ||||
| exports.downloadCacheStorageSDK = exports.downloadCacheHttpClientConcurrent = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0; | ||||
| const core = __importStar(__nccwpck_require__(2186)); | ||||
| const http_client_1 = __nccwpck_require__(6255); | ||||
| const storage_blob_1 = __nccwpck_require__(4100); | ||||
| @ -3616,6 +3654,115 @@ function downloadCacheHttpClient(archiveLocation, archivePath) { | ||||
|     }); | ||||
| } | ||||
| exports.downloadCacheHttpClient = downloadCacheHttpClient; | ||||
| /** | ||||
|  * Download the cache using the Actions toolkit http-client concurrently | ||||
|  * | ||||
|  * @param archiveLocation the URL for the cache | ||||
|  * @param archivePath the local path where the cache is saved | ||||
|  */ | ||||
| function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) { | ||||
|     var _a; | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         const archiveDescriptor = yield fs.promises.open(archivePath, 'w'); | ||||
|         const httpClient = new http_client_1.HttpClient('actions/cache', undefined, { | ||||
|             socketTimeout: options.timeoutInMs, | ||||
|             keepAlive: true | ||||
|         }); | ||||
|         try { | ||||
|             const res = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCacheMetadata', () => __awaiter(this, void 0, void 0, function* () { return yield httpClient.request('HEAD', archiveLocation, null, {}); })); | ||||
|             const lengthHeader = res.message.headers['content-length']; | ||||
|             if (lengthHeader === undefined || lengthHeader === null) { | ||||
|                 throw new Error('Content-Length not found on blob response'); | ||||
|             } | ||||
|             const length = parseInt(lengthHeader); | ||||
|             if (Number.isNaN(length)) { | ||||
|                 throw new Error(`Could not interpret Content-Length: ${length}`); | ||||
|             } | ||||
|             const downloads = []; | ||||
|             const blockSize = 4 * 1024 * 1024; | ||||
|             for (let offset = 0; offset < length; offset += blockSize) { | ||||
|                 const count = Math.min(blockSize, length - offset); | ||||
|                 downloads.push({ | ||||
|                     offset, | ||||
|                     promiseGetter: () => __awaiter(this, void 0, void 0, function* () { | ||||
|                         return yield downloadSegmentRetry(httpClient, archiveLocation, offset, count); | ||||
|                     }) | ||||
|                 }); | ||||
|             } | ||||
|             // reverse to use .pop instead of .shift
 | ||||
|             downloads.reverse(); | ||||
|             let actives = 0; | ||||
|             let bytesDownloaded = 0; | ||||
|             const progress = new DownloadProgress(length); | ||||
|             progress.startDisplayTimer(); | ||||
|             const progressFn = progress.onProgress(); | ||||
|             const activeDownloads = []; | ||||
|             let nextDownload; | ||||
|             const waitAndWrite = () => __awaiter(this, void 0, void 0, function* () { | ||||
|                 const segment = yield Promise.race(Object.values(activeDownloads)); | ||||
|                 yield archiveDescriptor.write(segment.buffer, 0, segment.count, segment.offset); | ||||
|                 actives--; | ||||
|                 delete activeDownloads[segment.offset]; | ||||
|                 bytesDownloaded += segment.count; | ||||
|                 progressFn({ loadedBytes: bytesDownloaded }); | ||||
|             }); | ||||
|             while ((nextDownload = downloads.pop())) { | ||||
|                 activeDownloads[nextDownload.offset] = nextDownload.promiseGetter(); | ||||
|                 actives++; | ||||
|                 if (actives >= ((_a = options.downloadConcurrency) !== null && _a !== void 0 ? _a : 10)) { | ||||
|                     yield waitAndWrite(); | ||||
|                 } | ||||
|             } | ||||
|             while (actives > 0) { | ||||
|                 yield waitAndWrite(); | ||||
|             } | ||||
|         } | ||||
|         finally { | ||||
|             httpClient.dispose(); | ||||
|             yield archiveDescriptor.close(); | ||||
|         } | ||||
|     }); | ||||
| } | ||||
| exports.downloadCacheHttpClientConcurrent = downloadCacheHttpClientConcurrent; | ||||
| function downloadSegmentRetry(httpClient, archiveLocation, offset, count) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         const retries = 5; | ||||
|         let failures = 0; | ||||
|         while (true) { | ||||
|             try { | ||||
|                 const timeout = 30000; | ||||
|                 const result = yield promiseWithTimeout(timeout, downloadSegment(httpClient, archiveLocation, offset, count)); | ||||
|                 if (typeof result === 'string') { | ||||
|                     throw new Error('downloadSegmentRetry failed due to timeout'); | ||||
|                 } | ||||
|                 return result; | ||||
|             } | ||||
|             catch (err) { | ||||
|                 if (failures >= retries) { | ||||
|                     throw err; | ||||
|                 } | ||||
|                 failures++; | ||||
|             } | ||||
|         } | ||||
|     }); | ||||
| } | ||||
| function downloadSegment(httpClient, archiveLocation, offset, count) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         const partRes = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCachePart', () => __awaiter(this, void 0, void 0, function* () { | ||||
|             return yield httpClient.get(archiveLocation, { | ||||
|                 Range: `bytes=${offset}-${offset + count - 1}` | ||||
|             }); | ||||
|         })); | ||||
|         if (!partRes.readBodyBuffer) { | ||||
|             throw new Error('Expected HttpClientResponse to implement readBodyBuffer'); | ||||
|         } | ||||
|         return { | ||||
|             offset, | ||||
|             count, | ||||
|             buffer: yield partRes.readBodyBuffer() | ||||
|         }; | ||||
|     }); | ||||
| } | ||||
| /** | ||||
|  * Download the cache using the Azure Storage SDK.  Only call this method if the | ||||
|  * URL points to an Azure Storage endpoint. | ||||
| @ -4181,7 +4328,8 @@ exports.getUploadOptions = getUploadOptions; | ||||
|  */ | ||||
| function getDownloadOptions(copy) { | ||||
|     const result = { | ||||
|         useAzureSdk: true, | ||||
|         useAzureSdk: false, | ||||
|         concurrentBlobDownloads: true, | ||||
|         downloadConcurrency: 8, | ||||
|         timeoutInMs: 30000, | ||||
|         segmentTimeoutInMs: 600000, | ||||
| @ -4191,6 +4339,9 @@ function getDownloadOptions(copy) { | ||||
|         if (typeof copy.useAzureSdk === 'boolean') { | ||||
|             result.useAzureSdk = copy.useAzureSdk; | ||||
|         } | ||||
|         if (typeof copy.concurrentBlobDownloads === 'boolean') { | ||||
|             result.concurrentBlobDownloads = copy.concurrentBlobDownloads; | ||||
|         } | ||||
|         if (typeof copy.downloadConcurrency === 'number') { | ||||
|             result.downloadConcurrency = copy.downloadConcurrency; | ||||
|         } | ||||
| @ -9069,6 +9220,19 @@ class HttpClientResponse { | ||||
|             })); | ||||
|         }); | ||||
|     } | ||||
|     readBodyBuffer() { | ||||
|         return __awaiter(this, void 0, void 0, function* () { | ||||
|             return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () { | ||||
|                 const chunks = []; | ||||
|                 this.message.on('data', (chunk) => { | ||||
|                     chunks.push(chunk); | ||||
|                 }); | ||||
|                 this.message.on('end', () => { | ||||
|                     resolve(Buffer.concat(chunks)); | ||||
|                 }); | ||||
|             })); | ||||
|         }); | ||||
|     } | ||||
| } | ||||
| exports.HttpClientResponse = HttpClientResponse; | ||||
| function isHttps(requestUrl) { | ||||
| @ -9573,7 +9737,13 @@ function getProxyUrl(reqUrl) { | ||||
|         } | ||||
|     })(); | ||||
|     if (proxyVar) { | ||||
|         return new URL(proxyVar); | ||||
|         try { | ||||
|             return new URL(proxyVar); | ||||
|         } | ||||
|         catch (_a) { | ||||
|             if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://')) | ||||
|                 return new URL(`http://${proxyVar}`); | ||||
|         } | ||||
|     } | ||||
|     else { | ||||
|         return undefined; | ||||
| @ -10379,13 +10549,15 @@ exports.AbortSignal = AbortSignal; | ||||
| /***/ }), | ||||
| 
 | ||||
| /***/ 9645: | ||||
| /***/ ((__unused_webpack_module, exports) => { | ||||
| /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { | ||||
| 
 | ||||
| "use strict"; | ||||
| 
 | ||||
| 
 | ||||
| Object.defineProperty(exports, "__esModule", ({ value: true })); | ||||
| 
 | ||||
| var coreUtil = __nccwpck_require__(1333); | ||||
| 
 | ||||
| // Copyright (c) Microsoft Corporation.
 | ||||
| // Licensed under the MIT license.
 | ||||
| /** | ||||
| @ -10393,6 +10565,12 @@ Object.defineProperty(exports, "__esModule", ({ value: true })); | ||||
|  * the underlying key value. | ||||
|  */ | ||||
| class AzureKeyCredential { | ||||
|     /** | ||||
|      * The value of the key to be used in authentication | ||||
|      */ | ||||
|     get key() { | ||||
|         return this._key; | ||||
|     } | ||||
|     /** | ||||
|      * Create an instance of an AzureKeyCredential for use | ||||
|      * with a service client. | ||||
| @ -10405,12 +10583,6 @@ class AzureKeyCredential { | ||||
|         } | ||||
|         this._key = key; | ||||
|     } | ||||
|     /** | ||||
|      * The value of the key to be used in authentication | ||||
|      */ | ||||
|     get key() { | ||||
|         return this._key; | ||||
|     } | ||||
|     /** | ||||
|      * Change the value of the key. | ||||
|      * | ||||
| @ -10424,51 +10596,24 @@ class AzureKeyCredential { | ||||
|     } | ||||
| } | ||||
| 
 | ||||
| // Copyright (c) Microsoft Corporation.
 | ||||
| // Licensed under the MIT license.
 | ||||
| /** | ||||
|  * Helper TypeGuard that checks if something is defined or not. | ||||
|  * @param thing - Anything | ||||
|  * @internal | ||||
|  */ | ||||
| function isDefined(thing) { | ||||
|     return typeof thing !== "undefined" && thing !== null; | ||||
| } | ||||
| /** | ||||
|  * Helper TypeGuard that checks if the input is an object with the specified properties. | ||||
|  * Note: The properties may be inherited. | ||||
|  * @param thing - Anything. | ||||
|  * @param properties - The name of the properties that should appear in the object. | ||||
|  * @internal | ||||
|  */ | ||||
| function isObjectWithProperties(thing, properties) { | ||||
|     if (!isDefined(thing) || typeof thing !== "object") { | ||||
|         return false; | ||||
|     } | ||||
|     for (const property of properties) { | ||||
|         if (!objectHasProperty(thing, property)) { | ||||
|             return false; | ||||
|         } | ||||
|     } | ||||
|     return true; | ||||
| } | ||||
| /** | ||||
|  * Helper TypeGuard that checks if the input is an object with the specified property. | ||||
|  * Note: The property may be inherited. | ||||
|  * @param thing - Any object. | ||||
|  * @param property - The name of the property that should appear in the object. | ||||
|  * @internal | ||||
|  */ | ||||
| function objectHasProperty(thing, property) { | ||||
|     return typeof thing === "object" && property in thing; | ||||
| } | ||||
| 
 | ||||
| // Copyright (c) Microsoft Corporation.
 | ||||
| /** | ||||
|  * A static name/key-based credential that supports updating | ||||
|  * the underlying name and key values. | ||||
|  */ | ||||
| class AzureNamedKeyCredential { | ||||
|     /** | ||||
|      * The value of the key to be used in authentication. | ||||
|      */ | ||||
|     get key() { | ||||
|         return this._key; | ||||
|     } | ||||
|     /** | ||||
|      * The value of the name to be used in authentication. | ||||
|      */ | ||||
|     get name() { | ||||
|         return this._name; | ||||
|     } | ||||
|     /** | ||||
|      * Create an instance of an AzureNamedKeyCredential for use | ||||
|      * with a service client. | ||||
| @ -10483,18 +10628,6 @@ class AzureNamedKeyCredential { | ||||
|         this._name = name; | ||||
|         this._key = key; | ||||
|     } | ||||
|     /** | ||||
|      * The value of the key to be used in authentication. | ||||
|      */ | ||||
|     get key() { | ||||
|         return this._key; | ||||
|     } | ||||
|     /** | ||||
|      * The value of the name to be used in authentication. | ||||
|      */ | ||||
|     get name() { | ||||
|         return this._name; | ||||
|     } | ||||
|     /** | ||||
|      * Change the value of the key. | ||||
|      * | ||||
| @ -10518,7 +10651,7 @@ class AzureNamedKeyCredential { | ||||
|  * @param credential - The assumed NamedKeyCredential to be tested. | ||||
|  */ | ||||
| function isNamedKeyCredential(credential) { | ||||
|     return (isObjectWithProperties(credential, ["name", "key"]) && | ||||
|     return (coreUtil.isObjectWithProperties(credential, ["name", "key"]) && | ||||
|         typeof credential.key === "string" && | ||||
|         typeof credential.name === "string"); | ||||
| } | ||||
| @ -10529,6 +10662,12 @@ function isNamedKeyCredential(credential) { | ||||
|  * the underlying signature value. | ||||
|  */ | ||||
| class AzureSASCredential { | ||||
|     /** | ||||
|      * The value of the shared access signature to be used in authentication | ||||
|      */ | ||||
|     get signature() { | ||||
|         return this._signature; | ||||
|     } | ||||
|     /** | ||||
|      * Create an instance of an AzureSASCredential for use | ||||
|      * with a service client. | ||||
| @ -10541,12 +10680,6 @@ class AzureSASCredential { | ||||
|         } | ||||
|         this._signature = signature; | ||||
|     } | ||||
|     /** | ||||
|      * The value of the shared access signature to be used in authentication | ||||
|      */ | ||||
|     get signature() { | ||||
|         return this._signature; | ||||
|     } | ||||
|     /** | ||||
|      * Change the value of the signature. | ||||
|      * | ||||
| @ -10568,7 +10701,7 @@ class AzureSASCredential { | ||||
|  * @param credential - The assumed SASCredential to be tested. | ||||
|  */ | ||||
| function isSASCredential(credential) { | ||||
|     return (isObjectWithProperties(credential, ["signature"]) && typeof credential.signature === "string"); | ||||
|     return (coreUtil.isObjectWithProperties(credential, ["signature"]) && typeof credential.signature === "string"); | ||||
| } | ||||
| 
 | ||||
| // Copyright (c) Microsoft Corporation.
 | ||||
| @ -18771,14 +18904,6 @@ Object.defineProperty(exports, "__esModule", ({ value: true })); | ||||
| var abortController = __nccwpck_require__(978); | ||||
| var crypto = __nccwpck_require__(6113); | ||||
| 
 | ||||
| // Copyright (c) Microsoft Corporation.
 | ||||
| // Licensed under the MIT license.
 | ||||
| var _a$1; | ||||
| /** | ||||
|  * A constant that indicates whether the environment the code is running is Node.JS. | ||||
|  */ | ||||
| const isNode = typeof process !== "undefined" && Boolean(process.version) && Boolean((_a$1 = process.versions) === null || _a$1 === void 0 ? void 0 : _a$1.node); | ||||
| 
 | ||||
| // Copyright (c) Microsoft Corporation.
 | ||||
| /** | ||||
|  * Creates an abortable promise. | ||||
| @ -19005,9 +19130,9 @@ function generateUUID() { | ||||
| 
 | ||||
| // Copyright (c) Microsoft Corporation.
 | ||||
| // Licensed under the MIT license.
 | ||||
| var _a; | ||||
| var _a$1; | ||||
| // NOTE: This is a workaround until we can use `globalThis.crypto.randomUUID` in Node.js 19+.
 | ||||
| let uuidFunction = typeof ((_a = globalThis === null || globalThis === void 0 ? void 0 : globalThis.crypto) === null || _a === void 0 ? void 0 : _a.randomUUID) === "function" | ||||
| let uuidFunction = typeof ((_a$1 = globalThis === null || globalThis === void 0 ? void 0 : globalThis.crypto) === null || _a$1 === void 0 ? void 0 : _a$1.randomUUID) === "function" | ||||
|     ? globalThis.crypto.randomUUID.bind(globalThis.crypto) | ||||
|     : crypto.randomUUID; | ||||
| // Not defined in earlier versions of Node.js 14
 | ||||
| @ -19023,19 +19148,139 @@ function randomUUID() { | ||||
|     return uuidFunction(); | ||||
| } | ||||
| 
 | ||||
| // Copyright (c) Microsoft Corporation.
 | ||||
| // Licensed under the MIT license.
 | ||||
| var _a, _b, _c, _d; | ||||
| /** | ||||
|  * A constant that indicates whether the environment the code is running is a Web Browser. | ||||
|  */ | ||||
| // eslint-disable-next-line @azure/azure-sdk/ts-no-window
 | ||||
| const isBrowser = typeof window !== "undefined" && typeof window.document !== "undefined"; | ||||
| /** | ||||
|  * A constant that indicates whether the environment the code is running is a Web Worker. | ||||
|  */ | ||||
| const isWebWorker = typeof self === "object" && | ||||
|     typeof (self === null || self === void 0 ? void 0 : self.importScripts) === "function" && | ||||
|     (((_a = self.constructor) === null || _a === void 0 ? void 0 : _a.name) === "DedicatedWorkerGlobalScope" || | ||||
|         ((_b = self.constructor) === null || _b === void 0 ? void 0 : _b.name) === "ServiceWorkerGlobalScope" || | ||||
|         ((_c = self.constructor) === null || _c === void 0 ? void 0 : _c.name) === "SharedWorkerGlobalScope"); | ||||
| /** | ||||
|  * A constant that indicates whether the environment the code is running is Node.JS. | ||||
|  */ | ||||
| const isNode = typeof process !== "undefined" && Boolean(process.version) && Boolean((_d = process.versions) === null || _d === void 0 ? void 0 : _d.node); | ||||
| /** | ||||
|  * A constant that indicates whether the environment the code is running is Deno. | ||||
|  */ | ||||
| const isDeno = typeof Deno !== "undefined" && | ||||
|     typeof Deno.version !== "undefined" && | ||||
|     typeof Deno.version.deno !== "undefined"; | ||||
| /** | ||||
|  * A constant that indicates whether the environment the code is running is Bun.sh. | ||||
|  */ | ||||
| const isBun = typeof Bun !== "undefined" && typeof Bun.version !== "undefined"; | ||||
| /** | ||||
|  * A constant that indicates whether the environment the code is running is in React-Native. | ||||
|  */ | ||||
| // https://github.com/facebook/react-native/blob/main/packages/react-native/Libraries/Core/setUpNavigator.js
 | ||||
| const isReactNative = typeof navigator !== "undefined" && (navigator === null || navigator === void 0 ? void 0 : navigator.product) === "ReactNative"; | ||||
| 
 | ||||
| // Copyright (c) Microsoft Corporation.
 | ||||
| // Licensed under the MIT license.
 | ||||
| /** | ||||
|  * The helper that transforms bytes with specific character encoding into string | ||||
|  * @param bytes - the uint8array bytes | ||||
|  * @param format - the format we use to encode the byte | ||||
|  * @returns a string of the encoded string | ||||
|  */ | ||||
| function uint8ArrayToString(bytes, format) { | ||||
|     switch (format) { | ||||
|         case "utf-8": | ||||
|             return uint8ArrayToUtf8String(bytes); | ||||
|         case "base64": | ||||
|             return uint8ArrayToBase64(bytes); | ||||
|         case "base64url": | ||||
|             return uint8ArrayToBase64Url(bytes); | ||||
|     } | ||||
| } | ||||
| /** | ||||
|  * The helper that transforms string to specific character encoded bytes array. | ||||
|  * @param value - the string to be converted | ||||
|  * @param format - the format we use to decode the value | ||||
|  * @returns a uint8array | ||||
|  */ | ||||
| function stringToUint8Array(value, format) { | ||||
|     switch (format) { | ||||
|         case "utf-8": | ||||
|             return utf8StringToUint8Array(value); | ||||
|         case "base64": | ||||
|             return base64ToUint8Array(value); | ||||
|         case "base64url": | ||||
|             return base64UrlToUint8Array(value); | ||||
|     } | ||||
| } | ||||
| /** | ||||
|  * Decodes a Uint8Array into a Base64 string. | ||||
|  * @internal | ||||
|  */ | ||||
| function uint8ArrayToBase64(bytes) { | ||||
|     return Buffer.from(bytes).toString("base64"); | ||||
| } | ||||
| /** | ||||
|  * Decodes a Uint8Array into a Base64Url string. | ||||
|  * @internal | ||||
|  */ | ||||
| function uint8ArrayToBase64Url(bytes) { | ||||
|     return Buffer.from(bytes).toString("base64url"); | ||||
| } | ||||
| /** | ||||
|  * Decodes a Uint8Array into a javascript string. | ||||
|  * @internal | ||||
|  */ | ||||
| function uint8ArrayToUtf8String(bytes) { | ||||
|     return Buffer.from(bytes).toString("utf-8"); | ||||
| } | ||||
| /** | ||||
|  * Encodes a JavaScript string into a Uint8Array. | ||||
|  * @internal | ||||
|  */ | ||||
| function utf8StringToUint8Array(value) { | ||||
|     return Buffer.from(value); | ||||
| } | ||||
| /** | ||||
|  * Encodes a Base64 string into a Uint8Array. | ||||
|  * @internal | ||||
|  */ | ||||
| function base64ToUint8Array(value) { | ||||
|     return Buffer.from(value, "base64"); | ||||
| } | ||||
| /** | ||||
|  * Encodes a Base64Url string into a Uint8Array. | ||||
|  * @internal | ||||
|  */ | ||||
| function base64UrlToUint8Array(value) { | ||||
|     return Buffer.from(value, "base64url"); | ||||
| } | ||||
| 
 | ||||
| exports.computeSha256Hash = computeSha256Hash; | ||||
| exports.computeSha256Hmac = computeSha256Hmac; | ||||
| exports.createAbortablePromise = createAbortablePromise; | ||||
| exports.delay = delay; | ||||
| exports.getErrorMessage = getErrorMessage; | ||||
| exports.getRandomIntegerInclusive = getRandomIntegerInclusive; | ||||
| exports.isBrowser = isBrowser; | ||||
| exports.isBun = isBun; | ||||
| exports.isDefined = isDefined; | ||||
| exports.isDeno = isDeno; | ||||
| exports.isError = isError; | ||||
| exports.isNode = isNode; | ||||
| exports.isObject = isObject; | ||||
| exports.isObjectWithProperties = isObjectWithProperties; | ||||
| exports.isReactNative = isReactNative; | ||||
| exports.isWebWorker = isWebWorker; | ||||
| exports.objectHasProperty = objectHasProperty; | ||||
| exports.randomUUID = randomUUID; | ||||
| exports.stringToUint8Array = stringToUint8Array; | ||||
| exports.uint8ArrayToString = uint8ArrayToString; | ||||
| //# sourceMappingURL=index.js.map
 | ||||
| 
 | ||||
| 
 | ||||
| @ -50699,10 +50944,6 @@ function getNodeRequestOptions(request) { | ||||
| 		agent = agent(parsedURL); | ||||
| 	} | ||||
| 
 | ||||
| 	if (!headers.has('Connection') && !agent) { | ||||
| 		headers.set('Connection', 'close'); | ||||
| 	} | ||||
| 
 | ||||
| 	// HTTP-network fetch step 4.2
 | ||||
| 	// chunked encoding is handled by Node.js
 | ||||
| 
 | ||||
|  | ||||
57 package-lock.json generated
| @ -9,8 +9,8 @@ | ||||
|       "version": "2.6.1", | ||||
|       "license": "LGPL-3.0", | ||||
|       "dependencies": { | ||||
|         "@actions/buildjet-cache": "npm:github-actions.cache-buildjet@0.1.2", | ||||
|         "@actions/cache": "^3.2.1", | ||||
|         "@actions/buildjet-cache": "npm:github-actions.cache-buildjet@0.2.0", | ||||
|         "@actions/cache": "^3.2.2", | ||||
|         "@actions/core": "^1.10.0", | ||||
|         "@actions/exec": "^1.1.1", | ||||
|         "@actions/glob": "^0.4.0", | ||||
| @ -28,9 +28,9 @@ | ||||
|     }, | ||||
|     "node_modules/@actions/buildjet-cache": { | ||||
|       "name": "github-actions.cache-buildjet", | ||||
|       "version": "0.1.2", | ||||
|       "resolved": "https://registry.npmjs.org/github-actions.cache-buildjet/-/github-actions.cache-buildjet-0.1.2.tgz", | ||||
|       "integrity": "sha512-mBgIxCYgDDSzkCCK1/DbVF36K0k2uaSx+Dk4LANat8KMzq7XtYK96ZnS7/fOosqzjtK7AlZtXsBkOoY5NKlcHw==", | ||||
|       "version": "0.2.0", | ||||
|       "resolved": "https://registry.npmjs.org/github-actions.cache-buildjet/-/github-actions.cache-buildjet-0.2.0.tgz", | ||||
|       "integrity": "sha512-Dm1ZL9EBRo3JOwQKrGodxlqRsmFKerhwZj8DYTnWiAIU+qgR4cV8le97Dw/grmBgnAvANNMQlRP6+sNE1auQ6g==", | ||||
|       "dependencies": { | ||||
|         "@actions/core": "^1.10.0", | ||||
|         "@actions/exec": "^1.0.1", | ||||
| @ -51,14 +51,14 @@ | ||||
|       } | ||||
|     }, | ||||
|     "node_modules/@actions/cache": { | ||||
|       "version": "3.2.1", | ||||
|       "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.2.1.tgz", | ||||
|       "integrity": "sha512-QurbMiY//02+0kN1adJkMHN44RcZ5kAXfhSnKUZmtSmhMTNqLitGArG1xOkt93NNyByTlLGAc5wIOF/dZ2ENOQ==", | ||||
|       "version": "3.2.2", | ||||
|       "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.2.2.tgz", | ||||
|       "integrity": "sha512-6D0Jq5JrLZRQ3VApeQwQkkV20ZZXjXsHNYXd9VjNUdi9E0h93wESpxfMJ2JWLCUCgHNLcfY0v3GjNM+2FdRMlg==", | ||||
|       "dependencies": { | ||||
|         "@actions/core": "^1.10.0", | ||||
|         "@actions/exec": "^1.0.1", | ||||
|         "@actions/glob": "^0.1.0", | ||||
|         "@actions/http-client": "^2.0.1", | ||||
|         "@actions/http-client": "^2.1.1", | ||||
|         "@actions/io": "^1.0.1", | ||||
|         "@azure/abort-controller": "^1.1.0", | ||||
|         "@azure/ms-rest-js": "^2.6.0", | ||||
| @ -111,9 +111,9 @@ | ||||
|       } | ||||
|     }, | ||||
|     "node_modules/@actions/http-client": { | ||||
|       "version": "2.1.0", | ||||
|       "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.1.0.tgz", | ||||
|       "integrity": "sha512-BonhODnXr3amchh4qkmjPMUO8mFi/zLaaCeCAJZqch8iQqyDnVIkySjB38VHAC8IJ+bnlgfOqlhpyCUZHlQsqw==", | ||||
|       "version": "2.1.1", | ||||
|       "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.1.1.tgz", | ||||
|       "integrity": "sha512-qhrkRMB40bbbLo7gF+0vu+X+UawOvQQqNAA/5Unx774RS8poaOhThDOG6BGmxvAnxhQnDp2BG/ZUm65xZILTpw==", | ||||
|       "dependencies": { | ||||
|         "tunnel": "^0.0.6" | ||||
|       } | ||||
| @ -135,15 +135,16 @@ | ||||
|       } | ||||
|     }, | ||||
|     "node_modules/@azure/core-auth": { | ||||
|       "version": "1.4.0", | ||||
|       "resolved": "https://registry.npmjs.org/@azure/core-auth/-/core-auth-1.4.0.tgz", | ||||
|       "integrity": "sha512-HFrcTgmuSuukRf/EdPmqBrc5l6Q5Uu+2TbuhaKbgaCpP2TfAeiNaQPAadxO+CYBRHGUzIDteMAjFspFLDLnKVQ==", | ||||
|       "version": "1.5.0", | ||||
|       "resolved": "https://registry.npmjs.org/@azure/core-auth/-/core-auth-1.5.0.tgz", | ||||
|       "integrity": "sha512-udzoBuYG1VBoHVohDTrvKjyzel34zt77Bhp7dQntVGGD0ehVq48owENbBG8fIgkHRNUBQH5k1r0hpoMu5L8+kw==", | ||||
|       "dependencies": { | ||||
|         "@azure/abort-controller": "^1.0.0", | ||||
|         "@azure/core-util": "^1.1.0", | ||||
|         "tslib": "^2.2.0" | ||||
|       }, | ||||
|       "engines": { | ||||
|         "node": ">=12.0.0" | ||||
|         "node": ">=14.0.0" | ||||
|       } | ||||
|     }, | ||||
|     "node_modules/@azure/core-http": { | ||||
| @ -229,9 +230,9 @@ | ||||
|       } | ||||
|     }, | ||||
|     "node_modules/@azure/core-util": { | ||||
|       "version": "1.3.2", | ||||
|       "resolved": "https://registry.npmjs.org/@azure/core-util/-/core-util-1.3.2.tgz", | ||||
|       "integrity": "sha512-2bECOUh88RvL1pMZTcc6OzfobBeWDBf5oBbhjIhT1MV9otMVWCzpOJkkiKtrnO88y5GGBelgY8At73KGAdbkeQ==", | ||||
|       "version": "1.4.0", | ||||
|       "resolved": "https://registry.npmjs.org/@azure/core-util/-/core-util-1.4.0.tgz", | ||||
|       "integrity": "sha512-eGAyJpm3skVQoLiRqm/xPa+SXi/NPDdSHMxbRAz2lSprd+Zs+qrpQGQQ2VQ3Nttu+nSZR4XoYQC71LbEI7jsig==", | ||||
|       "dependencies": { | ||||
|         "@azure/abort-controller": "^1.0.0", | ||||
|         "tslib": "^2.2.0" | ||||
| @ -306,9 +307,9 @@ | ||||
|       } | ||||
|     }, | ||||
|     "node_modules/@types/node": { | ||||
|       "version": "20.4.5", | ||||
|       "resolved": "https://registry.npmjs.org/@types/node/-/node-20.4.5.tgz", | ||||
|       "integrity": "sha512-rt40Nk13II9JwQBdeYqmbn2Q6IVTA5uPhvSO+JVqdXw/6/4glI6oR9ezty/A9Hg5u7JH4OmYmuQ+XvjKm0Datg==" | ||||
|       "version": "20.5.1", | ||||
|       "resolved": "https://registry.npmjs.org/@types/node/-/node-20.5.1.tgz", | ||||
|       "integrity": "sha512-4tT2UrL5LBqDwoed9wZ6N3umC4Yhz3W3FloMmiiG4JwmUJWpie0c7lcnUNd4gtMKuDEO4wRVS8B6Xa0uMRsMKg==" | ||||
|     }, | ||||
|     "node_modules/@types/node-fetch": { | ||||
|       "version": "2.6.4", | ||||
| @ -472,9 +473,9 @@ | ||||
|       } | ||||
|     }, | ||||
|     "node_modules/node-fetch": { | ||||
|       "version": "2.6.12", | ||||
|       "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.12.tgz", | ||||
|       "integrity": "sha512-C/fGU2E8ToujUivIO0H+tpQ6HWo4eEmchoPIoXtxCrVghxdKq+QOHqEZW7tuP3KlV3bC8FRMO5nMCC7Zm1VP6g==", | ||||
|       "version": "2.6.13", | ||||
|       "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.13.tgz", | ||||
|       "integrity": "sha512-StxNAxh15zr77QvvkmveSQ8uCQ4+v5FkvNTj0OESmiHu+VRi/gXArXtkWMElOsOUNLtUEvI4yS+rdtOHZTwlQA==", | ||||
|       "dependencies": { | ||||
|         "whatwg-url": "^5.0.0" | ||||
|       }, | ||||
| @ -526,9 +527,9 @@ | ||||
|       "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==" | ||||
|     }, | ||||
|     "node_modules/tslib": { | ||||
|       "version": "2.6.1", | ||||
|       "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.1.tgz", | ||||
|       "integrity": "sha512-t0hLfiEKfMUoqhG+U1oid7Pva4bbDPHYfJNiB7BiIjRkj1pyC++4N3huJfqY6aRH6VTB0rvtzQwjM4K6qpfOig==" | ||||
|       "version": "2.6.2", | ||||
|       "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz", | ||||
|       "integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==" | ||||
|     }, | ||||
|     "node_modules/tunnel": { | ||||
|       "version": "0.0.6", | ||||
|  | ||||
package.json
| @ -22,8 +22,8 @@ | ||||
|   }, | ||||
|   "homepage": "https://github.com/Swatinem/rust-cache#readme", | ||||
|   "dependencies": { | ||||
|     "@actions/buildjet-cache": "npm:github-actions.cache-buildjet@0.1.2", | ||||
|     "@actions/cache": "^3.2.1", | ||||
|     "@actions/buildjet-cache": "npm:github-actions.cache-buildjet@0.2.0", | ||||
|     "@actions/cache": "^3.2.2", | ||||
|     "@actions/core": "^1.10.0", | ||||
|     "@actions/exec": "^1.1.1", | ||||
|     "@actions/glob": "^0.4.0", | ||||
|  | ||||