From 127851b3990661ebf44282b5f297622c88ecac3f Mon Sep 17 00:00:00 2001
From: "Michael B. Gale"
Date: Mon, 27 Oct 2025 15:42:43 +0000
Subject: [PATCH 1/5] Add environment variable for skipping workflow validation

---
 lib/init-action.js | 20 +++++++++++---------
 src/environment.ts |  6 ++++++
 src/init-action.ts | 21 ++++++++++++---------
 3 files changed, 29 insertions(+), 18 deletions(-)

diff --git a/lib/init-action.js b/lib/init-action.js
index 202465611f..f5a537666b 100644
--- a/lib/init-action.js
+++ b/lib/init-action.js
@@ -92345,16 +92345,18 @@ async function run() {
     toolsVersion = initCodeQLResult.toolsVersion;
     toolsSource = initCodeQLResult.toolsSource;
     zstdAvailability = initCodeQLResult.zstdAvailability;
-    core13.startGroup("Validating workflow");
-    const validateWorkflowResult = await validateWorkflow(codeql, logger);
-    if (validateWorkflowResult === void 0) {
-      logger.info("Detected no issues with the code scanning workflow.");
-    } else {
-      logger.warning(
-        `Unable to validate code scanning workflow: ${validateWorkflowResult}`
-      );
+    if (process.env["CODEQL_ACTION_SKIP_WORKFLOW_VALIDATION" /* SKIP_WORKFLOW_VALIDATION */] !== "true") {
+      core13.startGroup("Validating workflow");
+      const validateWorkflowResult = await validateWorkflow(codeql, logger);
+      if (validateWorkflowResult === void 0) {
+        logger.info("Detected no issues with the code scanning workflow.");
+      } else {
+        logger.warning(
+          `Unable to validate code scanning workflow: ${validateWorkflowResult}`
+        );
+      }
+      core13.endGroup();
     }
-    core13.endGroup();
     if (
       // Only enable the experimental features env variable for Rust analysis if the user has explicitly
       // requested rust - don't enable it via language autodetection.
diff --git a/src/environment.ts b/src/environment.ts
index 6986641222..16a016aaaa 100644
--- a/src/environment.ts
+++ b/src/environment.ts
@@ -137,4 +137,10 @@ export enum EnvVar {
    * This setting is more specific than `CODEQL_ACTION_TEST_MODE`, which implies this option.
    */
   SKIP_SARIF_UPLOAD = "CODEQL_ACTION_SKIP_SARIF_UPLOAD",
+
+  /**
+   * Whether to skip workflow validation. Intended for internal use, where we know that
+   * the workflow is valid and validation is not necessary.
+   */
+  SKIP_WORKFLOW_VALIDATION = "CODEQL_ACTION_SKIP_WORKFLOW_VALIDATION",
 }
diff --git a/src/init-action.ts b/src/init-action.ts
index 8961b09f4f..d10c6a81a9 100644
--- a/src/init-action.ts
+++ b/src/init-action.ts
@@ -288,16 +288,19 @@ async function run() {
     toolsSource = initCodeQLResult.toolsSource;
     zstdAvailability = initCodeQLResult.zstdAvailability;
 
-    core.startGroup("Validating workflow");
-    const validateWorkflowResult = await validateWorkflow(codeql, logger);
-    if (validateWorkflowResult === undefined) {
-      logger.info("Detected no issues with the code scanning workflow.");
-    } else {
-      logger.warning(
-        `Unable to validate code scanning workflow: ${validateWorkflowResult}`,
-      );
+    // Check the workflow for problems, unless `SKIP_WORKFLOW_VALIDATION` is `true`.
+    if (process.env[EnvVar.SKIP_WORKFLOW_VALIDATION] !== "true") {
+      core.startGroup("Validating workflow");
+      const validateWorkflowResult = await validateWorkflow(codeql, logger);
+      if (validateWorkflowResult === undefined) {
+        logger.info("Detected no issues with the code scanning workflow.");
+      } else {
+        logger.warning(
+          `Unable to validate code scanning workflow: ${validateWorkflowResult}`,
+        );
+      }
+      core.endGroup();
     }
-    core.endGroup();
 
     // Set CODEQL_ENABLE_EXPERIMENTAL_FEATURES for Rust if between 2.19.3 (included) and 2.22.1 (excluded)
     // We need to set this environment variable before initializing the config, otherwise Rust

From 06fbd897c4b729f9b9042ad9189b05925934abc0 Mon Sep 17 00:00:00 2001
From: "Michael B. Gale"
Date: Mon, 27 Oct 2025 15:57:44 +0000
Subject: [PATCH 2/5] Move workflow check to a function in `init.ts` and add tests

---
 lib/analyze-action.js      | 376 ++++++++--------
 lib/init-action-post.js    | 863 +++++++++++++++++++------------------
 lib/init-action.js         | 778 ++++++++++++++++-----------------
 lib/setup-codeql-action.js | 354 +++++++--------
 lib/upload-lib.js          | 332 +++++++-------
 lib/upload-sarif-action.js | 344 ++++++++-------
 src/init-action.ts         |  18 +-
 src/init.test.ts           |  58 +++
 src/init.ts                |  25 ++
 9 files changed, 1653 insertions(+), 1495 deletions(-)

diff --git a/lib/analyze-action.js b/lib/analyze-action.js
index d3148efde0..6b4092b81c 100644
--- a/lib/analyze-action.js
+++ b/lib/analyze-action.js
@@ -19743,10 +19743,10 @@ Support boolean input list: \`true | True | TRUE | false | False | FALSE\``);
     (0, command_1.issueCommand)("error", (0, utils_1.toCommandProperties)(properties), message instanceof Error ? message.toString() : message);
   }
   exports2.error = error2;
-  function warning7(message, properties = {}) {
-    (0, command_1.issueCommand)("warning", (0, utils_1.toCommandProperties)(properties), message instanceof Error ? message.toString() : message);
-  }
-  exports2.warning = warning7;
+  function warning8(message, properties = {}) {
+    (0, command_1.issueCommand)("warning", (0, utils_1.toCommandProperties)(properties), message instanceof Error ? message.toString() : message);
+  }
+  exports2.warning = warning8;
   function notice(message, properties = {}) {
     (0, command_1.issueCommand)("notice", (0, utils_1.toCommandProperties)(properties), message instanceof Error ?
message.toString() : message); } @@ -19755,22 +19755,22 @@ Support boolean input list: \`true | True | TRUE | false | False | FALSE\``); process.stdout.write(message + os5.EOL); } exports2.info = info4; - function startGroup3(name) { + function startGroup4(name) { (0, command_1.issue)("group", name); } - exports2.startGroup = startGroup3; - function endGroup3() { + exports2.startGroup = startGroup4; + function endGroup4() { (0, command_1.issue)("endgroup"); } - exports2.endGroup = endGroup3; + exports2.endGroup = endGroup4; function group(name, fn) { return __awaiter4(this, void 0, void 0, function* () { - startGroup3(name); + startGroup4(name); let result; try { result = yield fn(); } finally { - endGroup3(); + endGroup4(); } return result; }); @@ -35312,7 +35312,7 @@ var require_internal_glob_options_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getOptions = void 0; - var core15 = __importStar4(require_core()); + var core17 = __importStar4(require_core()); function getOptions(copy) { const result = { followSymbolicLinks: true, @@ -35322,15 +35322,15 @@ var require_internal_glob_options_helper = __commonJS({ if (copy) { if (typeof copy.followSymbolicLinks === "boolean") { result.followSymbolicLinks = copy.followSymbolicLinks; - core15.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); + core17.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); } if (typeof copy.implicitDescendants === "boolean") { result.implicitDescendants = copy.implicitDescendants; - core15.debug(`implicitDescendants '${result.implicitDescendants}'`); + core17.debug(`implicitDescendants '${result.implicitDescendants}'`); } if (typeof copy.omitBrokenSymbolicLinks === "boolean") { result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; - core15.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); + core17.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); } } return result; @@ -36784,7 +36784,7 @@ var require_internal_globber = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; - var core15 = __importStar4(require_core()); + var core17 = __importStar4(require_core()); var fs20 = __importStar4(require("fs")); var globOptionsHelper = __importStar4(require_internal_glob_options_helper()); var path20 = __importStar4(require("path")); @@ -36835,7 +36835,7 @@ var require_internal_globber = __commonJS({ } const stack = []; for (const searchPath of patternHelper.getSearchPaths(patterns)) { - core15.debug(`Search path '${searchPath}'`); + core17.debug(`Search path '${searchPath}'`); try { yield __await4(fs20.promises.lstat(searchPath)); } catch (err) { @@ -36907,7 +36907,7 @@ var require_internal_globber = __commonJS({ } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { - core15.debug(`Broken symlink '${item.path}'`); + core17.debug(`Broken symlink '${item.path}'`); return void 0; } throw new Error(`No information found for the path '${item.path}'. 
This may indicate a broken symbolic link.`); @@ -36923,7 +36923,7 @@ var require_internal_globber = __commonJS({ traversalChain.pop(); } if (traversalChain.some((x) => x === realPath)) { - core15.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); + core17.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); return void 0; } traversalChain.push(realPath); @@ -38247,7 +38247,7 @@ var require_cacheUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getRuntimeToken = exports2.getCacheVersion = exports2.assertDefined = exports2.getGnuTarPathOnWindows = exports2.getCacheFileName = exports2.getCompressionMethod = exports2.unlinkFile = exports2.resolvePaths = exports2.getArchiveFileSizeInBytes = exports2.createTempDirectory = void 0; - var core15 = __importStar4(require_core()); + var core17 = __importStar4(require_core()); var exec2 = __importStar4(require_exec()); var glob2 = __importStar4(require_glob()); var io7 = __importStar4(require_io()); @@ -38300,7 +38300,7 @@ var require_cacheUtils = __commonJS({ _e = false; const file = _c; const relativeFile = path20.relative(workspace, file).replace(new RegExp(`\\${path20.sep}`, "g"), "/"); - core15.debug(`Matched: ${relativeFile}`); + core17.debug(`Matched: ${relativeFile}`); if (relativeFile === "") { paths.push("."); } else { @@ -38330,7 +38330,7 @@ var require_cacheUtils = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { let versionOutput = ""; additionalArgs.push("--version"); - core15.debug(`Checking ${app} ${additionalArgs.join(" ")}`); + core17.debug(`Checking ${app} ${additionalArgs.join(" ")}`); try { yield exec2.exec(`${app}`, additionalArgs, { ignoreReturnCode: true, @@ -38341,10 +38341,10 @@ var require_cacheUtils = __commonJS({ } }); } catch (err) { - core15.debug(err.message); + core17.debug(err.message); } versionOutput = versionOutput.trim(); - core15.debug(versionOutput); + core17.debug(versionOutput); return versionOutput; }); } @@ -38352,7 +38352,7 @@ var require_cacheUtils = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { const versionOutput = yield getVersion("zstd", ["--quiet"]); const version = semver8.clean(versionOutput); - core15.debug(`zstd version: ${version}`); + core17.debug(`zstd version: ${version}`); if (versionOutput === "") { return constants_1.CompressionMethod.Gzip; } else { @@ -73664,7 +73664,7 @@ var require_uploadUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.uploadCacheArchiveSDK = exports2.UploadProgress = void 0; - var core15 = __importStar4(require_core()); + var core17 = __importStar4(require_core()); var storage_blob_1 = require_dist7(); var errors_1 = require_errors2(); var UploadProgress = class { @@ -73706,7 +73706,7 @@ var require_uploadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const uploadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core15.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); + core17.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -73761,14 +73761,14 @@ var require_uploadUtils = __commonJS({ }; try { uploadProgress.startDisplayTimer(); - core15.debug(`BlobClient: 
${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); + core17.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); const response = yield blockBlobClient.uploadFile(archivePath, uploadOptions); if (response._response.status >= 400) { throw new errors_1.InvalidResponseError(`uploadCacheArchiveSDK: upload failed with status code ${response._response.status}`); } return response; } catch (error2) { - core15.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error2.message}`); + core17.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error2.message}`); throw error2; } finally { uploadProgress.stopDisplayTimer(); @@ -73839,7 +73839,7 @@ var require_requestUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.retryHttpClientResponse = exports2.retryTypedResponse = exports2.retry = exports2.isRetryableStatusCode = exports2.isServerErrorStatusCode = exports2.isSuccessStatusCode = void 0; - var core15 = __importStar4(require_core()); + var core17 = __importStar4(require_core()); var http_client_1 = require_lib(); var constants_1 = require_constants10(); function isSuccessStatusCode(statusCode) { @@ -73900,9 +73900,9 @@ var require_requestUtils = __commonJS({ isRetryable = isRetryableStatusCode(statusCode); errorMessage = `Cache service responded with ${statusCode}`; } - core15.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); + core17.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); if (!isRetryable) { - core15.debug(`${name} - Error is not retryable`); + core17.debug(`${name} - Error is not retryable`); break; } yield sleep(delay2); @@ -74007,7 +74007,7 @@ var require_downloadUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.downloadCacheStorageSDK = exports2.downloadCacheHttpClientConcurrent = exports2.downloadCacheHttpClient = exports2.DownloadProgress = void 0; - var core15 = __importStar4(require_core()); + var core17 = __importStar4(require_core()); var http_client_1 = require_lib(); var storage_blob_1 = require_dist7(); var buffer = __importStar4(require("buffer")); @@ -74045,7 +74045,7 @@ var require_downloadUtils = __commonJS({ this.segmentIndex = this.segmentIndex + 1; this.segmentSize = segmentSize; this.receivedBytes = 0; - core15.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); + core17.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); } /** * Sets the number of bytes received for the current segment. 
@@ -74079,7 +74079,7 @@ var require_downloadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const downloadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core15.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); + core17.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -74129,7 +74129,7 @@ var require_downloadUtils = __commonJS({ })); downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => { downloadResponse.message.destroy(); - core15.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); + core17.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); }); yield pipeResponseToStream(downloadResponse, writeStream); const contentLengthHeader = downloadResponse.message.headers["content-length"]; @@ -74140,7 +74140,7 @@ var require_downloadUtils = __commonJS({ throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`); } } else { - core15.debug("Unable to validate download, no Content-Length header"); + core17.debug("Unable to validate download, no Content-Length header"); } }); } @@ -74260,7 +74260,7 @@ var require_downloadUtils = __commonJS({ const properties = yield client.getProperties(); const contentLength = (_a = properties.contentLength) !== null && _a !== void 0 ? _a : -1; if (contentLength < 0) { - core15.debug("Unable to determine content length, downloading file with http-client..."); + core17.debug("Unable to determine content length, downloading file with http-client..."); yield downloadCacheHttpClient(archiveLocation, archivePath); } else { const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH); @@ -74340,7 +74340,7 @@ var require_options = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getDownloadOptions = exports2.getUploadOptions = void 0; - var core15 = __importStar4(require_core()); + var core17 = __importStar4(require_core()); function getUploadOptions(copy) { const result = { useAzureSdk: false, @@ -74360,9 +74360,9 @@ var require_options = __commonJS({ } result.uploadConcurrency = !isNaN(Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) ? Math.min(32, Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) : result.uploadConcurrency; result.uploadChunkSize = !isNaN(Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"])) ? 
Math.min(128 * 1024 * 1024, Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"]) * 1024 * 1024) : result.uploadChunkSize; - core15.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core15.debug(`Upload concurrency: ${result.uploadConcurrency}`); - core15.debug(`Upload chunk size: ${result.uploadChunkSize}`); + core17.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core17.debug(`Upload concurrency: ${result.uploadConcurrency}`); + core17.debug(`Upload chunk size: ${result.uploadChunkSize}`); return result; } exports2.getUploadOptions = getUploadOptions; @@ -74399,12 +74399,12 @@ var require_options = __commonJS({ if (segmentDownloadTimeoutMins && !isNaN(Number(segmentDownloadTimeoutMins)) && isFinite(Number(segmentDownloadTimeoutMins))) { result.segmentTimeoutInMs = Number(segmentDownloadTimeoutMins) * 60 * 1e3; } - core15.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core15.debug(`Download concurrency: ${result.downloadConcurrency}`); - core15.debug(`Request timeout (ms): ${result.timeoutInMs}`); - core15.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); - core15.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); - core15.debug(`Lookup only: ${result.lookupOnly}`); + core17.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core17.debug(`Download concurrency: ${result.downloadConcurrency}`); + core17.debug(`Request timeout (ms): ${result.timeoutInMs}`); + core17.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); + core17.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); + core17.debug(`Lookup only: ${result.lookupOnly}`); return result; } exports2.getDownloadOptions = getDownloadOptions; @@ -74584,7 +74584,7 @@ var require_cacheHttpClient = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.saveCache = exports2.reserveCache = exports2.downloadCache = exports2.getCacheEntry = void 0; - var core15 = __importStar4(require_core()); + var core17 = __importStar4(require_core()); var http_client_1 = require_lib(); var auth_1 = require_auth(); var fs20 = __importStar4(require("fs")); @@ -74602,7 +74602,7 @@ var require_cacheHttpClient = __commonJS({ throw new Error("Cache Service Url not found, unable to restore cache."); } const url2 = `${baseUrl}_apis/artifactcache/${resource}`; - core15.debug(`Resource Url: ${url2}`); + core17.debug(`Resource Url: ${url2}`); return url2; } function createAcceptHeader(type2, apiVersion) { @@ -74630,7 +74630,7 @@ var require_cacheHttpClient = __commonJS({ return httpClient.getJson(getCacheApiUrl(resource)); })); if (response.statusCode === 204) { - if (core15.isDebug()) { + if (core17.isDebug()) { yield printCachesListForDiagnostics(keys[0], httpClient, version); } return null; @@ -74643,9 +74643,9 @@ var require_cacheHttpClient = __commonJS({ if (!cacheDownloadUrl) { throw new Error("Cache not found."); } - core15.setSecret(cacheDownloadUrl); - core15.debug(`Cache Result:`); - core15.debug(JSON.stringify(cacheResult)); + core17.setSecret(cacheDownloadUrl); + core17.debug(`Cache Result:`); + core17.debug(JSON.stringify(cacheResult)); return cacheResult; }); } @@ -74660,10 +74660,10 @@ var require_cacheHttpClient = __commonJS({ const cacheListResult = response.result; const totalCount = cacheListResult === null || cacheListResult === void 0 ? 
void 0 : cacheListResult.totalCount; if (totalCount && totalCount > 0) { - core15.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key + core17.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key Other caches with similar key:`); for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) { - core15.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); + core17.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); } } } @@ -74708,7 +74708,7 @@ Other caches with similar key:`); } function uploadChunk(httpClient, resourceUrl, openStream, start, end) { return __awaiter4(this, void 0, void 0, function* () { - core15.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); + core17.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); const additionalHeaders = { "Content-Type": "application/octet-stream", "Content-Range": getContentRange(start, end) @@ -74730,7 +74730,7 @@ Other caches with similar key:`); const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency); const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize); const parallelUploads = [...new Array(concurrency).keys()]; - core15.debug("Awaiting all uploads"); + core17.debug("Awaiting all uploads"); let offset = 0; try { yield Promise.all(parallelUploads.map(() => __awaiter4(this, void 0, void 0, function* () { @@ -74773,16 +74773,16 @@ Other caches with similar key:`); yield (0, uploadUtils_1.uploadCacheArchiveSDK)(signedUploadURL, archivePath, options); } else { const httpClient = createHttpClient(); - core15.debug("Upload cache"); + core17.debug("Upload cache"); yield uploadFile(httpClient, cacheId, archivePath, options); - core15.debug("Commiting cache"); + core17.debug("Commiting cache"); const cacheSize = utils.getArchiveFileSizeInBytes(archivePath); - core15.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); + core17.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); const commitCacheResponse = yield commitCache(httpClient, 
cacheId, cacheSize); if (!(0, requestUtils_1.isSuccessStatusCode)(commitCacheResponse.statusCode)) { throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`); } - core15.info("Cache saved successfully"); + core17.info("Cache saved successfully"); } }); } @@ -80234,7 +80234,7 @@ var require_cache3 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.saveCache = exports2.restoreCache = exports2.isFeatureAvailable = exports2.FinalizeCacheError = exports2.ReserveCacheError = exports2.ValidationError = void 0; - var core15 = __importStar4(require_core()); + var core17 = __importStar4(require_core()); var path20 = __importStar4(require("path")); var utils = __importStar4(require_cacheUtils()); var cacheHttpClient = __importStar4(require_cacheHttpClient()); @@ -80294,7 +80294,7 @@ var require_cache3 = __commonJS({ function restoreCache4(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { return __awaiter4(this, void 0, void 0, function* () { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core15.debug(`Cache service version: ${cacheServiceVersion}`); + core17.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); switch (cacheServiceVersion) { case "v2": @@ -80310,8 +80310,8 @@ var require_cache3 = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core15.debug("Resolved Keys:"); - core15.debug(JSON.stringify(keys)); + core17.debug("Resolved Keys:"); + core17.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -80329,19 +80329,19 @@ var require_cache3 = __commonJS({ return void 0; } if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { - core15.info("Lookup only - skipping download"); + core17.info("Lookup only - skipping download"); return cacheEntry.cacheKey; } archivePath = path20.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core15.debug(`Archive Path: ${archivePath}`); + core17.debug(`Archive Path: ${archivePath}`); yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); - if (core15.isDebug()) { + if (core17.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core15.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + core17.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); yield (0, tar_1.extractTar)(archivePath, compressionMethod); - core15.info("Cache restored successfully"); + core17.info("Cache restored successfully"); return cacheEntry.cacheKey; } catch (error2) { const typedError = error2; @@ -80349,16 +80349,16 @@ var require_cache3 = __commonJS({ throw error2; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core15.error(`Failed to restore: ${error2.message}`); + core17.error(`Failed to restore: ${error2.message}`); } else { - core15.warning(`Failed to restore: ${error2.message}`); + core17.warning(`Failed to restore: ${error2.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error2) { - core15.debug(`Failed to delete archive: ${error2}`); + core17.debug(`Failed to delete archive: ${error2}`); } } return void 0; @@ -80369,8 +80369,8 @@ var require_cache3 = __commonJS({ options = Object.assign(Object.assign({}, options), { useAzureSdk: true }); restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core15.debug("Resolved Keys:"); - core15.debug(JSON.stringify(keys)); + core17.debug("Resolved Keys:"); + core17.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -80388,30 +80388,30 @@ var require_cache3 = __commonJS({ }; const response = yield twirpClient.GetCacheEntryDownloadURL(request); if (!response.ok) { - core15.debug(`Cache not found for version ${request.version} of keys: ${keys.join(", ")}`); + core17.debug(`Cache not found for version ${request.version} of keys: ${keys.join(", ")}`); return void 0; } const isRestoreKeyMatch = request.key !== response.matchedKey; if (isRestoreKeyMatch) { - core15.info(`Cache hit for restore-key: ${response.matchedKey}`); + core17.info(`Cache hit for restore-key: ${response.matchedKey}`); } else { - core15.info(`Cache hit for: ${response.matchedKey}`); + core17.info(`Cache hit for: ${response.matchedKey}`); } if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { - core15.info("Lookup only - skipping download"); + core17.info("Lookup only - skipping download"); return response.matchedKey; } archivePath = path20.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core15.debug(`Archive path: ${archivePath}`); - core15.debug(`Starting download of archive to: ${archivePath}`); + core17.debug(`Archive path: ${archivePath}`); + core17.debug(`Starting download of archive to: ${archivePath}`); yield cacheHttpClient.downloadCache(response.signedDownloadUrl, archivePath, options); const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core15.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); - if (core15.isDebug()) { + core17.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + if (core17.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } yield (0, tar_1.extractTar)(archivePath, compressionMethod); - core15.info("Cache restored successfully"); + core17.info("Cache restored successfully"); return response.matchedKey; } catch (error2) { const typedError = error2; @@ -80419,9 +80419,9 @@ var require_cache3 = __commonJS({ throw error2; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core15.error(`Failed to restore: ${error2.message}`); + core17.error(`Failed to restore: ${error2.message}`); } else { - core15.warning(`Failed to restore: ${error2.message}`); + core17.warning(`Failed to restore: ${error2.message}`); } } } finally { @@ -80430,7 +80430,7 @@ var require_cache3 = __commonJS({ yield utils.unlinkFile(archivePath); } } catch (error2) { - core15.debug(`Failed to delete archive: ${error2}`); + core17.debug(`Failed to delete archive: ${error2}`); } } return void 0; @@ -80439,7 +80439,7 @@ var require_cache3 = __commonJS({ function saveCache4(paths, key, options, enableCrossOsArchive = false) { return __awaiter4(this, void 0, void 0, function* () { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core15.debug(`Cache service version: ${cacheServiceVersion}`); + core17.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); checkKey(key); switch (cacheServiceVersion) { @@ -80458,26 +80458,26 @@ var require_cache3 = __commonJS({ const compressionMethod = yield utils.getCompressionMethod(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core15.debug("Cache Paths:"); - core15.debug(`${JSON.stringify(cachePaths)}`); + core17.debug("Cache Paths:"); + core17.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = path20.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core15.debug(`Archive Path: ${archivePath}`); + core17.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core15.isDebug()) { + if (core17.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const fileSizeLimit = 10 * 1024 * 1024 * 1024; const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core15.debug(`File Size: ${archiveFileSize}`); + core17.debug(`File Size: ${archiveFileSize}`); if 
(archiveFileSize > fileSizeLimit && !(0, config_1.isGhes)()) { throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); } - core15.debug("Reserving Cache"); + core17.debug("Reserving Cache"); const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, { compressionMethod, enableCrossOsArchive, @@ -80490,26 +80490,26 @@ var require_cache3 = __commonJS({ } else { throw new ReserveCacheError2(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? void 0 : _e.message}`); } - core15.debug(`Saving Cache (ID: ${cacheId})`); + core17.debug(`Saving Cache (ID: ${cacheId})`); yield cacheHttpClient.saveCache(cacheId, archivePath, "", options); } catch (error2) { const typedError = error2; if (typedError.name === ValidationError.name) { throw error2; } else if (typedError.name === ReserveCacheError2.name) { - core15.info(`Failed to save: ${typedError.message}`); + core17.info(`Failed to save: ${typedError.message}`); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core15.error(`Failed to save: ${typedError.message}`); + core17.error(`Failed to save: ${typedError.message}`); } else { - core15.warning(`Failed to save: ${typedError.message}`); + core17.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error2) { - core15.debug(`Failed to delete archive: ${error2}`); + core17.debug(`Failed to delete archive: ${error2}`); } } return cacheId; @@ -80522,23 +80522,23 @@ var require_cache3 = __commonJS({ const twirpClient = cacheTwirpClient.internalCacheTwirpClient(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core15.debug("Cache Paths:"); - core15.debug(`${JSON.stringify(cachePaths)}`); + core17.debug("Cache Paths:"); + core17.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = path20.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core15.debug(`Archive Path: ${archivePath}`); + core17.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core15.isDebug()) { + if (core17.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core15.debug(`File Size: ${archiveFileSize}`); + core17.debug(`File Size: ${archiveFileSize}`); options.archiveSizeBytes = archiveFileSize; - core15.debug("Reserving Cache"); + core17.debug("Reserving Cache"); const version = utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive); const request = { key, @@ -80549,16 +80549,16 @@ var require_cache3 = __commonJS({ const response = yield twirpClient.CreateCacheEntry(request); if (!response.ok) { if (response.message) { - core15.warning(`Cache reservation failed: ${response.message}`); + core17.warning(`Cache reservation failed: ${response.message}`); } throw new Error(response.message || "Response was not ok"); } signedUploadUrl = 
response.signedUploadUrl; } catch (error2) { - core15.debug(`Failed to reserve cache: ${error2}`); + core17.debug(`Failed to reserve cache: ${error2}`); throw new ReserveCacheError2(`Unable to reserve cache with key ${key}, another job may be creating this cache.`); } - core15.debug(`Attempting to upload cache located at: ${archivePath}`); + core17.debug(`Attempting to upload cache located at: ${archivePath}`); yield cacheHttpClient.saveCache(cacheId, archivePath, signedUploadUrl, options); const finalizeRequest = { key, @@ -80566,7 +80566,7 @@ var require_cache3 = __commonJS({ sizeBytes: `${archiveFileSize}` }; const finalizeResponse = yield twirpClient.FinalizeCacheEntryUpload(finalizeRequest); - core15.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); + core17.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); if (!finalizeResponse.ok) { if (finalizeResponse.message) { throw new FinalizeCacheError(finalizeResponse.message); @@ -80579,21 +80579,21 @@ var require_cache3 = __commonJS({ if (typedError.name === ValidationError.name) { throw error2; } else if (typedError.name === ReserveCacheError2.name) { - core15.info(`Failed to save: ${typedError.message}`); + core17.info(`Failed to save: ${typedError.message}`); } else if (typedError.name === FinalizeCacheError.name) { - core15.warning(typedError.message); + core17.warning(typedError.message); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core15.error(`Failed to save: ${typedError.message}`); + core17.error(`Failed to save: ${typedError.message}`); } else { - core15.warning(`Failed to save: ${typedError.message}`); + core17.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error2) { - core15.debug(`Failed to delete archive: ${error2}`); + core17.debug(`Failed to delete archive: ${error2}`); } } return cacheId; @@ -80801,7 +80801,7 @@ var require_retry_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.RetryHelper = void 0; - var core15 = __importStar4(require_core()); + var core17 = __importStar4(require_core()); var RetryHelper = class { constructor(maxAttempts, minSeconds, maxSeconds) { if (maxAttempts < 1) { @@ -80824,10 +80824,10 @@ var require_retry_helper = __commonJS({ if (isRetryable && !isRetryable(err)) { throw err; } - core15.info(err.message); + core17.info(err.message); } const seconds = this.getSleepAmount(); - core15.info(`Waiting ${seconds} seconds before trying again`); + core17.info(`Waiting ${seconds} seconds before trying again`); yield this.sleep(seconds); attempt++; } @@ -80907,7 +80907,7 @@ var require_tool_cache = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.evaluateVersions = exports2.isExplicitVersion = exports2.findFromManifest = exports2.getManifestFromRepo = exports2.findAllVersions = exports2.find = exports2.cacheFile = exports2.cacheDir = exports2.extractZip = exports2.extractXar = exports2.extractTar = exports2.extract7z = exports2.downloadTool = exports2.HTTPError = void 0; - var core15 = __importStar4(require_core()); + var core17 = __importStar4(require_core()); var io7 = __importStar4(require_io()); var crypto2 = __importStar4(require("crypto")); var fs20 = __importStar4(require("fs")); @@ -80936,8 +80936,8 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { dest = dest || 
path20.join(_getTempDirectory(), crypto2.randomUUID()); yield io7.mkdirP(path20.dirname(dest)); - core15.debug(`Downloading ${url2}`); - core15.debug(`Destination ${dest}`); + core17.debug(`Downloading ${url2}`); + core17.debug(`Destination ${dest}`); const maxAttempts = 3; const minSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS", 10); const maxSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS", 20); @@ -80964,7 +80964,7 @@ var require_tool_cache = __commonJS({ allowRetries: false }); if (auth) { - core15.debug("set auth"); + core17.debug("set auth"); if (headers === void 0) { headers = {}; } @@ -80973,7 +80973,7 @@ var require_tool_cache = __commonJS({ const response = yield http.get(url2, headers); if (response.message.statusCode !== 200) { const err = new HTTPError2(response.message.statusCode); - core15.debug(`Failed to download from "${url2}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); + core17.debug(`Failed to download from "${url2}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); throw err; } const pipeline = util.promisify(stream2.pipeline); @@ -80982,16 +80982,16 @@ var require_tool_cache = __commonJS({ let succeeded = false; try { yield pipeline(readStream, fs20.createWriteStream(dest)); - core15.debug("download complete"); + core17.debug("download complete"); succeeded = true; return dest; } finally { if (!succeeded) { - core15.debug("download failed"); + core17.debug("download failed"); try { yield io7.rmRF(dest); } catch (err) { - core15.debug(`Failed to delete '${dest}'. ${err.message}`); + core17.debug(`Failed to delete '${dest}'. ${err.message}`); } } } @@ -81006,7 +81006,7 @@ var require_tool_cache = __commonJS({ process.chdir(dest); if (_7zPath) { try { - const logLevel = core15.isDebug() ? "-bb1" : "-bb0"; + const logLevel = core17.isDebug() ? 
"-bb1" : "-bb0"; const args = [ "x", logLevel, @@ -81056,7 +81056,7 @@ var require_tool_cache = __commonJS({ throw new Error("parameter 'file' is required"); } dest = yield _createExtractFolder(dest); - core15.debug("Checking tar --version"); + core17.debug("Checking tar --version"); let versionOutput = ""; yield (0, exec_1.exec)("tar --version", [], { ignoreReturnCode: true, @@ -81066,7 +81066,7 @@ var require_tool_cache = __commonJS({ stderr: (data) => versionOutput += data.toString() } }); - core15.debug(versionOutput.trim()); + core17.debug(versionOutput.trim()); const isGnuTar = versionOutput.toUpperCase().includes("GNU TAR"); let args; if (flags instanceof Array) { @@ -81074,7 +81074,7 @@ var require_tool_cache = __commonJS({ } else { args = [flags]; } - if (core15.isDebug() && !flags.includes("v")) { + if (core17.isDebug() && !flags.includes("v")) { args.push("-v"); } let destArg = dest; @@ -81106,7 +81106,7 @@ var require_tool_cache = __commonJS({ args = [flags]; } args.push("-x", "-C", dest, "-f", file); - if (core15.isDebug()) { + if (core17.isDebug()) { args.push("-v"); } const xarPath = yield io7.which("xar", true); @@ -81151,7 +81151,7 @@ var require_tool_cache = __commonJS({ "-Command", pwshCommand ]; - core15.debug(`Using pwsh at path: ${pwshPath}`); + core17.debug(`Using pwsh at path: ${pwshPath}`); yield (0, exec_1.exec)(`"${pwshPath}"`, args); } else { const powershellCommand = [ @@ -81171,7 +81171,7 @@ var require_tool_cache = __commonJS({ powershellCommand ]; const powershellPath = yield io7.which("powershell", true); - core15.debug(`Using powershell at path: ${powershellPath}`); + core17.debug(`Using powershell at path: ${powershellPath}`); yield (0, exec_1.exec)(`"${powershellPath}"`, args); } }); @@ -81180,7 +81180,7 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { const unzipPath = yield io7.which("unzip", true); const args = [file]; - if (!core15.isDebug()) { + if (!core17.isDebug()) { args.unshift("-q"); } args.unshift("-o"); @@ -81191,8 +81191,8 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { version = semver8.clean(version) || version; arch2 = arch2 || os5.arch(); - core15.debug(`Caching tool ${tool} ${version} ${arch2}`); - core15.debug(`source dir: ${sourceDir}`); + core17.debug(`Caching tool ${tool} ${version} ${arch2}`); + core17.debug(`source dir: ${sourceDir}`); if (!fs20.statSync(sourceDir).isDirectory()) { throw new Error("sourceDir is not a directory"); } @@ -81210,14 +81210,14 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { version = semver8.clean(version) || version; arch2 = arch2 || os5.arch(); - core15.debug(`Caching tool ${tool} ${version} ${arch2}`); - core15.debug(`source file: ${sourceFile}`); + core17.debug(`Caching tool ${tool} ${version} ${arch2}`); + core17.debug(`source file: ${sourceFile}`); if (!fs20.statSync(sourceFile).isFile()) { throw new Error("sourceFile is not a file"); } const destFolder = yield _createToolPath(tool, version, arch2); const destPath = path20.join(destFolder, targetFile); - core15.debug(`destination file ${destPath}`); + core17.debug(`destination file ${destPath}`); yield io7.cp(sourceFile, destPath); _completeToolPath(tool, version, arch2); return destFolder; @@ -81241,12 +81241,12 @@ var require_tool_cache = __commonJS({ if (versionSpec) { versionSpec = semver8.clean(versionSpec) || ""; const cachePath = path20.join(_getCacheDirectory(), toolName, versionSpec, arch2); - 
core15.debug(`checking cache: ${cachePath}`); + core17.debug(`checking cache: ${cachePath}`); if (fs20.existsSync(cachePath) && fs20.existsSync(`${cachePath}.complete`)) { - core15.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); + core17.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); toolPath = cachePath; } else { - core15.debug("not found"); + core17.debug("not found"); } } return toolPath; @@ -81277,7 +81277,7 @@ var require_tool_cache = __commonJS({ const http = new httpm.HttpClient("tool-cache"); const headers = {}; if (auth) { - core15.debug("set auth"); + core17.debug("set auth"); headers.authorization = auth; } const response = yield http.getJson(treeUrl, headers); @@ -81298,7 +81298,7 @@ var require_tool_cache = __commonJS({ try { releases = JSON.parse(versionsRaw); } catch (_a) { - core15.debug("Invalid json"); + core17.debug("Invalid json"); } } return releases; @@ -81324,7 +81324,7 @@ var require_tool_cache = __commonJS({ function _createToolPath(tool, version, arch2) { return __awaiter4(this, void 0, void 0, function* () { const folderPath = path20.join(_getCacheDirectory(), tool, semver8.clean(version) || version, arch2 || ""); - core15.debug(`destination ${folderPath}`); + core17.debug(`destination ${folderPath}`); const markerPath = `${folderPath}.complete`; yield io7.rmRF(folderPath); yield io7.rmRF(markerPath); @@ -81336,19 +81336,19 @@ var require_tool_cache = __commonJS({ const folderPath = path20.join(_getCacheDirectory(), tool, semver8.clean(version) || version, arch2 || ""); const markerPath = `${folderPath}.complete`; fs20.writeFileSync(markerPath, ""); - core15.debug("finished caching tool"); + core17.debug("finished caching tool"); } function isExplicitVersion(versionSpec) { const c = semver8.clean(versionSpec) || ""; - core15.debug(`isExplicit: ${c}`); + core17.debug(`isExplicit: ${c}`); const valid3 = semver8.valid(c) != null; - core15.debug(`explicit? ${valid3}`); + core17.debug(`explicit? 
${valid3}`); return valid3; } exports2.isExplicitVersion = isExplicitVersion; function evaluateVersions(versions, versionSpec) { let version = ""; - core15.debug(`evaluating ${versions.length} versions`); + core17.debug(`evaluating ${versions.length} versions`); versions = versions.sort((a, b) => { if (semver8.gt(a, b)) { return 1; @@ -81364,9 +81364,9 @@ var require_tool_cache = __commonJS({ } } if (version) { - core15.debug(`matched: ${version}`); + core17.debug(`matched: ${version}`); } else { - core15.debug("match not found"); + core17.debug("match not found"); } return version; } @@ -81975,7 +81975,7 @@ var require_internal_glob_options_helper2 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getOptions = void 0; - var core15 = __importStar4(require_core()); + var core17 = __importStar4(require_core()); function getOptions(copy) { const result = { followSymbolicLinks: true, @@ -81987,23 +81987,23 @@ var require_internal_glob_options_helper2 = __commonJS({ if (copy) { if (typeof copy.followSymbolicLinks === "boolean") { result.followSymbolicLinks = copy.followSymbolicLinks; - core15.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); + core17.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); } if (typeof copy.implicitDescendants === "boolean") { result.implicitDescendants = copy.implicitDescendants; - core15.debug(`implicitDescendants '${result.implicitDescendants}'`); + core17.debug(`implicitDescendants '${result.implicitDescendants}'`); } if (typeof copy.matchDirectories === "boolean") { result.matchDirectories = copy.matchDirectories; - core15.debug(`matchDirectories '${result.matchDirectories}'`); + core17.debug(`matchDirectories '${result.matchDirectories}'`); } if (typeof copy.omitBrokenSymbolicLinks === "boolean") { result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; - core15.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); + core17.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); } if (typeof copy.excludeHiddenFiles === "boolean") { result.excludeHiddenFiles = copy.excludeHiddenFiles; - core15.debug(`excludeHiddenFiles '${result.excludeHiddenFiles}'`); + core17.debug(`excludeHiddenFiles '${result.excludeHiddenFiles}'`); } } return result; @@ -82687,7 +82687,7 @@ var require_internal_globber2 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; - var core15 = __importStar4(require_core()); + var core17 = __importStar4(require_core()); var fs20 = __importStar4(require("fs")); var globOptionsHelper = __importStar4(require_internal_glob_options_helper2()); var path20 = __importStar4(require("path")); @@ -82740,7 +82740,7 @@ var require_internal_globber2 = __commonJS({ } const stack = []; for (const searchPath of patternHelper.getSearchPaths(patterns)) { - core15.debug(`Search path '${searchPath}'`); + core17.debug(`Search path '${searchPath}'`); try { yield __await4(fs20.promises.lstat(searchPath)); } catch (err) { @@ -82815,7 +82815,7 @@ var require_internal_globber2 = __commonJS({ } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { - core15.debug(`Broken symlink '${item.path}'`); + core17.debug(`Broken symlink '${item.path}'`); return void 0; } throw new Error(`No information found for the path '${item.path}'. 
This may indicate a broken symbolic link.`); @@ -82831,7 +82831,7 @@ var require_internal_globber2 = __commonJS({ traversalChain.pop(); } if (traversalChain.some((x) => x === realPath)) { - core15.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); + core17.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); return void 0; } traversalChain.push(realPath); @@ -82924,7 +82924,7 @@ var require_internal_hash_files = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.hashFiles = void 0; var crypto2 = __importStar4(require("crypto")); - var core15 = __importStar4(require_core()); + var core17 = __importStar4(require_core()); var fs20 = __importStar4(require("fs")); var stream2 = __importStar4(require("stream")); var util = __importStar4(require("util")); @@ -82933,7 +82933,7 @@ var require_internal_hash_files = __commonJS({ var _a, e_1, _b, _c; var _d; return __awaiter4(this, void 0, void 0, function* () { - const writeDelegate = verbose ? core15.info : core15.debug; + const writeDelegate = verbose ? core17.info : core17.debug; let hasMatch = false; const githubWorkspace = currentWorkspace ? currentWorkspace : (_d = process.env["GITHUB_WORKSPACE"]) !== null && _d !== void 0 ? _d : process.cwd(); const result = crypto2.createHash("sha256"); @@ -85944,7 +85944,7 @@ module.exports = __toCommonJS(analyze_action_exports); var fs19 = __toESM(require("fs")); var import_path4 = __toESM(require("path")); var import_perf_hooks3 = require("perf_hooks"); -var core14 = __toESM(require_core()); +var core16 = __toESM(require_core()); // src/actions-util.ts var fs5 = __toESM(require("fs")); @@ -94350,7 +94350,7 @@ var fs18 = __toESM(require("fs")); var path18 = __toESM(require("path")); var url = __toESM(require("url")); var import_zlib = __toESM(require("zlib")); -var core13 = __toESM(require_core()); +var core15 = __toESM(require_core()); var jsonschema2 = __toESM(require_lib2()); // src/fingerprints.ts @@ -95476,8 +95476,28 @@ async function addFingerprints(sarif, sourceRoot, logger) { } // src/init.ts +var core14 = __toESM(require_core()); var toolrunner4 = __toESM(require_toolrunner()); var io6 = __toESM(require_io()); + +// src/workflow.ts +var core13 = __toESM(require_core()); +function toCodedErrors(errors) { + return Object.entries(errors).reduce( + (acc, [code, message]) => { + acc[code] = { message, code }; + return acc; + }, + {} + ); +} +var WorkflowErrors = toCodedErrors({ + MissingPushHook: `Please specify an on.push hook to analyze and see code scanning alerts from the default branch on the Security tab.`, + CheckoutWrongHead: `git checkout HEAD^2 is no longer necessary. Please remove this step as Code Scanning recommends analyzing the merge commit for best results.`, + InconsistentActionVersion: `Not all workflow steps that use \`github/codeql-action\` actions use the same version. Please ensure that all such steps use the same version to avoid compatibility issues.` +}); + +// src/init.ts async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger) { logger.startGroup("Setup CodeQL tools"); const { @@ -95607,7 +95627,7 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo logger.warning( `Uploading multiple SARIF runs with the same category is deprecated ${deprecationWarningMessage}. Please update your workflow to upload a single run per category. 
${deprecationMoreInformationMessage}` ); - core13.exportVariable("CODEQL_MERGE_SARIF_DEPRECATION_WARNING", "true"); + core15.exportVariable("CODEQL_MERGE_SARIF_DEPRECATION_WARNING", "true"); } return combineSarifFiles(sarifFiles, logger); } @@ -95706,13 +95726,13 @@ async function uploadPayload(payload, repositoryNwo, logger, analysis) { if (httpError !== void 0) { switch (httpError.status) { case 403: - core13.warning(httpError.message || GENERIC_403_MSG); + core15.warning(httpError.message || GENERIC_403_MSG); break; case 404: - core13.warning(httpError.message || GENERIC_404_MSG); + core15.warning(httpError.message || GENERIC_404_MSG); break; default: - core13.warning(httpError.message); + core15.warning(httpError.message); break; } } @@ -95842,9 +95862,9 @@ function validateSarifFileSchema(sarif, sarifFilePath, logger) { const warnings = (result.errors ?? []).filter( (err) => err.name === "format" && typeof err.argument === "string" && warningAttributes.includes(err.argument) ); - for (const warning7 of warnings) { + for (const warning8 of warnings) { logger.info( - `Warning: '${warning7.instance}' is not a valid URI in '${warning7.property}'.` + `Warning: '${warning8.instance}' is not a valid URI in '${warning8.property}'.` ); } if (errors.length > 0) { @@ -96143,7 +96163,7 @@ function validateUniqueCategory(sarif, sentinelPrefix) { `Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job per tool/category. The easiest fix is to specify a unique value for the \`category\` input. If .runs[].automationDetails.id is specified in the sarif file, that will take precedence over your configured \`category\`. Category: (${id ? id : "none"}) Tool: (${tool ? tool : "none"})` ); } - core13.exportVariable(sentinelEnvVar, sentinelEnvVar); + core15.exportVariable(sentinelEnvVar, sentinelEnvVar); } } function sanitize(str2) { @@ -96361,7 +96381,7 @@ async function run() { } const apiDetails = getApiDetails(); const outputDir = getRequiredInput("output"); - core14.exportVariable("CODEQL_ACTION_SARIF_RESULTS_OUTPUT_DIR" /* SARIF_RESULTS_OUTPUT_DIR */, outputDir); + core16.exportVariable("CODEQL_ACTION_SARIF_RESULTS_OUTPUT_DIR" /* SARIF_RESULTS_OUTPUT_DIR */, outputDir); const threads = getThreadsFlag( getOptionalInput("threads") || process.env["CODEQL_THREADS"], logger @@ -96413,8 +96433,8 @@ async function run() { for (const language of config.languages) { dbLocations[language] = getCodeQLDatabasePath(config, language); } - core14.setOutput("db-locations", dbLocations); - core14.setOutput("sarif-output", import_path4.default.resolve(outputDir)); + core16.setOutput("db-locations", dbLocations); + core16.setOutput("sarif-output", import_path4.default.resolve(outputDir)); const uploadKind = getUploadValue( getOptionalInput("upload") ); @@ -96458,13 +96478,13 @@ async function run() { logger.info("Not uploading results"); } if (uploadResults["code-scanning" /* CodeScanning */] !== void 0) { - core14.setOutput( + core16.setOutput( "sarif-id", uploadResults["code-scanning" /* CodeScanning */].sarifID ); } if (uploadResults["code-quality" /* CodeQuality */] !== void 0) { - core14.setOutput( + core16.setOutput( "quality-sarif-id", uploadResults["code-quality" /* CodeQuality */].sarifID ); @@ -96503,15 +96523,15 @@ async function run() { ); } if (getOptionalInput("expect-error") === "true") { - core14.setFailed( + core16.setFailed( `expect-error input was set to true but no error was thrown.` ); } - 
core14.exportVariable("CODEQL_ACTION_ANALYZE_DID_COMPLETE_SUCCESSFULLY" /* ANALYZE_DID_COMPLETE_SUCCESSFULLY */, "true"); + core16.exportVariable("CODEQL_ACTION_ANALYZE_DID_COMPLETE_SUCCESSFULLY" /* ANALYZE_DID_COMPLETE_SUCCESSFULLY */, "true"); } catch (unwrappedError) { const error2 = wrapError(unwrappedError); if (getOptionalInput("expect-error") !== "true" || hasBadExpectErrorInput()) { - core14.setFailed(error2.message); + core16.setFailed(error2.message); } await sendStatusReport2( startedAt, @@ -96576,7 +96596,7 @@ async function runWrapper() { try { await runPromise; } catch (error2) { - core14.setFailed(`analyze action failed: ${getErrorMessage(error2)}`); + core16.setFailed(`analyze action failed: ${getErrorMessage(error2)}`); } await checkForTimeout(); } diff --git a/lib/init-action-post.js b/lib/init-action-post.js index ba2c283386..d24c949014 100644 --- a/lib/init-action-post.js +++ b/lib/init-action-post.js @@ -19755,22 +19755,22 @@ Support boolean input list: \`true | True | TRUE | false | False | FALSE\``); process.stdout.write(message + os3.EOL); } exports2.info = info5; - function startGroup3(name) { + function startGroup4(name) { (0, command_1.issue)("group", name); } - exports2.startGroup = startGroup3; - function endGroup3() { + exports2.startGroup = startGroup4; + function endGroup4() { (0, command_1.issue)("endgroup"); } - exports2.endGroup = endGroup3; + exports2.endGroup = endGroup4; function group(name, fn) { return __awaiter4(this, void 0, void 0, function* () { - startGroup3(name); + startGroup4(name); let result; try { result = yield fn(); } finally { - endGroup3(); + endGroup4(); } return result; }); @@ -35312,7 +35312,7 @@ var require_internal_glob_options_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getOptions = void 0; - var core18 = __importStar4(require_core()); + var core19 = __importStar4(require_core()); function getOptions(copy) { const result = { followSymbolicLinks: true, @@ -35322,15 +35322,15 @@ var require_internal_glob_options_helper = __commonJS({ if (copy) { if (typeof copy.followSymbolicLinks === "boolean") { result.followSymbolicLinks = copy.followSymbolicLinks; - core18.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); + core19.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); } if (typeof copy.implicitDescendants === "boolean") { result.implicitDescendants = copy.implicitDescendants; - core18.debug(`implicitDescendants '${result.implicitDescendants}'`); + core19.debug(`implicitDescendants '${result.implicitDescendants}'`); } if (typeof copy.omitBrokenSymbolicLinks === "boolean") { result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; - core18.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); + core19.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); } } return result; @@ -36784,7 +36784,7 @@ var require_internal_globber = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; - var core18 = __importStar4(require_core()); + var core19 = __importStar4(require_core()); var fs20 = __importStar4(require("fs")); var globOptionsHelper = __importStar4(require_internal_glob_options_helper()); var path19 = __importStar4(require("path")); @@ -36835,7 +36835,7 @@ var require_internal_globber = __commonJS({ } const stack = []; for (const searchPath of patternHelper.getSearchPaths(patterns)) { - core18.debug(`Search path '${searchPath}'`); + core19.debug(`Search path 
'${searchPath}'`); try { yield __await4(fs20.promises.lstat(searchPath)); } catch (err) { @@ -36907,7 +36907,7 @@ var require_internal_globber = __commonJS({ } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { - core18.debug(`Broken symlink '${item.path}'`); + core19.debug(`Broken symlink '${item.path}'`); return void 0; } throw new Error(`No information found for the path '${item.path}'. This may indicate a broken symbolic link.`); @@ -36923,7 +36923,7 @@ var require_internal_globber = __commonJS({ traversalChain.pop(); } if (traversalChain.some((x) => x === realPath)) { - core18.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); + core19.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); return void 0; } traversalChain.push(realPath); @@ -38247,7 +38247,7 @@ var require_cacheUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getRuntimeToken = exports2.getCacheVersion = exports2.assertDefined = exports2.getGnuTarPathOnWindows = exports2.getCacheFileName = exports2.getCompressionMethod = exports2.unlinkFile = exports2.resolvePaths = exports2.getArchiveFileSizeInBytes = exports2.createTempDirectory = void 0; - var core18 = __importStar4(require_core()); + var core19 = __importStar4(require_core()); var exec2 = __importStar4(require_exec()); var glob2 = __importStar4(require_glob()); var io7 = __importStar4(require_io()); @@ -38300,7 +38300,7 @@ var require_cacheUtils = __commonJS({ _e = false; const file = _c; const relativeFile = path19.relative(workspace, file).replace(new RegExp(`\\${path19.sep}`, "g"), "/"); - core18.debug(`Matched: ${relativeFile}`); + core19.debug(`Matched: ${relativeFile}`); if (relativeFile === "") { paths.push("."); } else { @@ -38330,7 +38330,7 @@ var require_cacheUtils = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { let versionOutput = ""; additionalArgs.push("--version"); - core18.debug(`Checking ${app} ${additionalArgs.join(" ")}`); + core19.debug(`Checking ${app} ${additionalArgs.join(" ")}`); try { yield exec2.exec(`${app}`, additionalArgs, { ignoreReturnCode: true, @@ -38341,10 +38341,10 @@ var require_cacheUtils = __commonJS({ } }); } catch (err) { - core18.debug(err.message); + core19.debug(err.message); } versionOutput = versionOutput.trim(); - core18.debug(versionOutput); + core19.debug(versionOutput); return versionOutput; }); } @@ -38352,7 +38352,7 @@ var require_cacheUtils = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { const versionOutput = yield getVersion("zstd", ["--quiet"]); const version = semver8.clean(versionOutput); - core18.debug(`zstd version: ${version}`); + core19.debug(`zstd version: ${version}`); if (versionOutput === "") { return constants_1.CompressionMethod.Gzip; } else { @@ -73664,7 +73664,7 @@ var require_uploadUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.uploadCacheArchiveSDK = exports2.UploadProgress = void 0; - var core18 = __importStar4(require_core()); + var core19 = __importStar4(require_core()); var storage_blob_1 = require_dist7(); var errors_1 = require_errors2(); var UploadProgress = class { @@ -73706,7 +73706,7 @@ var require_uploadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const uploadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core18.info(`Sent 
${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); + core19.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -73761,14 +73761,14 @@ var require_uploadUtils = __commonJS({ }; try { uploadProgress.startDisplayTimer(); - core18.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); + core19.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); const response = yield blockBlobClient.uploadFile(archivePath, uploadOptions); if (response._response.status >= 400) { throw new errors_1.InvalidResponseError(`uploadCacheArchiveSDK: upload failed with status code ${response._response.status}`); } return response; } catch (error2) { - core18.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error2.message}`); + core19.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error2.message}`); throw error2; } finally { uploadProgress.stopDisplayTimer(); @@ -73839,7 +73839,7 @@ var require_requestUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.retryHttpClientResponse = exports2.retryTypedResponse = exports2.retry = exports2.isRetryableStatusCode = exports2.isServerErrorStatusCode = exports2.isSuccessStatusCode = void 0; - var core18 = __importStar4(require_core()); + var core19 = __importStar4(require_core()); var http_client_1 = require_lib(); var constants_1 = require_constants10(); function isSuccessStatusCode(statusCode) { @@ -73900,9 +73900,9 @@ var require_requestUtils = __commonJS({ isRetryable = isRetryableStatusCode(statusCode); errorMessage = `Cache service responded with ${statusCode}`; } - core18.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); + core19.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); if (!isRetryable) { - core18.debug(`${name} - Error is not retryable`); + core19.debug(`${name} - Error is not retryable`); break; } yield sleep(delay2); @@ -74007,7 +74007,7 @@ var require_downloadUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.downloadCacheStorageSDK = exports2.downloadCacheHttpClientConcurrent = exports2.downloadCacheHttpClient = exports2.DownloadProgress = void 0; - var core18 = __importStar4(require_core()); + var core19 = __importStar4(require_core()); var http_client_1 = require_lib(); var storage_blob_1 = require_dist7(); var buffer = __importStar4(require("buffer")); @@ -74045,7 +74045,7 @@ var require_downloadUtils = __commonJS({ this.segmentIndex = this.segmentIndex + 1; this.segmentSize = segmentSize; this.receivedBytes = 0; - core18.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); + core19.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); } /** * Sets the number of bytes received for the current segment. 
@@ -74079,7 +74079,7 @@ var require_downloadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const downloadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core18.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); + core19.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -74129,7 +74129,7 @@ var require_downloadUtils = __commonJS({ })); downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => { downloadResponse.message.destroy(); - core18.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); + core19.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); }); yield pipeResponseToStream(downloadResponse, writeStream); const contentLengthHeader = downloadResponse.message.headers["content-length"]; @@ -74140,7 +74140,7 @@ var require_downloadUtils = __commonJS({ throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`); } } else { - core18.debug("Unable to validate download, no Content-Length header"); + core19.debug("Unable to validate download, no Content-Length header"); } }); } @@ -74260,7 +74260,7 @@ var require_downloadUtils = __commonJS({ const properties = yield client.getProperties(); const contentLength = (_a = properties.contentLength) !== null && _a !== void 0 ? _a : -1; if (contentLength < 0) { - core18.debug("Unable to determine content length, downloading file with http-client..."); + core19.debug("Unable to determine content length, downloading file with http-client..."); yield downloadCacheHttpClient(archiveLocation, archivePath); } else { const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH); @@ -74340,7 +74340,7 @@ var require_options = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getDownloadOptions = exports2.getUploadOptions = void 0; - var core18 = __importStar4(require_core()); + var core19 = __importStar4(require_core()); function getUploadOptions(copy) { const result = { useAzureSdk: false, @@ -74360,9 +74360,9 @@ var require_options = __commonJS({ } result.uploadConcurrency = !isNaN(Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) ? Math.min(32, Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) : result.uploadConcurrency; result.uploadChunkSize = !isNaN(Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"])) ? 
Math.min(128 * 1024 * 1024, Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"]) * 1024 * 1024) : result.uploadChunkSize; - core18.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core18.debug(`Upload concurrency: ${result.uploadConcurrency}`); - core18.debug(`Upload chunk size: ${result.uploadChunkSize}`); + core19.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core19.debug(`Upload concurrency: ${result.uploadConcurrency}`); + core19.debug(`Upload chunk size: ${result.uploadChunkSize}`); return result; } exports2.getUploadOptions = getUploadOptions; @@ -74399,12 +74399,12 @@ var require_options = __commonJS({ if (segmentDownloadTimeoutMins && !isNaN(Number(segmentDownloadTimeoutMins)) && isFinite(Number(segmentDownloadTimeoutMins))) { result.segmentTimeoutInMs = Number(segmentDownloadTimeoutMins) * 60 * 1e3; } - core18.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core18.debug(`Download concurrency: ${result.downloadConcurrency}`); - core18.debug(`Request timeout (ms): ${result.timeoutInMs}`); - core18.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); - core18.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); - core18.debug(`Lookup only: ${result.lookupOnly}`); + core19.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core19.debug(`Download concurrency: ${result.downloadConcurrency}`); + core19.debug(`Request timeout (ms): ${result.timeoutInMs}`); + core19.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); + core19.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); + core19.debug(`Lookup only: ${result.lookupOnly}`); return result; } exports2.getDownloadOptions = getDownloadOptions; @@ -74584,7 +74584,7 @@ var require_cacheHttpClient = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.saveCache = exports2.reserveCache = exports2.downloadCache = exports2.getCacheEntry = void 0; - var core18 = __importStar4(require_core()); + var core19 = __importStar4(require_core()); var http_client_1 = require_lib(); var auth_1 = require_auth(); var fs20 = __importStar4(require("fs")); @@ -74602,7 +74602,7 @@ var require_cacheHttpClient = __commonJS({ throw new Error("Cache Service Url not found, unable to restore cache."); } const url2 = `${baseUrl}_apis/artifactcache/${resource}`; - core18.debug(`Resource Url: ${url2}`); + core19.debug(`Resource Url: ${url2}`); return url2; } function createAcceptHeader(type2, apiVersion) { @@ -74630,7 +74630,7 @@ var require_cacheHttpClient = __commonJS({ return httpClient.getJson(getCacheApiUrl(resource)); })); if (response.statusCode === 204) { - if (core18.isDebug()) { + if (core19.isDebug()) { yield printCachesListForDiagnostics(keys[0], httpClient, version); } return null; @@ -74643,9 +74643,9 @@ var require_cacheHttpClient = __commonJS({ if (!cacheDownloadUrl) { throw new Error("Cache not found."); } - core18.setSecret(cacheDownloadUrl); - core18.debug(`Cache Result:`); - core18.debug(JSON.stringify(cacheResult)); + core19.setSecret(cacheDownloadUrl); + core19.debug(`Cache Result:`); + core19.debug(JSON.stringify(cacheResult)); return cacheResult; }); } @@ -74660,10 +74660,10 @@ var require_cacheHttpClient = __commonJS({ const cacheListResult = response.result; const totalCount = cacheListResult === null || cacheListResult === void 0 ? 
void 0 : cacheListResult.totalCount; if (totalCount && totalCount > 0) { - core18.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key + core19.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key Other caches with similar key:`); for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) { - core18.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); + core19.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); } } } @@ -74708,7 +74708,7 @@ Other caches with similar key:`); } function uploadChunk(httpClient, resourceUrl, openStream, start, end) { return __awaiter4(this, void 0, void 0, function* () { - core18.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); + core19.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); const additionalHeaders = { "Content-Type": "application/octet-stream", "Content-Range": getContentRange(start, end) @@ -74730,7 +74730,7 @@ Other caches with similar key:`); const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency); const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize); const parallelUploads = [...new Array(concurrency).keys()]; - core18.debug("Awaiting all uploads"); + core19.debug("Awaiting all uploads"); let offset = 0; try { yield Promise.all(parallelUploads.map(() => __awaiter4(this, void 0, void 0, function* () { @@ -74773,16 +74773,16 @@ Other caches with similar key:`); yield (0, uploadUtils_1.uploadCacheArchiveSDK)(signedUploadURL, archivePath, options); } else { const httpClient = createHttpClient(); - core18.debug("Upload cache"); + core19.debug("Upload cache"); yield uploadFile(httpClient, cacheId, archivePath, options); - core18.debug("Commiting cache"); + core19.debug("Commiting cache"); const cacheSize = utils.getArchiveFileSizeInBytes(archivePath); - core18.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); + core19.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); const commitCacheResponse = yield commitCache(httpClient, 
cacheId, cacheSize); if (!(0, requestUtils_1.isSuccessStatusCode)(commitCacheResponse.statusCode)) { throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`); } - core18.info("Cache saved successfully"); + core19.info("Cache saved successfully"); } }); } @@ -80234,7 +80234,7 @@ var require_cache3 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.saveCache = exports2.restoreCache = exports2.isFeatureAvailable = exports2.FinalizeCacheError = exports2.ReserveCacheError = exports2.ValidationError = void 0; - var core18 = __importStar4(require_core()); + var core19 = __importStar4(require_core()); var path19 = __importStar4(require("path")); var utils = __importStar4(require_cacheUtils()); var cacheHttpClient = __importStar4(require_cacheHttpClient()); @@ -80294,7 +80294,7 @@ var require_cache3 = __commonJS({ function restoreCache4(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { return __awaiter4(this, void 0, void 0, function* () { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core18.debug(`Cache service version: ${cacheServiceVersion}`); + core19.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); switch (cacheServiceVersion) { case "v2": @@ -80310,8 +80310,8 @@ var require_cache3 = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core18.debug("Resolved Keys:"); - core18.debug(JSON.stringify(keys)); + core19.debug("Resolved Keys:"); + core19.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -80329,19 +80329,19 @@ var require_cache3 = __commonJS({ return void 0; } if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { - core18.info("Lookup only - skipping download"); + core19.info("Lookup only - skipping download"); return cacheEntry.cacheKey; } archivePath = path19.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core18.debug(`Archive Path: ${archivePath}`); + core19.debug(`Archive Path: ${archivePath}`); yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); - if (core18.isDebug()) { + if (core19.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core18.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + core19.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); yield (0, tar_1.extractTar)(archivePath, compressionMethod); - core18.info("Cache restored successfully"); + core19.info("Cache restored successfully"); return cacheEntry.cacheKey; } catch (error2) { const typedError = error2; @@ -80349,16 +80349,16 @@ var require_cache3 = __commonJS({ throw error2; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core18.error(`Failed to restore: ${error2.message}`); + core19.error(`Failed to restore: ${error2.message}`); } else { - core18.warning(`Failed to restore: ${error2.message}`); + core19.warning(`Failed to restore: ${error2.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error2) { - core18.debug(`Failed to delete archive: ${error2}`); + core19.debug(`Failed to delete archive: ${error2}`); } } return void 0; @@ -80369,8 +80369,8 @@ var require_cache3 = __commonJS({ options = Object.assign(Object.assign({}, options), { useAzureSdk: true }); restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core18.debug("Resolved Keys:"); - core18.debug(JSON.stringify(keys)); + core19.debug("Resolved Keys:"); + core19.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -80388,30 +80388,30 @@ var require_cache3 = __commonJS({ }; const response = yield twirpClient.GetCacheEntryDownloadURL(request); if (!response.ok) { - core18.debug(`Cache not found for version ${request.version} of keys: ${keys.join(", ")}`); + core19.debug(`Cache not found for version ${request.version} of keys: ${keys.join(", ")}`); return void 0; } const isRestoreKeyMatch = request.key !== response.matchedKey; if (isRestoreKeyMatch) { - core18.info(`Cache hit for restore-key: ${response.matchedKey}`); + core19.info(`Cache hit for restore-key: ${response.matchedKey}`); } else { - core18.info(`Cache hit for: ${response.matchedKey}`); + core19.info(`Cache hit for: ${response.matchedKey}`); } if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { - core18.info("Lookup only - skipping download"); + core19.info("Lookup only - skipping download"); return response.matchedKey; } archivePath = path19.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core18.debug(`Archive path: ${archivePath}`); - core18.debug(`Starting download of archive to: ${archivePath}`); + core19.debug(`Archive path: ${archivePath}`); + core19.debug(`Starting download of archive to: ${archivePath}`); yield cacheHttpClient.downloadCache(response.signedDownloadUrl, archivePath, options); const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core18.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); - if (core18.isDebug()) { + core19.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + if (core19.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } yield (0, tar_1.extractTar)(archivePath, compressionMethod); - core18.info("Cache restored successfully"); + core19.info("Cache restored successfully"); return response.matchedKey; } catch (error2) { const typedError = error2; @@ -80419,9 +80419,9 @@ var require_cache3 = __commonJS({ throw error2; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core18.error(`Failed to restore: ${error2.message}`); + core19.error(`Failed to restore: ${error2.message}`); } else { - core18.warning(`Failed to restore: ${error2.message}`); + core19.warning(`Failed to restore: ${error2.message}`); } } } finally { @@ -80430,7 +80430,7 @@ var require_cache3 = __commonJS({ yield utils.unlinkFile(archivePath); } } catch (error2) { - core18.debug(`Failed to delete archive: ${error2}`); + core19.debug(`Failed to delete archive: ${error2}`); } } return void 0; @@ -80439,7 +80439,7 @@ var require_cache3 = __commonJS({ function saveCache4(paths, key, options, enableCrossOsArchive = false) { return __awaiter4(this, void 0, void 0, function* () { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core18.debug(`Cache service version: ${cacheServiceVersion}`); + core19.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); checkKey(key); switch (cacheServiceVersion) { @@ -80458,26 +80458,26 @@ var require_cache3 = __commonJS({ const compressionMethod = yield utils.getCompressionMethod(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core18.debug("Cache Paths:"); - core18.debug(`${JSON.stringify(cachePaths)}`); + core19.debug("Cache Paths:"); + core19.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = path19.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core18.debug(`Archive Path: ${archivePath}`); + core19.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core18.isDebug()) { + if (core19.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const fileSizeLimit = 10 * 1024 * 1024 * 1024; const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core18.debug(`File Size: ${archiveFileSize}`); + core19.debug(`File Size: ${archiveFileSize}`); if 
(archiveFileSize > fileSizeLimit && !(0, config_1.isGhes)()) { throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); } - core18.debug("Reserving Cache"); + core19.debug("Reserving Cache"); const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, { compressionMethod, enableCrossOsArchive, @@ -80490,26 +80490,26 @@ var require_cache3 = __commonJS({ } else { throw new ReserveCacheError2(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? void 0 : _e.message}`); } - core18.debug(`Saving Cache (ID: ${cacheId})`); + core19.debug(`Saving Cache (ID: ${cacheId})`); yield cacheHttpClient.saveCache(cacheId, archivePath, "", options); } catch (error2) { const typedError = error2; if (typedError.name === ValidationError.name) { throw error2; } else if (typedError.name === ReserveCacheError2.name) { - core18.info(`Failed to save: ${typedError.message}`); + core19.info(`Failed to save: ${typedError.message}`); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core18.error(`Failed to save: ${typedError.message}`); + core19.error(`Failed to save: ${typedError.message}`); } else { - core18.warning(`Failed to save: ${typedError.message}`); + core19.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error2) { - core18.debug(`Failed to delete archive: ${error2}`); + core19.debug(`Failed to delete archive: ${error2}`); } } return cacheId; @@ -80522,23 +80522,23 @@ var require_cache3 = __commonJS({ const twirpClient = cacheTwirpClient.internalCacheTwirpClient(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core18.debug("Cache Paths:"); - core18.debug(`${JSON.stringify(cachePaths)}`); + core19.debug("Cache Paths:"); + core19.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = path19.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core18.debug(`Archive Path: ${archivePath}`); + core19.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core18.isDebug()) { + if (core19.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core18.debug(`File Size: ${archiveFileSize}`); + core19.debug(`File Size: ${archiveFileSize}`); options.archiveSizeBytes = archiveFileSize; - core18.debug("Reserving Cache"); + core19.debug("Reserving Cache"); const version = utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive); const request = { key, @@ -80549,16 +80549,16 @@ var require_cache3 = __commonJS({ const response = yield twirpClient.CreateCacheEntry(request); if (!response.ok) { if (response.message) { - core18.warning(`Cache reservation failed: ${response.message}`); + core19.warning(`Cache reservation failed: ${response.message}`); } throw new Error(response.message || "Response was not ok"); } signedUploadUrl = 
response.signedUploadUrl; } catch (error2) { - core18.debug(`Failed to reserve cache: ${error2}`); + core19.debug(`Failed to reserve cache: ${error2}`); throw new ReserveCacheError2(`Unable to reserve cache with key ${key}, another job may be creating this cache.`); } - core18.debug(`Attempting to upload cache located at: ${archivePath}`); + core19.debug(`Attempting to upload cache located at: ${archivePath}`); yield cacheHttpClient.saveCache(cacheId, archivePath, signedUploadUrl, options); const finalizeRequest = { key, @@ -80566,7 +80566,7 @@ var require_cache3 = __commonJS({ sizeBytes: `${archiveFileSize}` }; const finalizeResponse = yield twirpClient.FinalizeCacheEntryUpload(finalizeRequest); - core18.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); + core19.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); if (!finalizeResponse.ok) { if (finalizeResponse.message) { throw new FinalizeCacheError(finalizeResponse.message); @@ -80579,21 +80579,21 @@ var require_cache3 = __commonJS({ if (typedError.name === ValidationError.name) { throw error2; } else if (typedError.name === ReserveCacheError2.name) { - core18.info(`Failed to save: ${typedError.message}`); + core19.info(`Failed to save: ${typedError.message}`); } else if (typedError.name === FinalizeCacheError.name) { - core18.warning(typedError.message); + core19.warning(typedError.message); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core18.error(`Failed to save: ${typedError.message}`); + core19.error(`Failed to save: ${typedError.message}`); } else { - core18.warning(`Failed to save: ${typedError.message}`); + core19.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error2) { - core18.debug(`Failed to delete archive: ${error2}`); + core19.debug(`Failed to delete archive: ${error2}`); } } return cacheId; @@ -80801,7 +80801,7 @@ var require_retry_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.RetryHelper = void 0; - var core18 = __importStar4(require_core()); + var core19 = __importStar4(require_core()); var RetryHelper = class { constructor(maxAttempts, minSeconds, maxSeconds) { if (maxAttempts < 1) { @@ -80824,10 +80824,10 @@ var require_retry_helper = __commonJS({ if (isRetryable && !isRetryable(err)) { throw err; } - core18.info(err.message); + core19.info(err.message); } const seconds = this.getSleepAmount(); - core18.info(`Waiting ${seconds} seconds before trying again`); + core19.info(`Waiting ${seconds} seconds before trying again`); yield this.sleep(seconds); attempt++; } @@ -80907,7 +80907,7 @@ var require_tool_cache = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.evaluateVersions = exports2.isExplicitVersion = exports2.findFromManifest = exports2.getManifestFromRepo = exports2.findAllVersions = exports2.find = exports2.cacheFile = exports2.cacheDir = exports2.extractZip = exports2.extractXar = exports2.extractTar = exports2.extract7z = exports2.downloadTool = exports2.HTTPError = void 0; - var core18 = __importStar4(require_core()); + var core19 = __importStar4(require_core()); var io7 = __importStar4(require_io()); var crypto = __importStar4(require("crypto")); var fs20 = __importStar4(require("fs")); @@ -80936,8 +80936,8 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { dest = dest || 
path19.join(_getTempDirectory(), crypto.randomUUID()); yield io7.mkdirP(path19.dirname(dest)); - core18.debug(`Downloading ${url2}`); - core18.debug(`Destination ${dest}`); + core19.debug(`Downloading ${url2}`); + core19.debug(`Destination ${dest}`); const maxAttempts = 3; const minSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS", 10); const maxSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS", 20); @@ -80964,7 +80964,7 @@ var require_tool_cache = __commonJS({ allowRetries: false }); if (auth) { - core18.debug("set auth"); + core19.debug("set auth"); if (headers === void 0) { headers = {}; } @@ -80973,7 +80973,7 @@ var require_tool_cache = __commonJS({ const response = yield http.get(url2, headers); if (response.message.statusCode !== 200) { const err = new HTTPError2(response.message.statusCode); - core18.debug(`Failed to download from "${url2}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); + core19.debug(`Failed to download from "${url2}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); throw err; } const pipeline = util.promisify(stream2.pipeline); @@ -80982,16 +80982,16 @@ var require_tool_cache = __commonJS({ let succeeded = false; try { yield pipeline(readStream, fs20.createWriteStream(dest)); - core18.debug("download complete"); + core19.debug("download complete"); succeeded = true; return dest; } finally { if (!succeeded) { - core18.debug("download failed"); + core19.debug("download failed"); try { yield io7.rmRF(dest); } catch (err) { - core18.debug(`Failed to delete '${dest}'. ${err.message}`); + core19.debug(`Failed to delete '${dest}'. ${err.message}`); } } } @@ -81006,7 +81006,7 @@ var require_tool_cache = __commonJS({ process.chdir(dest); if (_7zPath) { try { - const logLevel = core18.isDebug() ? "-bb1" : "-bb0"; + const logLevel = core19.isDebug() ? 
"-bb1" : "-bb0"; const args = [ "x", logLevel, @@ -81056,7 +81056,7 @@ var require_tool_cache = __commonJS({ throw new Error("parameter 'file' is required"); } dest = yield _createExtractFolder(dest); - core18.debug("Checking tar --version"); + core19.debug("Checking tar --version"); let versionOutput = ""; yield (0, exec_1.exec)("tar --version", [], { ignoreReturnCode: true, @@ -81066,7 +81066,7 @@ var require_tool_cache = __commonJS({ stderr: (data) => versionOutput += data.toString() } }); - core18.debug(versionOutput.trim()); + core19.debug(versionOutput.trim()); const isGnuTar = versionOutput.toUpperCase().includes("GNU TAR"); let args; if (flags instanceof Array) { @@ -81074,7 +81074,7 @@ var require_tool_cache = __commonJS({ } else { args = [flags]; } - if (core18.isDebug() && !flags.includes("v")) { + if (core19.isDebug() && !flags.includes("v")) { args.push("-v"); } let destArg = dest; @@ -81106,7 +81106,7 @@ var require_tool_cache = __commonJS({ args = [flags]; } args.push("-x", "-C", dest, "-f", file); - if (core18.isDebug()) { + if (core19.isDebug()) { args.push("-v"); } const xarPath = yield io7.which("xar", true); @@ -81151,7 +81151,7 @@ var require_tool_cache = __commonJS({ "-Command", pwshCommand ]; - core18.debug(`Using pwsh at path: ${pwshPath}`); + core19.debug(`Using pwsh at path: ${pwshPath}`); yield (0, exec_1.exec)(`"${pwshPath}"`, args); } else { const powershellCommand = [ @@ -81171,7 +81171,7 @@ var require_tool_cache = __commonJS({ powershellCommand ]; const powershellPath = yield io7.which("powershell", true); - core18.debug(`Using powershell at path: ${powershellPath}`); + core19.debug(`Using powershell at path: ${powershellPath}`); yield (0, exec_1.exec)(`"${powershellPath}"`, args); } }); @@ -81180,7 +81180,7 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { const unzipPath = yield io7.which("unzip", true); const args = [file]; - if (!core18.isDebug()) { + if (!core19.isDebug()) { args.unshift("-q"); } args.unshift("-o"); @@ -81191,8 +81191,8 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { version = semver8.clean(version) || version; arch2 = arch2 || os3.arch(); - core18.debug(`Caching tool ${tool} ${version} ${arch2}`); - core18.debug(`source dir: ${sourceDir}`); + core19.debug(`Caching tool ${tool} ${version} ${arch2}`); + core19.debug(`source dir: ${sourceDir}`); if (!fs20.statSync(sourceDir).isDirectory()) { throw new Error("sourceDir is not a directory"); } @@ -81210,14 +81210,14 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { version = semver8.clean(version) || version; arch2 = arch2 || os3.arch(); - core18.debug(`Caching tool ${tool} ${version} ${arch2}`); - core18.debug(`source file: ${sourceFile}`); + core19.debug(`Caching tool ${tool} ${version} ${arch2}`); + core19.debug(`source file: ${sourceFile}`); if (!fs20.statSync(sourceFile).isFile()) { throw new Error("sourceFile is not a file"); } const destFolder = yield _createToolPath(tool, version, arch2); const destPath = path19.join(destFolder, targetFile); - core18.debug(`destination file ${destPath}`); + core19.debug(`destination file ${destPath}`); yield io7.cp(sourceFile, destPath); _completeToolPath(tool, version, arch2); return destFolder; @@ -81241,12 +81241,12 @@ var require_tool_cache = __commonJS({ if (versionSpec) { versionSpec = semver8.clean(versionSpec) || ""; const cachePath = path19.join(_getCacheDirectory(), toolName, versionSpec, arch2); - 
core18.debug(`checking cache: ${cachePath}`); + core19.debug(`checking cache: ${cachePath}`); if (fs20.existsSync(cachePath) && fs20.existsSync(`${cachePath}.complete`)) { - core18.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); + core19.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); toolPath = cachePath; } else { - core18.debug("not found"); + core19.debug("not found"); } } return toolPath; @@ -81277,7 +81277,7 @@ var require_tool_cache = __commonJS({ const http = new httpm.HttpClient("tool-cache"); const headers = {}; if (auth) { - core18.debug("set auth"); + core19.debug("set auth"); headers.authorization = auth; } const response = yield http.getJson(treeUrl, headers); @@ -81298,7 +81298,7 @@ var require_tool_cache = __commonJS({ try { releases = JSON.parse(versionsRaw); } catch (_a) { - core18.debug("Invalid json"); + core19.debug("Invalid json"); } } return releases; @@ -81324,7 +81324,7 @@ var require_tool_cache = __commonJS({ function _createToolPath(tool, version, arch2) { return __awaiter4(this, void 0, void 0, function* () { const folderPath = path19.join(_getCacheDirectory(), tool, semver8.clean(version) || version, arch2 || ""); - core18.debug(`destination ${folderPath}`); + core19.debug(`destination ${folderPath}`); const markerPath = `${folderPath}.complete`; yield io7.rmRF(folderPath); yield io7.rmRF(markerPath); @@ -81336,19 +81336,19 @@ var require_tool_cache = __commonJS({ const folderPath = path19.join(_getCacheDirectory(), tool, semver8.clean(version) || version, arch2 || ""); const markerPath = `${folderPath}.complete`; fs20.writeFileSync(markerPath, ""); - core18.debug("finished caching tool"); + core19.debug("finished caching tool"); } function isExplicitVersion(versionSpec) { const c = semver8.clean(versionSpec) || ""; - core18.debug(`isExplicit: ${c}`); + core19.debug(`isExplicit: ${c}`); const valid3 = semver8.valid(c) != null; - core18.debug(`explicit? ${valid3}`); + core19.debug(`explicit? ${valid3}`); return valid3; } exports2.isExplicitVersion = isExplicitVersion; function evaluateVersions(versions, versionSpec) { let version = ""; - core18.debug(`evaluating ${versions.length} versions`); + core19.debug(`evaluating ${versions.length} versions`); versions = versions.sort((a, b) => { if (semver8.gt(a, b)) { return 1; @@ -81364,9 +81364,9 @@ var require_tool_cache = __commonJS({ } } if (version) { - core18.debug(`matched: ${version}`); + core19.debug(`matched: ${version}`); } else { - core18.debug("match not found"); + core19.debug("match not found"); } return version; } @@ -84040,14 +84040,14 @@ var require_retention = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getExpiration = void 0; var generated_1 = require_generated(); - var core18 = __importStar4(require_core()); + var core19 = __importStar4(require_core()); function getExpiration(retentionDays) { if (!retentionDays) { return void 0; } const maxRetentionDays = getRetentionDays(); if (maxRetentionDays && maxRetentionDays < retentionDays) { - core18.warning(`Retention days cannot be greater than the maximum allowed retention set within the repository. Using ${maxRetentionDays} instead.`); + core19.warning(`Retention days cannot be greater than the maximum allowed retention set within the repository. 
Using ${maxRetentionDays} instead.`); retentionDays = maxRetentionDays; } const expirationDate = /* @__PURE__ */ new Date(); @@ -84639,7 +84639,7 @@ var require_util11 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getBackendIdsFromToken = void 0; - var core18 = __importStar4(require_core()); + var core19 = __importStar4(require_core()); var config_1 = require_config2(); var jwt_decode_1 = __importDefault4(require_jwt_decode_cjs()); var InvalidJwtError = new Error("Failed to get backend IDs: The provided JWT token is invalid and/or missing claims"); @@ -84665,8 +84665,8 @@ var require_util11 = __commonJS({ workflowRunBackendId: scopeParts[1], workflowJobRunBackendId: scopeParts[2] }; - core18.debug(`Workflow Run Backend ID: ${ids.workflowRunBackendId}`); - core18.debug(`Workflow Job Run Backend ID: ${ids.workflowJobRunBackendId}`); + core19.debug(`Workflow Run Backend ID: ${ids.workflowRunBackendId}`); + core19.debug(`Workflow Job Run Backend ID: ${ids.workflowJobRunBackendId}`); return ids; } throw InvalidJwtError; @@ -84737,7 +84737,7 @@ var require_blob_upload = __commonJS({ exports2.uploadZipToBlobStorage = void 0; var storage_blob_1 = require_dist7(); var config_1 = require_config2(); - var core18 = __importStar4(require_core()); + var core19 = __importStar4(require_core()); var crypto = __importStar4(require("crypto")); var stream2 = __importStar4(require("stream")); var errors_1 = require_errors3(); @@ -84763,9 +84763,9 @@ var require_blob_upload = __commonJS({ const bufferSize = (0, config_1.getUploadChunkSize)(); const blobClient = new storage_blob_1.BlobClient(authenticatedUploadURL); const blockBlobClient = blobClient.getBlockBlobClient(); - core18.debug(`Uploading artifact zip to blob storage with maxConcurrency: ${maxConcurrency}, bufferSize: ${bufferSize}`); + core19.debug(`Uploading artifact zip to blob storage with maxConcurrency: ${maxConcurrency}, bufferSize: ${bufferSize}`); const uploadCallback = (progress) => { - core18.info(`Uploaded bytes ${progress.loadedBytes}`); + core19.info(`Uploaded bytes ${progress.loadedBytes}`); uploadByteCount = progress.loadedBytes; lastProgressTime = Date.now(); }; @@ -84779,7 +84779,7 @@ var require_blob_upload = __commonJS({ const hashStream = crypto.createHash("sha256"); zipUploadStream.pipe(uploadStream); zipUploadStream.pipe(hashStream).setEncoding("hex"); - core18.info("Beginning upload of artifact content to blob storage"); + core19.info("Beginning upload of artifact content to blob storage"); try { yield Promise.race([ blockBlobClient.uploadStream(uploadStream, bufferSize, maxConcurrency, options), @@ -84793,12 +84793,12 @@ var require_blob_upload = __commonJS({ } finally { abortController.abort(); } - core18.info("Finished uploading artifact content to blob storage!"); + core19.info("Finished uploading artifact content to blob storage!"); hashStream.end(); sha256Hash = hashStream.read(); - core18.info(`SHA256 hash of uploaded artifact zip is ${sha256Hash}`); + core19.info(`SHA256 hash of uploaded artifact zip is ${sha256Hash}`); if (uploadByteCount === 0) { - core18.warning(`No data was uploaded to blob storage. Reported upload byte count is 0.`); + core19.warning(`No data was uploaded to blob storage. 
Reported upload byte count is 0.`); } return { uploadSize: uploadByteCount, @@ -109676,7 +109676,7 @@ var require_zip2 = __commonJS({ var stream2 = __importStar4(require("stream")); var promises_1 = require("fs/promises"); var archiver2 = __importStar4(require_archiver()); - var core18 = __importStar4(require_core()); + var core19 = __importStar4(require_core()); var config_1 = require_config2(); exports2.DEFAULT_COMPRESSION_LEVEL = 6; var ZipUploadStream = class extends stream2.Transform { @@ -109693,7 +109693,7 @@ var require_zip2 = __commonJS({ exports2.ZipUploadStream = ZipUploadStream; function createZipUploadStream(uploadSpecification, compressionLevel = exports2.DEFAULT_COMPRESSION_LEVEL) { return __awaiter4(this, void 0, void 0, function* () { - core18.debug(`Creating Artifact archive with compressionLevel: ${compressionLevel}`); + core19.debug(`Creating Artifact archive with compressionLevel: ${compressionLevel}`); const zip = archiver2.create("zip", { highWaterMark: (0, config_1.getUploadChunkSize)(), zlib: { level: compressionLevel } @@ -109717,8 +109717,8 @@ var require_zip2 = __commonJS({ } const bufferSize = (0, config_1.getUploadChunkSize)(); const zipUploadStream = new ZipUploadStream(bufferSize); - core18.debug(`Zip write high watermark value ${zipUploadStream.writableHighWaterMark}`); - core18.debug(`Zip read high watermark value ${zipUploadStream.readableHighWaterMark}`); + core19.debug(`Zip write high watermark value ${zipUploadStream.writableHighWaterMark}`); + core19.debug(`Zip read high watermark value ${zipUploadStream.readableHighWaterMark}`); zip.pipe(zipUploadStream); zip.finalize(); return zipUploadStream; @@ -109726,24 +109726,24 @@ var require_zip2 = __commonJS({ } exports2.createZipUploadStream = createZipUploadStream; var zipErrorCallback = (error2) => { - core18.error("An error has occurred while creating the zip file for upload"); - core18.info(error2); + core19.error("An error has occurred while creating the zip file for upload"); + core19.info(error2); throw new Error("An error has occurred during zip creation for the artifact"); }; var zipWarningCallback = (error2) => { if (error2.code === "ENOENT") { - core18.warning("ENOENT warning during artifact zip creation. No such file or directory"); - core18.info(error2); + core19.warning("ENOENT warning during artifact zip creation. 
No such file or directory"); + core19.info(error2); } else { - core18.warning(`A non-blocking warning has occurred during artifact zip creation: ${error2.code}`); - core18.info(error2); + core19.warning(`A non-blocking warning has occurred during artifact zip creation: ${error2.code}`); + core19.info(error2); } }; var zipFinishCallback = () => { - core18.debug("Zip stream for upload has finished."); + core19.debug("Zip stream for upload has finished."); }; var zipEndCallback = () => { - core18.debug("Zip stream for upload has ended."); + core19.debug("Zip stream for upload has ended."); }; } }); @@ -109808,7 +109808,7 @@ var require_upload_artifact = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.uploadArtifact = void 0; - var core18 = __importStar4(require_core()); + var core19 = __importStar4(require_core()); var retention_1 = require_retention(); var path_and_artifact_name_validation_1 = require_path_and_artifact_name_validation(); var artifact_twirp_client_1 = require_artifact_twirp_client2(); @@ -109855,13 +109855,13 @@ var require_upload_artifact = __commonJS({ value: `sha256:${uploadResult.sha256Hash}` }); } - core18.info(`Finalizing artifact upload`); + core19.info(`Finalizing artifact upload`); const finalizeArtifactResp = yield artifactClient.FinalizeArtifact(finalizeArtifactReq); if (!finalizeArtifactResp.ok) { throw new errors_1.InvalidResponseError("FinalizeArtifact: response from backend was not ok"); } const artifactId = BigInt(finalizeArtifactResp.artifactId); - core18.info(`Artifact ${name}.zip successfully finalized. Artifact ID ${artifactId}`); + core19.info(`Artifact ${name}.zip successfully finalized. Artifact ID ${artifactId}`); return { size: uploadResult.uploadSize, digest: uploadResult.sha256Hash, @@ -117009,7 +117009,7 @@ var require_download_artifact = __commonJS({ var crypto = __importStar4(require("crypto")); var stream2 = __importStar4(require("stream")); var github3 = __importStar4(require_github2()); - var core18 = __importStar4(require_core()); + var core19 = __importStar4(require_core()); var httpClient = __importStar4(require_lib()); var unzip_stream_1 = __importDefault4(require_unzip()); var user_agent_1 = require_user_agent2(); @@ -117045,7 +117045,7 @@ var require_download_artifact = __commonJS({ return yield streamExtractExternal(url2, directory); } catch (error2) { retryCount++; - core18.debug(`Failed to download artifact after ${retryCount} retries due to ${error2.message}. Retrying in 5 seconds...`); + core19.debug(`Failed to download artifact after ${retryCount} retries due to ${error2.message}. 
Retrying in 5 seconds...`); yield new Promise((resolve8) => setTimeout(resolve8, 5e3)); } } @@ -117074,7 +117074,7 @@ var require_download_artifact = __commonJS({ extractStream.on("data", () => { timer.refresh(); }).on("error", (error2) => { - core18.debug(`response.message: Artifact download failed: ${error2.message}`); + core19.debug(`response.message: Artifact download failed: ${error2.message}`); clearTimeout(timer); reject(error2); }).pipe(unzip_stream_1.default.Extract({ path: directory })).on("close", () => { @@ -117082,7 +117082,7 @@ var require_download_artifact = __commonJS({ if (hashStream) { hashStream.end(); sha256Digest = hashStream.read(); - core18.info(`SHA256 digest of downloaded artifact is ${sha256Digest}`); + core19.info(`SHA256 digest of downloaded artifact is ${sha256Digest}`); } resolve8({ sha256Digest: `sha256:${sha256Digest}` }); }).on("error", (error2) => { @@ -117097,7 +117097,7 @@ var require_download_artifact = __commonJS({ const downloadPath = yield resolveOrCreateDirectory(options === null || options === void 0 ? void 0 : options.path); const api = github3.getOctokit(token); let digestMismatch = false; - core18.info(`Downloading artifact '${artifactId}' from '${repositoryOwner}/${repositoryName}'`); + core19.info(`Downloading artifact '${artifactId}' from '${repositoryOwner}/${repositoryName}'`); const { headers, status } = yield api.rest.actions.downloadArtifact({ owner: repositoryOwner, repo: repositoryName, @@ -117114,16 +117114,16 @@ var require_download_artifact = __commonJS({ if (!location) { throw new Error(`Unable to redirect to artifact download url`); } - core18.info(`Redirecting to blob download url: ${scrubQueryParameters(location)}`); + core19.info(`Redirecting to blob download url: ${scrubQueryParameters(location)}`); try { - core18.info(`Starting download of artifact to: ${downloadPath}`); + core19.info(`Starting download of artifact to: ${downloadPath}`); const extractResponse = yield streamExtract(location, downloadPath); - core18.info(`Artifact download completed successfully.`); + core19.info(`Artifact download completed successfully.`); if (options === null || options === void 0 ? void 0 : options.expectedHash) { if ((options === null || options === void 0 ? void 0 : options.expectedHash) !== extractResponse.sha256Digest) { digestMismatch = true; - core18.debug(`Computed digest: ${extractResponse.sha256Digest}`); - core18.debug(`Expected digest: ${options.expectedHash}`); + core19.debug(`Computed digest: ${extractResponse.sha256Digest}`); + core19.debug(`Expected digest: ${options.expectedHash}`); } } } catch (error2) { @@ -117150,7 +117150,7 @@ var require_download_artifact = __commonJS({ Are you trying to download from a different run? Try specifying a github-token with \`actions:read\` scope.`); } if (artifacts.length > 1) { - core18.warning("Multiple artifacts found, defaulting to first."); + core19.warning("Multiple artifacts found, defaulting to first."); } const signedReq = { workflowRunBackendId: artifacts[0].workflowRunBackendId, @@ -117158,16 +117158,16 @@ Are you trying to download from a different run? 
Try specifying a github-token w name: artifacts[0].name }; const { signedUrl } = yield artifactClient.GetSignedArtifactURL(signedReq); - core18.info(`Redirecting to blob download url: ${scrubQueryParameters(signedUrl)}`); + core19.info(`Redirecting to blob download url: ${scrubQueryParameters(signedUrl)}`); try { - core18.info(`Starting download of artifact to: ${downloadPath}`); + core19.info(`Starting download of artifact to: ${downloadPath}`); const extractResponse = yield streamExtract(signedUrl, downloadPath); - core18.info(`Artifact download completed successfully.`); + core19.info(`Artifact download completed successfully.`); if (options === null || options === void 0 ? void 0 : options.expectedHash) { if ((options === null || options === void 0 ? void 0 : options.expectedHash) !== extractResponse.sha256Digest) { digestMismatch = true; - core18.debug(`Computed digest: ${extractResponse.sha256Digest}`); - core18.debug(`Expected digest: ${options.expectedHash}`); + core19.debug(`Computed digest: ${extractResponse.sha256Digest}`); + core19.debug(`Expected digest: ${options.expectedHash}`); } } } catch (error2) { @@ -117180,10 +117180,10 @@ Are you trying to download from a different run? Try specifying a github-token w function resolveOrCreateDirectory(downloadPath = (0, config_1.getGitHubWorkspaceDir)()) { return __awaiter4(this, void 0, void 0, function* () { if (!(yield exists(downloadPath))) { - core18.debug(`Artifact destination folder does not exist, creating: ${downloadPath}`); + core19.debug(`Artifact destination folder does not exist, creating: ${downloadPath}`); yield promises_1.default.mkdir(downloadPath, { recursive: true }); } else { - core18.debug(`Artifact destination folder already exists: ${downloadPath}`); + core19.debug(`Artifact destination folder already exists: ${downloadPath}`); } return downloadPath; }); @@ -117224,7 +117224,7 @@ var require_retry_options = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getRetryOptions = void 0; - var core18 = __importStar4(require_core()); + var core19 = __importStar4(require_core()); var defaultMaxRetryNumber = 5; var defaultExemptStatusCodes = [400, 401, 403, 404, 422]; function getRetryOptions(defaultOptions, retries = defaultMaxRetryNumber, exemptStatusCodes = defaultExemptStatusCodes) { @@ -117239,7 +117239,7 @@ var require_retry_options = __commonJS({ retryOptions.doNotRetry = exemptStatusCodes; } const requestOptions = Object.assign(Object.assign({}, defaultOptions.request), { retries }); - core18.debug(`GitHub client configured with: (retries: ${requestOptions.retries}, retry-exempt-status-code: ${(_a = retryOptions.doNotRetry) !== null && _a !== void 0 ? _a : "octokit default: [400, 401, 403, 404, 422]"})`); + core19.debug(`GitHub client configured with: (retries: ${requestOptions.retries}, retry-exempt-status-code: ${(_a = retryOptions.doNotRetry) !== null && _a !== void 0 ? 
_a : "octokit default: [400, 401, 403, 404, 422]"})`); return [retryOptions, requestOptions]; } exports2.getRetryOptions = getRetryOptions; @@ -117396,7 +117396,7 @@ var require_get_artifact = __commonJS({ exports2.getArtifactInternal = exports2.getArtifactPublic = void 0; var github_1 = require_github2(); var plugin_retry_1 = require_dist_node25(); - var core18 = __importStar4(require_core()); + var core19 = __importStar4(require_core()); var utils_1 = require_utils13(); var retry_options_1 = require_retry_options(); var plugin_request_log_1 = require_dist_node24(); @@ -117434,7 +117434,7 @@ var require_get_artifact = __commonJS({ let artifact2 = getArtifactResp.data.artifacts[0]; if (getArtifactResp.data.artifacts.length > 1) { artifact2 = getArtifactResp.data.artifacts.sort((a, b) => b.id - a.id)[0]; - core18.debug(`More than one artifact found for a single name, returning newest (id: ${artifact2.id})`); + core19.debug(`More than one artifact found for a single name, returning newest (id: ${artifact2.id})`); } return { artifact: { @@ -117467,7 +117467,7 @@ var require_get_artifact = __commonJS({ let artifact2 = res.artifacts[0]; if (res.artifacts.length > 1) { artifact2 = res.artifacts.sort((a, b) => Number(b.databaseId) - Number(a.databaseId))[0]; - core18.debug(`More than one artifact found for a single name, returning newest (id: ${artifact2.databaseId})`); + core19.debug(`More than one artifact found for a single name, returning newest (id: ${artifact2.databaseId})`); } return { artifact: { @@ -119415,7 +119415,7 @@ var require_requestUtils2 = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.retryHttpClientRequest = exports2.retry = void 0; var utils_1 = require_utils14(); - var core18 = __importStar4(require_core()); + var core19 = __importStar4(require_core()); var config_variables_1 = require_config_variables(); function retry3(name, operation, customErrorMessages, maxAttempts) { return __awaiter4(this, void 0, void 0, function* () { @@ -119442,13 +119442,13 @@ var require_requestUtils2 = __commonJS({ errorMessage = error2.message; } if (!isRetryable) { - core18.info(`${name} - Error is not retryable`); + core19.info(`${name} - Error is not retryable`); if (response) { (0, utils_1.displayHttpDiagnostics)(response); } break; } - core18.info(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); + core19.info(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); yield (0, utils_1.sleep)((0, utils_1.getExponentialRetryTimeInMilliseconds)(attempt)); attempt++; } @@ -119532,7 +119532,7 @@ var require_upload_http_client = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.UploadHttpClient = void 0; var fs20 = __importStar4(require("fs")); - var core18 = __importStar4(require_core()); + var core19 = __importStar4(require_core()); var tmp = __importStar4(require_tmp_promise()); var stream2 = __importStar4(require("stream")); var utils_1 = require_utils14(); @@ -119597,7 +119597,7 @@ var require_upload_http_client = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { const FILE_CONCURRENCY = (0, config_variables_1.getUploadFileConcurrency)(); const MAX_CHUNK_SIZE = (0, config_variables_1.getUploadChunkSize)(); - core18.debug(`File Concurrency: ${FILE_CONCURRENCY}, and Chunk Size: ${MAX_CHUNK_SIZE}`); + core19.debug(`File Concurrency: ${FILE_CONCURRENCY}, and Chunk Size: ${MAX_CHUNK_SIZE}`); const parameters = []; let continueOnError = true; 
if (options) { @@ -119634,15 +119634,15 @@ var require_upload_http_client = __commonJS({ } const startTime = perf_hooks_1.performance.now(); const uploadFileResult = yield this.uploadFileAsync(index, currentFileParameters); - if (core18.isDebug()) { - core18.debug(`File: ${++completedFiles}/${filesToUpload.length}. ${currentFileParameters.file} took ${(perf_hooks_1.performance.now() - startTime).toFixed(3)} milliseconds to finish upload`); + if (core19.isDebug()) { + core19.debug(`File: ${++completedFiles}/${filesToUpload.length}. ${currentFileParameters.file} took ${(perf_hooks_1.performance.now() - startTime).toFixed(3)} milliseconds to finish upload`); } uploadFileSize += uploadFileResult.successfulUploadSize; totalFileSize += uploadFileResult.totalSize; if (uploadFileResult.isSuccess === false) { failedItemsToReport.push(currentFileParameters.file); if (!continueOnError) { - core18.error(`aborting artifact upload`); + core19.error(`aborting artifact upload`); abortPendingFileUploads = true; } } @@ -119651,7 +119651,7 @@ var require_upload_http_client = __commonJS({ }))); this.statusReporter.stop(); this.uploadHttpManager.disposeAndReplaceAllClients(); - core18.info(`Total size of all the files uploaded is ${uploadFileSize} bytes`); + core19.info(`Total size of all the files uploaded is ${uploadFileSize} bytes`); return { uploadSize: uploadFileSize, totalSize: totalFileSize, @@ -119677,16 +119677,16 @@ var require_upload_http_client = __commonJS({ let uploadFileSize = 0; let isGzip = true; if (!isFIFO && totalFileSize < 65536) { - core18.debug(`${parameters.file} is less than 64k in size. Creating a gzip file in-memory to potentially reduce the upload size`); + core19.debug(`${parameters.file} is less than 64k in size. Creating a gzip file in-memory to potentially reduce the upload size`); const buffer = yield (0, upload_gzip_1.createGZipFileInBuffer)(parameters.file); let openUploadStream; if (totalFileSize < buffer.byteLength) { - core18.debug(`The gzip file created for ${parameters.file} did not help with reducing the size of the file. The original file will be uploaded as-is`); + core19.debug(`The gzip file created for ${parameters.file} did not help with reducing the size of the file. The original file will be uploaded as-is`); openUploadStream = () => fs20.createReadStream(parameters.file); isGzip = false; uploadFileSize = totalFileSize; } else { - core18.debug(`A gzip file created for ${parameters.file} helped with reducing the size of the original file. The file will be uploaded using gzip.`); + core19.debug(`A gzip file created for ${parameters.file} helped with reducing the size of the original file. The file will be uploaded using gzip.`); openUploadStream = () => { const passThrough = new stream2.PassThrough(); passThrough.end(buffer); @@ -119698,7 +119698,7 @@ var require_upload_http_client = __commonJS({ if (!result) { isUploadSuccessful = false; failedChunkSizes += uploadFileSize; - core18.warning(`Aborting upload for ${parameters.file} due to failure`); + core19.warning(`Aborting upload for ${parameters.file} due to failure`); } return { isSuccess: isUploadSuccessful, @@ -119707,16 +119707,16 @@ var require_upload_http_client = __commonJS({ }; } else { const tempFile = yield tmp.file(); - core18.debug(`${parameters.file} is greater than 64k in size. Creating a gzip file on-disk ${tempFile.path} to potentially reduce the upload size`); + core19.debug(`${parameters.file} is greater than 64k in size. 
Creating a gzip file on-disk ${tempFile.path} to potentially reduce the upload size`); uploadFileSize = yield (0, upload_gzip_1.createGZipFileOnDisk)(parameters.file, tempFile.path); let uploadFilePath = tempFile.path; if (!isFIFO && totalFileSize < uploadFileSize) { - core18.debug(`The gzip file created for ${parameters.file} did not help with reducing the size of the file. The original file will be uploaded as-is`); + core19.debug(`The gzip file created for ${parameters.file} did not help with reducing the size of the file. The original file will be uploaded as-is`); uploadFileSize = totalFileSize; uploadFilePath = parameters.file; isGzip = false; } else { - core18.debug(`The gzip file created for ${parameters.file} is smaller than the original file. The file will be uploaded using gzip.`); + core19.debug(`The gzip file created for ${parameters.file} is smaller than the original file. The file will be uploaded using gzip.`); } let abortFileUpload = false; while (offset < uploadFileSize) { @@ -119736,7 +119736,7 @@ var require_upload_http_client = __commonJS({ if (!result) { isUploadSuccessful = false; failedChunkSizes += chunkSize; - core18.warning(`Aborting upload for ${parameters.file} due to failure`); + core19.warning(`Aborting upload for ${parameters.file} due to failure`); abortFileUpload = true; } else { if (uploadFileSize > 8388608) { @@ -119744,7 +119744,7 @@ var require_upload_http_client = __commonJS({ } } } - core18.debug(`deleting temporary gzip file ${tempFile.path}`); + core19.debug(`deleting temporary gzip file ${tempFile.path}`); yield tempFile.cleanup(); return { isSuccess: isUploadSuccessful, @@ -119783,7 +119783,7 @@ var require_upload_http_client = __commonJS({ if (response) { (0, utils_1.displayHttpDiagnostics)(response); } - core18.info(`Retry limit has been reached for chunk at offset ${start} to ${resourceUrl}`); + core19.info(`Retry limit has been reached for chunk at offset ${start} to ${resourceUrl}`); return true; } return false; @@ -119791,14 +119791,14 @@ var require_upload_http_client = __commonJS({ const backOff = (retryAfterValue) => __awaiter4(this, void 0, void 0, function* () { this.uploadHttpManager.disposeAndReplaceClient(httpClientIndex); if (retryAfterValue) { - core18.info(`Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the upload`); + core19.info(`Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the upload`); yield (0, utils_1.sleep)(retryAfterValue); } else { const backoffTime = (0, utils_1.getExponentialRetryTimeInMilliseconds)(retryCount); - core18.info(`Exponential backoff for retry #${retryCount}. Waiting for ${backoffTime} milliseconds before continuing the upload at offset ${start}`); + core19.info(`Exponential backoff for retry #${retryCount}. 
Waiting for ${backoffTime} milliseconds before continuing the upload at offset ${start}`); yield (0, utils_1.sleep)(backoffTime); } - core18.info(`Finished backoff for retry #${retryCount}, continuing with upload`); + core19.info(`Finished backoff for retry #${retryCount}, continuing with upload`); return; }); while (retryCount <= retryLimit) { @@ -119806,7 +119806,7 @@ var require_upload_http_client = __commonJS({ try { response = yield uploadChunkRequest(); } catch (error2) { - core18.info(`An error has been caught http-client index ${httpClientIndex}, retrying the upload`); + core19.info(`An error has been caught http-client index ${httpClientIndex}, retrying the upload`); console.log(error2); if (incrementAndCheckRetryLimit()) { return false; @@ -119818,13 +119818,13 @@ var require_upload_http_client = __commonJS({ if ((0, utils_1.isSuccessStatusCode)(response.message.statusCode)) { return true; } else if ((0, utils_1.isRetryableStatusCode)(response.message.statusCode)) { - core18.info(`A ${response.message.statusCode} status code has been received, will attempt to retry the upload`); + core19.info(`A ${response.message.statusCode} status code has been received, will attempt to retry the upload`); if (incrementAndCheckRetryLimit(response)) { return false; } (0, utils_1.isThrottledStatusCode)(response.message.statusCode) ? yield backOff((0, utils_1.tryGetRetryAfterValueTimeInMilliseconds)(response.message.headers)) : yield backOff(); } else { - core18.error(`Unexpected response. Unable to upload chunk to ${resourceUrl}`); + core19.error(`Unexpected response. Unable to upload chunk to ${resourceUrl}`); (0, utils_1.displayHttpDiagnostics)(response); return false; } @@ -119842,7 +119842,7 @@ var require_upload_http_client = __commonJS({ resourceUrl.searchParams.append("artifactName", artifactName); const parameters = { Size: size }; const data = JSON.stringify(parameters, null, 2); - core18.debug(`URL is ${resourceUrl.toString()}`); + core19.debug(`URL is ${resourceUrl.toString()}`); const client = this.uploadHttpManager.getClient(0); const headers = (0, utils_1.getUploadHeaders)("application/json", false); const customErrorMessages = /* @__PURE__ */ new Map([ @@ -119855,7 +119855,7 @@ var require_upload_http_client = __commonJS({ return client.patch(resourceUrl.toString(), data, headers); }), customErrorMessages); yield response.readBody(); - core18.debug(`Artifact ${artifactName} has been successfully uploaded, total size in bytes: ${size}`); + core19.debug(`Artifact ${artifactName} has been successfully uploaded, total size in bytes: ${size}`); }); } }; @@ -119924,7 +119924,7 @@ var require_download_http_client = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DownloadHttpClient = void 0; var fs20 = __importStar4(require("fs")); - var core18 = __importStar4(require_core()); + var core19 = __importStar4(require_core()); var zlib3 = __importStar4(require("zlib")); var utils_1 = require_utils14(); var url_1 = require("url"); @@ -119978,11 +119978,11 @@ var require_download_http_client = __commonJS({ downloadSingleArtifact(downloadItems) { return __awaiter4(this, void 0, void 0, function* () { const DOWNLOAD_CONCURRENCY = (0, config_variables_1.getDownloadFileConcurrency)(); - core18.debug(`Download file concurrency is set to ${DOWNLOAD_CONCURRENCY}`); + core19.debug(`Download file concurrency is set to ${DOWNLOAD_CONCURRENCY}`); const parallelDownloads = [...new Array(DOWNLOAD_CONCURRENCY).keys()]; let currentFile = 0; let downloadedFiles = 0; - 
core18.info(`Total number of files that will be downloaded: ${downloadItems.length}`); + core19.info(`Total number of files that will be downloaded: ${downloadItems.length}`); this.statusReporter.setTotalNumberOfFilesToProcess(downloadItems.length); this.statusReporter.start(); yield Promise.all(parallelDownloads.map((index) => __awaiter4(this, void 0, void 0, function* () { @@ -119991,8 +119991,8 @@ var require_download_http_client = __commonJS({ currentFile += 1; const startTime = perf_hooks_1.performance.now(); yield this.downloadIndividualFile(index, currentFileToDownload.sourceLocation, currentFileToDownload.targetPath); - if (core18.isDebug()) { - core18.debug(`File: ${++downloadedFiles}/${downloadItems.length}. ${currentFileToDownload.targetPath} took ${(perf_hooks_1.performance.now() - startTime).toFixed(3)} milliseconds to finish downloading`); + if (core19.isDebug()) { + core19.debug(`File: ${++downloadedFiles}/${downloadItems.length}. ${currentFileToDownload.targetPath} took ${(perf_hooks_1.performance.now() - startTime).toFixed(3)} milliseconds to finish downloading`); } this.statusReporter.incrementProcessedCount(); } @@ -120030,19 +120030,19 @@ var require_download_http_client = __commonJS({ } else { this.downloadHttpManager.disposeAndReplaceClient(httpClientIndex); if (retryAfterValue) { - core18.info(`Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the download`); + core19.info(`Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the download`); yield (0, utils_1.sleep)(retryAfterValue); } else { const backoffTime = (0, utils_1.getExponentialRetryTimeInMilliseconds)(retryCount); - core18.info(`Exponential backoff for retry #${retryCount}. Waiting for ${backoffTime} milliseconds before continuing the download`); + core19.info(`Exponential backoff for retry #${retryCount}. Waiting for ${backoffTime} milliseconds before continuing the download`); yield (0, utils_1.sleep)(backoffTime); } - core18.info(`Finished backoff for retry #${retryCount}, continuing with download`); + core19.info(`Finished backoff for retry #${retryCount}, continuing with download`); } }); const isAllBytesReceived = (expected, received) => { if (!expected || !received || process.env["ACTIONS_ARTIFACT_SKIP_DOWNLOAD_VALIDATION"]) { - core18.info("Skipping download validation."); + core19.info("Skipping download validation."); return true; } return parseInt(expected) === received; @@ -120063,7 +120063,7 @@ var require_download_http_client = __commonJS({ try { response = yield makeDownloadRequest(); } catch (error2) { - core18.info("An error occurred while attempting to download a file"); + core19.info("An error occurred while attempting to download a file"); console.log(error2); yield backOff(); continue; @@ -120083,7 +120083,7 @@ var require_download_http_client = __commonJS({ } } if (forceRetry || (0, utils_1.isRetryableStatusCode)(response.message.statusCode)) { - core18.info(`A ${response.message.statusCode} response code has been received while attempting to download an artifact`); + core19.info(`A ${response.message.statusCode} response code has been received while attempting to download an artifact`); resetDestinationStream(downloadPath); (0, utils_1.isThrottledStatusCode)(response.message.statusCode) ? 
yield backOff((0, utils_1.tryGetRetryAfterValueTimeInMilliseconds)(response.message.headers)) : yield backOff(); } else { @@ -120105,29 +120105,29 @@ var require_download_http_client = __commonJS({ if (isGzip) { const gunzip = zlib3.createGunzip(); response.message.on("error", (error2) => { - core18.info(`An error occurred while attempting to read the response stream`); + core19.info(`An error occurred while attempting to read the response stream`); gunzip.close(); destinationStream.close(); reject(error2); }).pipe(gunzip).on("error", (error2) => { - core18.info(`An error occurred while attempting to decompress the response stream`); + core19.info(`An error occurred while attempting to decompress the response stream`); destinationStream.close(); reject(error2); }).pipe(destinationStream).on("close", () => { resolve8(); }).on("error", (error2) => { - core18.info(`An error occurred while writing a downloaded file to ${destinationStream.path}`); + core19.info(`An error occurred while writing a downloaded file to ${destinationStream.path}`); reject(error2); }); } else { response.message.on("error", (error2) => { - core18.info(`An error occurred while attempting to read the response stream`); + core19.info(`An error occurred while attempting to read the response stream`); destinationStream.close(); reject(error2); }).pipe(destinationStream).on("close", () => { resolve8(); }).on("error", (error2) => { - core18.info(`An error occurred while writing a downloaded file to ${destinationStream.path}`); + core19.info(`An error occurred while writing a downloaded file to ${destinationStream.path}`); reject(error2); }); } @@ -120266,7 +120266,7 @@ var require_artifact_client = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultArtifactClient = void 0; - var core18 = __importStar4(require_core()); + var core19 = __importStar4(require_core()); var upload_specification_1 = require_upload_specification(); var upload_http_client_1 = require_upload_http_client(); var utils_1 = require_utils14(); @@ -120287,7 +120287,7 @@ var require_artifact_client = __commonJS({ */ uploadArtifact(name, files, rootDirectory, options) { return __awaiter4(this, void 0, void 0, function* () { - core18.info(`Starting artifact upload + core19.info(`Starting artifact upload For more detailed logs during the artifact upload process, enable step-debugging: https://docs.github.com/actions/monitoring-and-troubleshooting-workflows/enabling-debug-logging#enabling-step-debug-logging`); (0, path_and_artifact_name_validation_1.checkArtifactName)(name); const uploadSpecification = (0, upload_specification_1.getUploadSpecification)(name, rootDirectory, files); @@ -120299,24 +120299,24 @@ For more detailed logs during the artifact upload process, enable step-debugging }; const uploadHttpClient = new upload_http_client_1.UploadHttpClient(); if (uploadSpecification.length === 0) { - core18.warning(`No files found that can be uploaded`); + core19.warning(`No files found that can be uploaded`); } else { const response = yield uploadHttpClient.createArtifactInFileContainer(name, options); if (!response.fileContainerResourceUrl) { - core18.debug(response.toString()); + core19.debug(response.toString()); throw new Error("No URL provided by the Artifact Service to upload an artifact to"); } - core18.debug(`Upload Resource URL: ${response.fileContainerResourceUrl}`); - core18.info(`Container for artifact "${name}" successfully created. 
Starting upload of file(s)`); + core19.debug(`Upload Resource URL: ${response.fileContainerResourceUrl}`); + core19.info(`Container for artifact "${name}" successfully created. Starting upload of file(s)`); const uploadResult = yield uploadHttpClient.uploadArtifactToFileContainer(response.fileContainerResourceUrl, uploadSpecification, options); - core18.info(`File upload process has finished. Finalizing the artifact upload`); + core19.info(`File upload process has finished. Finalizing the artifact upload`); yield uploadHttpClient.patchArtifactSize(uploadResult.totalSize, name); if (uploadResult.failedItems.length > 0) { - core18.info(`Upload finished. There were ${uploadResult.failedItems.length} items that failed to upload`); + core19.info(`Upload finished. There were ${uploadResult.failedItems.length} items that failed to upload`); } else { - core18.info(`Artifact has been finalized. All files have been successfully uploaded!`); + core19.info(`Artifact has been finalized. All files have been successfully uploaded!`); } - core18.info(` + core19.info(` The raw size of all the files that were specified for upload is ${uploadResult.totalSize} bytes The size of all the files that were uploaded is ${uploadResult.uploadSize} bytes. This takes into account any gzip compression used to reduce the upload size, time and storage @@ -120350,10 +120350,10 @@ Note: The size of downloaded zips can differ significantly from the reported siz path19 = (0, path_1.resolve)(path19); const downloadSpecification = (0, download_specification_1.getDownloadSpecification)(name, items.value, path19, (options === null || options === void 0 ? void 0 : options.createArtifactFolder) || false); if (downloadSpecification.filesToDownload.length === 0) { - core18.info(`No downloadable files were found for the artifact: ${artifactToDownload.name}`); + core19.info(`No downloadable files were found for the artifact: ${artifactToDownload.name}`); } else { yield (0, utils_1.createDirectoriesForArtifact)(downloadSpecification.directoryStructure); - core18.info("Directory structure has been set up for the artifact"); + core19.info("Directory structure has been set up for the artifact"); yield (0, utils_1.createEmptyFilesForArtifact)(downloadSpecification.emptyFilesToCreate); yield downloadHttpClient.downloadSingleArtifact(downloadSpecification.filesToDownload); } @@ -120369,7 +120369,7 @@ Note: The size of downloaded zips can differ significantly from the reported siz const response = []; const artifacts = yield downloadHttpClient.listArtifacts(); if (artifacts.count === 0) { - core18.info("Unable to find any artifacts for the associated workflow"); + core19.info("Unable to find any artifacts for the associated workflow"); return response; } if (!path19) { @@ -120381,11 +120381,11 @@ Note: The size of downloaded zips can differ significantly from the reported siz while (downloadedArtifacts < artifacts.count) { const currentArtifactToDownload = artifacts.value[downloadedArtifacts]; downloadedArtifacts += 1; - core18.info(`starting download of artifact ${currentArtifactToDownload.name} : ${downloadedArtifacts}/${artifacts.count}`); + core19.info(`starting download of artifact ${currentArtifactToDownload.name} : ${downloadedArtifacts}/${artifacts.count}`); const items = yield downloadHttpClient.getContainerItems(currentArtifactToDownload.name, currentArtifactToDownload.fileContainerResourceUrl); const downloadSpecification = (0, download_specification_1.getDownloadSpecification)(currentArtifactToDownload.name, items.value, path19, 
true); if (downloadSpecification.filesToDownload.length === 0) { - core18.info(`No downloadable files were found for any artifact ${currentArtifactToDownload.name}`); + core19.info(`No downloadable files were found for any artifact ${currentArtifactToDownload.name}`); } else { yield (0, utils_1.createDirectoriesForArtifact)(downloadSpecification.directoryStructure); yield (0, utils_1.createEmptyFilesForArtifact)(downloadSpecification.emptyFilesToCreate); @@ -120451,7 +120451,7 @@ var require_internal_glob_options_helper2 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getOptions = void 0; - var core18 = __importStar4(require_core()); + var core19 = __importStar4(require_core()); function getOptions(copy) { const result = { followSymbolicLinks: true, @@ -120463,23 +120463,23 @@ var require_internal_glob_options_helper2 = __commonJS({ if (copy) { if (typeof copy.followSymbolicLinks === "boolean") { result.followSymbolicLinks = copy.followSymbolicLinks; - core18.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); + core19.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); } if (typeof copy.implicitDescendants === "boolean") { result.implicitDescendants = copy.implicitDescendants; - core18.debug(`implicitDescendants '${result.implicitDescendants}'`); + core19.debug(`implicitDescendants '${result.implicitDescendants}'`); } if (typeof copy.matchDirectories === "boolean") { result.matchDirectories = copy.matchDirectories; - core18.debug(`matchDirectories '${result.matchDirectories}'`); + core19.debug(`matchDirectories '${result.matchDirectories}'`); } if (typeof copy.omitBrokenSymbolicLinks === "boolean") { result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; - core18.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); + core19.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); } if (typeof copy.excludeHiddenFiles === "boolean") { result.excludeHiddenFiles = copy.excludeHiddenFiles; - core18.debug(`excludeHiddenFiles '${result.excludeHiddenFiles}'`); + core19.debug(`excludeHiddenFiles '${result.excludeHiddenFiles}'`); } } return result; @@ -121163,7 +121163,7 @@ var require_internal_globber2 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; - var core18 = __importStar4(require_core()); + var core19 = __importStar4(require_core()); var fs20 = __importStar4(require("fs")); var globOptionsHelper = __importStar4(require_internal_glob_options_helper2()); var path19 = __importStar4(require("path")); @@ -121216,7 +121216,7 @@ var require_internal_globber2 = __commonJS({ } const stack = []; for (const searchPath of patternHelper.getSearchPaths(patterns)) { - core18.debug(`Search path '${searchPath}'`); + core19.debug(`Search path '${searchPath}'`); try { yield __await4(fs20.promises.lstat(searchPath)); } catch (err) { @@ -121291,7 +121291,7 @@ var require_internal_globber2 = __commonJS({ } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { - core18.debug(`Broken symlink '${item.path}'`); + core19.debug(`Broken symlink '${item.path}'`); return void 0; } throw new Error(`No information found for the path '${item.path}'. 
This may indicate a broken symbolic link.`); @@ -121307,7 +121307,7 @@ var require_internal_globber2 = __commonJS({ traversalChain.pop(); } if (traversalChain.some((x) => x === realPath)) { - core18.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); + core19.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); return void 0; } traversalChain.push(realPath); @@ -121400,7 +121400,7 @@ var require_internal_hash_files = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.hashFiles = void 0; var crypto = __importStar4(require("crypto")); - var core18 = __importStar4(require_core()); + var core19 = __importStar4(require_core()); var fs20 = __importStar4(require("fs")); var stream2 = __importStar4(require("stream")); var util = __importStar4(require("util")); @@ -121409,7 +121409,7 @@ var require_internal_hash_files = __commonJS({ var _a, e_1, _b, _c; var _d; return __awaiter4(this, void 0, void 0, function* () { - const writeDelegate = verbose ? core18.info : core18.debug; + const writeDelegate = verbose ? core19.info : core19.debug; let hasMatch = false; const githubWorkspace = currentWorkspace ? currentWorkspace : (_d = process.env["GITHUB_WORKSPACE"]) !== null && _d !== void 0 ? _d : process.cwd(); const result = crypto.createHash("sha256"); @@ -124412,7 +124412,7 @@ var require_sarif_schema_2_1_0 = __commonJS({ }); // src/init-action-post.ts -var core17 = __toESM(require_core()); +var core18 = __toESM(require_core()); // src/actions-util.ts var fs5 = __toESM(require("fs")); @@ -131581,7 +131581,7 @@ async function createDatabaseBundleCli(codeql, config, language) { // src/init-action-post-helper.ts var fs19 = __toESM(require("fs")); -var core16 = __toESM(require_core()); +var core17 = __toESM(require_core()); var github2 = __toESM(require_github()); // src/status-report.ts @@ -131787,11 +131787,11 @@ async function sendStatusReport(statusReport) { } // src/upload-lib.ts -var fs17 = __toESM(require("fs")); -var path17 = __toESM(require("path")); +var fs18 = __toESM(require("fs")); +var path18 = __toESM(require("path")); var url = __toESM(require("url")); -var import_zlib = __toESM(require("zlib")); -var core14 = __toESM(require_core()); +var import_zlib2 = __toESM(require("zlib")); +var core16 = __toESM(require_core()); var jsonschema2 = __toESM(require_lib2()); // src/fingerprints.ts @@ -132917,8 +132917,143 @@ async function addFingerprints(sarif, sourceRoot, logger) { } // src/init.ts +var core15 = __toESM(require_core()); var toolrunner4 = __toESM(require_toolrunner()); var io6 = __toESM(require_io()); + +// src/workflow.ts +var fs17 = __toESM(require("fs")); +var path17 = __toESM(require("path")); +var import_zlib = __toESM(require("zlib")); +var core14 = __toESM(require_core()); +function toCodedErrors(errors) { + return Object.entries(errors).reduce( + (acc, [code, message]) => { + acc[code] = { message, code }; + return acc; + }, + {} + ); +} +var WorkflowErrors = toCodedErrors({ + MissingPushHook: `Please specify an on.push hook to analyze and see code scanning alerts from the default branch on the Security tab.`, + CheckoutWrongHead: `git checkout HEAD^2 is no longer necessary. Please remove this step as Code Scanning recommends analyzing the merge commit for best results.`, + InconsistentActionVersion: `Not all workflow steps that use \`github/codeql-action\` actions use the same version. 
Please ensure that all such steps use the same version to avoid compatibility issues.` +}); +async function getWorkflow(logger) { + const maybeWorkflow = process.env["CODE_SCANNING_WORKFLOW_FILE"]; + if (maybeWorkflow) { + logger.debug( + "Using the workflow specified by the CODE_SCANNING_WORKFLOW_FILE environment variable." + ); + return load( + import_zlib.default.gunzipSync(Buffer.from(maybeWorkflow, "base64")).toString() + ); + } + const workflowPath = await getWorkflowAbsolutePath(logger); + return load(fs17.readFileSync(workflowPath, "utf-8")); +} +async function getWorkflowAbsolutePath(logger) { + const relativePath = await getWorkflowRelativePath(); + const absolutePath = path17.join( + getRequiredEnvParam("GITHUB_WORKSPACE"), + relativePath + ); + if (fs17.existsSync(absolutePath)) { + logger.debug( + `Derived the following absolute path for the currently executing workflow: ${absolutePath}.` + ); + return absolutePath; + } + throw new Error( + `Expected to find a code scanning workflow file at ${absolutePath}, but no such file existed. This can happen if the currently running workflow checks out a branch that doesn't contain the corresponding workflow file.` + ); +} +function getStepsCallingAction(job, actionName) { + if (job.uses) { + throw new Error( + `Could not get steps calling ${actionName} since the job calls a reusable workflow.` + ); + } + const steps = job.steps; + if (!Array.isArray(steps)) { + throw new Error( + `Could not get steps calling ${actionName} since job.steps was not an array.` + ); + } + return steps.filter((step) => step.uses?.includes(actionName)); +} +function getInputOrThrow(workflow, jobName, actionName, inputName, matrixVars) { + const preamble = `Could not get ${inputName} input to ${actionName} since`; + if (!workflow.jobs) { + throw new Error(`${preamble} the workflow has no jobs.`); + } + if (!workflow.jobs[jobName]) { + throw new Error(`${preamble} the workflow has no job named ${jobName}.`); + } + const stepsCallingAction = getStepsCallingAction( + workflow.jobs[jobName], + actionName + ); + if (stepsCallingAction.length === 0) { + throw new Error( + `${preamble} the ${jobName} job does not call ${actionName}.` + ); + } else if (stepsCallingAction.length > 1) { + throw new Error( + `${preamble} the ${jobName} job calls ${actionName} multiple times.` + ); + } + let input = stepsCallingAction[0].with?.[inputName]?.toString(); + if (input !== void 0 && matrixVars !== void 0) { + input = input.replace(/\${{\s+/, "${{").replace(/\s+}}/, "}}"); + for (const [key, value] of Object.entries(matrixVars)) { + input = input.replace(`\${{matrix.${key}}}`, value); + } + } + if (input?.includes("${{")) { + throw new Error( + `Could not get ${inputName} input to ${actionName} since it contained an unrecognized dynamic value.` + ); + } + return input; +} +function getAnalyzeActionName() { + if (isInTestMode() || getTestingEnvironment() === "codeql-action-pr-checks") { + return "./analyze"; + } else { + return "github/codeql-action/analyze"; + } +} +function getCategoryInputOrThrow(workflow, jobName, matrixVars) { + return getInputOrThrow( + workflow, + jobName, + getAnalyzeActionName(), + "category", + matrixVars + ); +} +function getUploadInputOrThrow(workflow, jobName, matrixVars) { + return getInputOrThrow( + workflow, + jobName, + getAnalyzeActionName(), + "upload", + matrixVars + ); +} +function getCheckoutPathInputOrThrow(workflow, jobName, matrixVars) { + return getInputOrThrow( + workflow, + jobName, + getAnalyzeActionName(), + "checkout_path", + 
matrixVars + ) || getRequiredEnvParam("GITHUB_WORKSPACE"); +} + +// src/init.ts async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger) { logger.startGroup("Setup CodeQL tools"); const { @@ -132960,7 +133095,7 @@ function combineSarifFiles(sarifFiles, logger) { for (const sarifFile of sarifFiles) { logger.debug(`Loading SARIF file: ${sarifFile}`); const sarifObject = JSON.parse( - fs17.readFileSync(sarifFile, "utf8") + fs18.readFileSync(sarifFile, "utf8") ); if (combinedSarif.version === null) { combinedSarif.version = sarifObject.version; @@ -133032,7 +133167,7 @@ async function shouldDisableCombineSarifFiles(sarifObjects, githubVersion) { async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, logger) { logger.info("Combining SARIF files using the CodeQL CLI"); const sarifObjects = sarifFiles.map((sarifFile) => { - return JSON.parse(fs17.readFileSync(sarifFile, "utf8")); + return JSON.parse(fs18.readFileSync(sarifFile, "utf8")); }); const deprecationWarningMessage = gitHubVersion.type === 1 /* GHES */ ? "and will be removed in GitHub Enterprise Server 3.18" : "and will be removed in July 2025"; const deprecationMoreInformationMessage = "For more information, see https://github.blog/changelog/2024-05-06-code-scanning-will-stop-combining-runs-from-a-single-upload"; @@ -133048,7 +133183,7 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo logger.warning( `Uploading multiple SARIF runs with the same category is deprecated ${deprecationWarningMessage}. Please update your workflow to upload a single run per category. ${deprecationMoreInformationMessage}` ); - core14.exportVariable("CODEQL_MERGE_SARIF_DEPRECATION_WARNING", "true"); + core16.exportVariable("CODEQL_MERGE_SARIF_DEPRECATION_WARNING", "true"); } return combineSarifFiles(sarifFiles, logger); } @@ -133085,14 +133220,14 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo ); codeQL = initCodeQLResult.codeql; } - const baseTempDir = path17.resolve(tempDir, "combined-sarif"); - fs17.mkdirSync(baseTempDir, { recursive: true }); - const outputDirectory = fs17.mkdtempSync(path17.resolve(baseTempDir, "output-")); - const outputFile = path17.resolve(outputDirectory, "combined-sarif.sarif"); + const baseTempDir = path18.resolve(tempDir, "combined-sarif"); + fs18.mkdirSync(baseTempDir, { recursive: true }); + const outputDirectory = fs18.mkdtempSync(path18.resolve(baseTempDir, "output-")); + const outputFile = path18.resolve(outputDirectory, "combined-sarif.sarif"); await codeQL.mergeResults(sarifFiles, outputFile, { mergeRunsFromEqualCategory: true }); - return JSON.parse(fs17.readFileSync(outputFile, "utf8")); + return JSON.parse(fs18.readFileSync(outputFile, "utf8")); } function populateRunAutomationDetails(sarif, category, analysis_key, environment) { const automationID = getAutomationID2(category, analysis_key, environment); @@ -133121,7 +133256,7 @@ function getAutomationID2(category, analysis_key, environment) { async function uploadPayload(payload, repositoryNwo, logger, analysis) { logger.info("Uploading results"); if (shouldSkipSarifUpload()) { - const payloadSaveFile = path17.join( + const payloadSaveFile = path18.join( getTemporaryDirectory(), `payload-${analysis.kind}.json` ); @@ -133129,7 +133264,7 @@ async function uploadPayload(payload, repositoryNwo, logger, analysis) { `SARIF upload disabled by an environment variable. 
Saving to ${payloadSaveFile}` ); logger.info(`Payload: ${JSON.stringify(payload, null, 2)}`); - fs17.writeFileSync(payloadSaveFile, JSON.stringify(payload, null, 2)); + fs18.writeFileSync(payloadSaveFile, JSON.stringify(payload, null, 2)); return "dummy-sarif-id"; } const client = getApiClient(); @@ -133147,13 +133282,13 @@ async function uploadPayload(payload, repositoryNwo, logger, analysis) { if (httpError !== void 0) { switch (httpError.status) { case 403: - core14.warning(httpError.message || GENERIC_403_MSG); + core16.warning(httpError.message || GENERIC_403_MSG); break; case 404: - core14.warning(httpError.message || GENERIC_404_MSG); + core16.warning(httpError.message || GENERIC_404_MSG); break; default: - core14.warning(httpError.message); + core16.warning(httpError.message); break; } } @@ -133163,12 +133298,12 @@ async function uploadPayload(payload, repositoryNwo, logger, analysis) { function findSarifFilesInDir(sarifPath, isSarif) { const sarifFiles = []; const walkSarifFiles = (dir) => { - const entries = fs17.readdirSync(dir, { withFileTypes: true }); + const entries = fs18.readdirSync(dir, { withFileTypes: true }); for (const entry of entries) { if (entry.isFile() && isSarif(entry.name)) { - sarifFiles.push(path17.resolve(dir, entry.name)); + sarifFiles.push(path18.resolve(dir, entry.name)); } else if (entry.isDirectory()) { - walkSarifFiles(path17.resolve(dir, entry.name)); + walkSarifFiles(path18.resolve(dir, entry.name)); } } }; @@ -133176,11 +133311,11 @@ function findSarifFilesInDir(sarifPath, isSarif) { return sarifFiles; } function getSarifFilePaths(sarifPath, isSarif) { - if (!fs17.existsSync(sarifPath)) { + if (!fs18.existsSync(sarifPath)) { throw new ConfigurationError(`Path does not exist: ${sarifPath}`); } let sarifFiles; - if (fs17.lstatSync(sarifPath).isDirectory()) { + if (fs18.lstatSync(sarifPath).isDirectory()) { sarifFiles = findSarifFilesInDir(sarifPath, isSarif); if (sarifFiles.length === 0) { throw new ConfigurationError( @@ -133210,7 +133345,7 @@ function countResultsInSarif(sarif) { } function readSarifFile(sarifFilePath) { try { - return JSON.parse(fs17.readFileSync(sarifFilePath, "utf8")); + return JSON.parse(fs18.readFileSync(sarifFilePath, "utf8")); } catch (e) { throw new InvalidSarifUploadError( `Invalid SARIF. JSON syntax error: ${getErrorMessage(e)}` @@ -133279,7 +133414,7 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo payloadObj.base_sha = mergeBaseCommitOid; } else if (process.env.GITHUB_EVENT_PATH) { const githubEvent = JSON.parse( - fs17.readFileSync(process.env.GITHUB_EVENT_PATH, "utf8") + fs18.readFileSync(process.env.GITHUB_EVENT_PATH, "utf8") ); payloadObj.base_ref = `refs/heads/${githubEvent.pull_request.base.ref}`; payloadObj.base_sha = githubEvent.pull_request.base.sha; @@ -133360,7 +133495,7 @@ async function uploadPostProcessedFiles(logger, checkoutPath, uploadTarget, post logger.debug(`Serializing SARIF for upload`); const sarifPayload = JSON.stringify(sarif); logger.debug(`Compressing serialized SARIF`); - const zippedSarif = import_zlib.default.gzipSync(sarifPayload).toString("base64"); + const zippedSarif = import_zlib2.default.gzipSync(sarifPayload).toString("base64"); const checkoutURI = url.pathToFileURL(checkoutPath).href; const payload = buildPayload( await getCommitOid(checkoutPath), @@ -133508,7 +133643,7 @@ function validateUniqueCategory(sarif, sentinelPrefix) { `Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job per tool/category. 
The easiest fix is to specify a unique value for the \`category\` input. If .runs[].automationDetails.id is specified in the sarif file, that will take precedence over your configured \`category\`. Category: (${id ? id : "none"}) Tool: (${tool ? tool : "none"})` ); } - core14.exportVariable(sentinelEnvVar, sentinelEnvVar); + core16.exportVariable(sentinelEnvVar, sentinelEnvVar); } } function sanitize(str2) { @@ -133538,7 +133673,7 @@ function filterAlertsByDiffRange(logger, sarif) { if (!locationUri || locationStartLine === void 0) { return false; } - const locationPath = path17.join(checkoutPath, locationUri).replaceAll(path17.sep, "/"); + const locationPath = path18.join(checkoutPath, locationUri).replaceAll(path18.sep, "/"); return diffRanges.some( (range) => range.path === locationPath && (range.startLine <= locationStartLine && range.endLine >= locationStartLine || range.startLine === 0 && range.endLine === 0) ); @@ -133549,138 +133684,6 @@ function filterAlertsByDiffRange(logger, sarif) { return sarif; } -// src/workflow.ts -var fs18 = __toESM(require("fs")); -var path18 = __toESM(require("path")); -var import_zlib2 = __toESM(require("zlib")); -var core15 = __toESM(require_core()); -function toCodedErrors(errors) { - return Object.entries(errors).reduce( - (acc, [code, message]) => { - acc[code] = { message, code }; - return acc; - }, - {} - ); -} -var WorkflowErrors = toCodedErrors({ - MissingPushHook: `Please specify an on.push hook to analyze and see code scanning alerts from the default branch on the Security tab.`, - CheckoutWrongHead: `git checkout HEAD^2 is no longer necessary. Please remove this step as Code Scanning recommends analyzing the merge commit for best results.`, - InconsistentActionVersion: `Not all workflow steps that use \`github/codeql-action\` actions use the same version. Please ensure that all such steps use the same version to avoid compatibility issues.` -}); -async function getWorkflow(logger) { - const maybeWorkflow = process.env["CODE_SCANNING_WORKFLOW_FILE"]; - if (maybeWorkflow) { - logger.debug( - "Using the workflow specified by the CODE_SCANNING_WORKFLOW_FILE environment variable." - ); - return load( - import_zlib2.default.gunzipSync(Buffer.from(maybeWorkflow, "base64")).toString() - ); - } - const workflowPath = await getWorkflowAbsolutePath(logger); - return load(fs18.readFileSync(workflowPath, "utf-8")); -} -async function getWorkflowAbsolutePath(logger) { - const relativePath = await getWorkflowRelativePath(); - const absolutePath = path18.join( - getRequiredEnvParam("GITHUB_WORKSPACE"), - relativePath - ); - if (fs18.existsSync(absolutePath)) { - logger.debug( - `Derived the following absolute path for the currently executing workflow: ${absolutePath}.` - ); - return absolutePath; - } - throw new Error( - `Expected to find a code scanning workflow file at ${absolutePath}, but no such file existed. 
This can happen if the currently running workflow checks out a branch that doesn't contain the corresponding workflow file.` - ); -} -function getStepsCallingAction(job, actionName) { - if (job.uses) { - throw new Error( - `Could not get steps calling ${actionName} since the job calls a reusable workflow.` - ); - } - const steps = job.steps; - if (!Array.isArray(steps)) { - throw new Error( - `Could not get steps calling ${actionName} since job.steps was not an array.` - ); - } - return steps.filter((step) => step.uses?.includes(actionName)); -} -function getInputOrThrow(workflow, jobName, actionName, inputName, matrixVars) { - const preamble = `Could not get ${inputName} input to ${actionName} since`; - if (!workflow.jobs) { - throw new Error(`${preamble} the workflow has no jobs.`); - } - if (!workflow.jobs[jobName]) { - throw new Error(`${preamble} the workflow has no job named ${jobName}.`); - } - const stepsCallingAction = getStepsCallingAction( - workflow.jobs[jobName], - actionName - ); - if (stepsCallingAction.length === 0) { - throw new Error( - `${preamble} the ${jobName} job does not call ${actionName}.` - ); - } else if (stepsCallingAction.length > 1) { - throw new Error( - `${preamble} the ${jobName} job calls ${actionName} multiple times.` - ); - } - let input = stepsCallingAction[0].with?.[inputName]?.toString(); - if (input !== void 0 && matrixVars !== void 0) { - input = input.replace(/\${{\s+/, "${{").replace(/\s+}}/, "}}"); - for (const [key, value] of Object.entries(matrixVars)) { - input = input.replace(`\${{matrix.${key}}}`, value); - } - } - if (input?.includes("${{")) { - throw new Error( - `Could not get ${inputName} input to ${actionName} since it contained an unrecognized dynamic value.` - ); - } - return input; -} -function getAnalyzeActionName() { - if (isInTestMode() || getTestingEnvironment() === "codeql-action-pr-checks") { - return "./analyze"; - } else { - return "github/codeql-action/analyze"; - } -} -function getCategoryInputOrThrow(workflow, jobName, matrixVars) { - return getInputOrThrow( - workflow, - jobName, - getAnalyzeActionName(), - "category", - matrixVars - ); -} -function getUploadInputOrThrow(workflow, jobName, matrixVars) { - return getInputOrThrow( - workflow, - jobName, - getAnalyzeActionName(), - "upload", - matrixVars - ); -} -function getCheckoutPathInputOrThrow(workflow, jobName, matrixVars) { - return getInputOrThrow( - workflow, - jobName, - getAnalyzeActionName(), - "checkout_path", - matrixVars - ) || getRequiredEnvParam("GITHUB_WORKSPACE"); -} - // src/init-action-post-helper.ts function createFailedUploadFailedSarifResult(error2) { const wrappedError = wrapError(error2); @@ -133731,7 +133734,7 @@ async function maybeUploadFailedSarif(config, repositoryNwo, features, logger) { } async function tryUploadSarifIfRunFailed(config, repositoryNwo, features, logger) { if (process.env["CODEQL_ACTION_ANALYZE_DID_COMPLETE_SUCCESSFULLY" /* ANALYZE_DID_COMPLETE_SUCCESSFULLY */] !== "true") { - core16.exportVariable( + core17.exportVariable( "CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */, process.env["CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */] ?? "JOB_STATUS_CONFIGURATION_ERROR" /* ConfigErrorStatus */ ); @@ -133754,7 +133757,7 @@ async function tryUploadSarifIfRunFailed(config, repositoryNwo, features, logger return createFailedUploadFailedSarifResult(e); } } else { - core16.exportVariable( + core17.exportVariable( "CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */, process.env["CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */] ?? 
"JOB_STATUS_SUCCESS" /* SuccessStatus */ ); @@ -133932,7 +133935,7 @@ async function runWrapper() { } } catch (unwrappedError) { const error2 = wrapError(unwrappedError); - core17.setFailed(error2.message); + core18.setFailed(error2.message); const statusReportBase2 = await createStatusReportBase( "init-post" /* InitPost */, getActionsStatus(error2), diff --git a/lib/init-action.js b/lib/init-action.js index f5a537666b..103f85087c 100644 --- a/lib/init-action.js +++ b/lib/init-action.js @@ -35463,7 +35463,7 @@ var require_internal_glob_options_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getOptions = void 0; - var core14 = __importStar4(require_core()); + var core15 = __importStar4(require_core()); function getOptions(copy) { const result = { followSymbolicLinks: true, @@ -35473,15 +35473,15 @@ var require_internal_glob_options_helper = __commonJS({ if (copy) { if (typeof copy.followSymbolicLinks === "boolean") { result.followSymbolicLinks = copy.followSymbolicLinks; - core14.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); + core15.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); } if (typeof copy.implicitDescendants === "boolean") { result.implicitDescendants = copy.implicitDescendants; - core14.debug(`implicitDescendants '${result.implicitDescendants}'`); + core15.debug(`implicitDescendants '${result.implicitDescendants}'`); } if (typeof copy.omitBrokenSymbolicLinks === "boolean") { result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; - core14.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); + core15.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); } } return result; @@ -36935,7 +36935,7 @@ var require_internal_globber = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; - var core14 = __importStar4(require_core()); + var core15 = __importStar4(require_core()); var fs18 = __importStar4(require("fs")); var globOptionsHelper = __importStar4(require_internal_glob_options_helper()); var path20 = __importStar4(require("path")); @@ -36986,7 +36986,7 @@ var require_internal_globber = __commonJS({ } const stack = []; for (const searchPath of patternHelper.getSearchPaths(patterns)) { - core14.debug(`Search path '${searchPath}'`); + core15.debug(`Search path '${searchPath}'`); try { yield __await4(fs18.promises.lstat(searchPath)); } catch (err) { @@ -37058,7 +37058,7 @@ var require_internal_globber = __commonJS({ } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { - core14.debug(`Broken symlink '${item.path}'`); + core15.debug(`Broken symlink '${item.path}'`); return void 0; } throw new Error(`No information found for the path '${item.path}'. 
This may indicate a broken symbolic link.`); @@ -37074,7 +37074,7 @@ var require_internal_globber = __commonJS({ traversalChain.pop(); } if (traversalChain.some((x) => x === realPath)) { - core14.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); + core15.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); return void 0; } traversalChain.push(realPath); @@ -38398,7 +38398,7 @@ var require_cacheUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getRuntimeToken = exports2.getCacheVersion = exports2.assertDefined = exports2.getGnuTarPathOnWindows = exports2.getCacheFileName = exports2.getCompressionMethod = exports2.unlinkFile = exports2.resolvePaths = exports2.getArchiveFileSizeInBytes = exports2.createTempDirectory = void 0; - var core14 = __importStar4(require_core()); + var core15 = __importStar4(require_core()); var exec2 = __importStar4(require_exec()); var glob2 = __importStar4(require_glob()); var io7 = __importStar4(require_io()); @@ -38451,7 +38451,7 @@ var require_cacheUtils = __commonJS({ _e = false; const file = _c; const relativeFile = path20.relative(workspace, file).replace(new RegExp(`\\${path20.sep}`, "g"), "/"); - core14.debug(`Matched: ${relativeFile}`); + core15.debug(`Matched: ${relativeFile}`); if (relativeFile === "") { paths.push("."); } else { @@ -38481,7 +38481,7 @@ var require_cacheUtils = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { let versionOutput = ""; additionalArgs.push("--version"); - core14.debug(`Checking ${app} ${additionalArgs.join(" ")}`); + core15.debug(`Checking ${app} ${additionalArgs.join(" ")}`); try { yield exec2.exec(`${app}`, additionalArgs, { ignoreReturnCode: true, @@ -38492,10 +38492,10 @@ var require_cacheUtils = __commonJS({ } }); } catch (err) { - core14.debug(err.message); + core15.debug(err.message); } versionOutput = versionOutput.trim(); - core14.debug(versionOutput); + core15.debug(versionOutput); return versionOutput; }); } @@ -38503,7 +38503,7 @@ var require_cacheUtils = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { const versionOutput = yield getVersion("zstd", ["--quiet"]); const version = semver9.clean(versionOutput); - core14.debug(`zstd version: ${version}`); + core15.debug(`zstd version: ${version}`); if (versionOutput === "") { return constants_1.CompressionMethod.Gzip; } else { @@ -73815,7 +73815,7 @@ var require_uploadUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.uploadCacheArchiveSDK = exports2.UploadProgress = void 0; - var core14 = __importStar4(require_core()); + var core15 = __importStar4(require_core()); var storage_blob_1 = require_dist7(); var errors_1 = require_errors2(); var UploadProgress = class { @@ -73857,7 +73857,7 @@ var require_uploadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const uploadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core14.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); + core15.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -73912,14 +73912,14 @@ var require_uploadUtils = __commonJS({ }; try { uploadProgress.startDisplayTimer(); - core14.debug(`BlobClient: 
${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); + core15.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); const response = yield blockBlobClient.uploadFile(archivePath, uploadOptions); if (response._response.status >= 400) { throw new errors_1.InvalidResponseError(`uploadCacheArchiveSDK: upload failed with status code ${response._response.status}`); } return response; } catch (error2) { - core14.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error2.message}`); + core15.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error2.message}`); throw error2; } finally { uploadProgress.stopDisplayTimer(); @@ -73990,7 +73990,7 @@ var require_requestUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.retryHttpClientResponse = exports2.retryTypedResponse = exports2.retry = exports2.isRetryableStatusCode = exports2.isServerErrorStatusCode = exports2.isSuccessStatusCode = void 0; - var core14 = __importStar4(require_core()); + var core15 = __importStar4(require_core()); var http_client_1 = require_lib(); var constants_1 = require_constants10(); function isSuccessStatusCode(statusCode) { @@ -74051,9 +74051,9 @@ var require_requestUtils = __commonJS({ isRetryable = isRetryableStatusCode(statusCode); errorMessage = `Cache service responded with ${statusCode}`; } - core14.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); + core15.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); if (!isRetryable) { - core14.debug(`${name} - Error is not retryable`); + core15.debug(`${name} - Error is not retryable`); break; } yield sleep(delay2); @@ -74158,7 +74158,7 @@ var require_downloadUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.downloadCacheStorageSDK = exports2.downloadCacheHttpClientConcurrent = exports2.downloadCacheHttpClient = exports2.DownloadProgress = void 0; - var core14 = __importStar4(require_core()); + var core15 = __importStar4(require_core()); var http_client_1 = require_lib(); var storage_blob_1 = require_dist7(); var buffer = __importStar4(require("buffer")); @@ -74196,7 +74196,7 @@ var require_downloadUtils = __commonJS({ this.segmentIndex = this.segmentIndex + 1; this.segmentSize = segmentSize; this.receivedBytes = 0; - core14.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); + core15.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); } /** * Sets the number of bytes received for the current segment. 
@@ -74230,7 +74230,7 @@ var require_downloadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const downloadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core14.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); + core15.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -74280,7 +74280,7 @@ var require_downloadUtils = __commonJS({ })); downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => { downloadResponse.message.destroy(); - core14.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); + core15.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); }); yield pipeResponseToStream(downloadResponse, writeStream); const contentLengthHeader = downloadResponse.message.headers["content-length"]; @@ -74291,7 +74291,7 @@ var require_downloadUtils = __commonJS({ throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`); } } else { - core14.debug("Unable to validate download, no Content-Length header"); + core15.debug("Unable to validate download, no Content-Length header"); } }); } @@ -74411,7 +74411,7 @@ var require_downloadUtils = __commonJS({ const properties = yield client.getProperties(); const contentLength = (_a = properties.contentLength) !== null && _a !== void 0 ? _a : -1; if (contentLength < 0) { - core14.debug("Unable to determine content length, downloading file with http-client..."); + core15.debug("Unable to determine content length, downloading file with http-client..."); yield downloadCacheHttpClient(archiveLocation, archivePath); } else { const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH); @@ -74491,7 +74491,7 @@ var require_options = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getDownloadOptions = exports2.getUploadOptions = void 0; - var core14 = __importStar4(require_core()); + var core15 = __importStar4(require_core()); function getUploadOptions(copy) { const result = { useAzureSdk: false, @@ -74511,9 +74511,9 @@ var require_options = __commonJS({ } result.uploadConcurrency = !isNaN(Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) ? Math.min(32, Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) : result.uploadConcurrency; result.uploadChunkSize = !isNaN(Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"])) ? 
Math.min(128 * 1024 * 1024, Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"]) * 1024 * 1024) : result.uploadChunkSize; - core14.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core14.debug(`Upload concurrency: ${result.uploadConcurrency}`); - core14.debug(`Upload chunk size: ${result.uploadChunkSize}`); + core15.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core15.debug(`Upload concurrency: ${result.uploadConcurrency}`); + core15.debug(`Upload chunk size: ${result.uploadChunkSize}`); return result; } exports2.getUploadOptions = getUploadOptions; @@ -74550,12 +74550,12 @@ var require_options = __commonJS({ if (segmentDownloadTimeoutMins && !isNaN(Number(segmentDownloadTimeoutMins)) && isFinite(Number(segmentDownloadTimeoutMins))) { result.segmentTimeoutInMs = Number(segmentDownloadTimeoutMins) * 60 * 1e3; } - core14.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core14.debug(`Download concurrency: ${result.downloadConcurrency}`); - core14.debug(`Request timeout (ms): ${result.timeoutInMs}`); - core14.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); - core14.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); - core14.debug(`Lookup only: ${result.lookupOnly}`); + core15.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core15.debug(`Download concurrency: ${result.downloadConcurrency}`); + core15.debug(`Request timeout (ms): ${result.timeoutInMs}`); + core15.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); + core15.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); + core15.debug(`Lookup only: ${result.lookupOnly}`); return result; } exports2.getDownloadOptions = getDownloadOptions; @@ -74735,7 +74735,7 @@ var require_cacheHttpClient = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.saveCache = exports2.reserveCache = exports2.downloadCache = exports2.getCacheEntry = void 0; - var core14 = __importStar4(require_core()); + var core15 = __importStar4(require_core()); var http_client_1 = require_lib(); var auth_1 = require_auth(); var fs18 = __importStar4(require("fs")); @@ -74753,7 +74753,7 @@ var require_cacheHttpClient = __commonJS({ throw new Error("Cache Service Url not found, unable to restore cache."); } const url = `${baseUrl}_apis/artifactcache/${resource}`; - core14.debug(`Resource Url: ${url}`); + core15.debug(`Resource Url: ${url}`); return url; } function createAcceptHeader(type2, apiVersion) { @@ -74781,7 +74781,7 @@ var require_cacheHttpClient = __commonJS({ return httpClient.getJson(getCacheApiUrl(resource)); })); if (response.statusCode === 204) { - if (core14.isDebug()) { + if (core15.isDebug()) { yield printCachesListForDiagnostics(keys[0], httpClient, version); } return null; @@ -74794,9 +74794,9 @@ var require_cacheHttpClient = __commonJS({ if (!cacheDownloadUrl) { throw new Error("Cache not found."); } - core14.setSecret(cacheDownloadUrl); - core14.debug(`Cache Result:`); - core14.debug(JSON.stringify(cacheResult)); + core15.setSecret(cacheDownloadUrl); + core15.debug(`Cache Result:`); + core15.debug(JSON.stringify(cacheResult)); return cacheResult; }); } @@ -74811,10 +74811,10 @@ var require_cacheHttpClient = __commonJS({ const cacheListResult = response.result; const totalCount = cacheListResult === null || cacheListResult === void 0 ? 
void 0 : cacheListResult.totalCount; if (totalCount && totalCount > 0) { - core14.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key + core15.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key
Other caches with similar key:`); for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) { - core14.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); + core15.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); } } } @@ -74859,7 +74859,7 @@ Other caches with similar key:`); } function uploadChunk(httpClient, resourceUrl, openStream, start, end) { return __awaiter4(this, void 0, void 0, function* () { - core14.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); + core15.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); const additionalHeaders = { "Content-Type": "application/octet-stream", "Content-Range": getContentRange(start, end) @@ -74881,7 +74881,7 @@ Other caches with similar key:`); const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency); const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize); const parallelUploads = [...new Array(concurrency).keys()]; - core14.debug("Awaiting all uploads"); + core15.debug("Awaiting all uploads"); let offset = 0; try { yield Promise.all(parallelUploads.map(() => __awaiter4(this, void 0, void 0, function* () { @@ -74924,16 +74924,16 @@ Other caches with similar key:`); yield (0, uploadUtils_1.uploadCacheArchiveSDK)(signedUploadURL, archivePath, options); } else { const httpClient = createHttpClient(); - core14.debug("Upload cache"); + core15.debug("Upload cache"); yield uploadFile(httpClient, cacheId, archivePath, options); - core14.debug("Commiting cache"); + core15.debug("Committing cache"); const cacheSize = utils.getArchiveFileSizeInBytes(archivePath); - core14.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); + core15.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); const commitCacheResponse = yield commitCache(httpClient,
cacheId, cacheSize); if (!(0, requestUtils_1.isSuccessStatusCode)(commitCacheResponse.statusCode)) { throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`); } - core14.info("Cache saved successfully"); + core15.info("Cache saved successfully"); } }); } @@ -80385,7 +80385,7 @@ var require_cache3 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.saveCache = exports2.restoreCache = exports2.isFeatureAvailable = exports2.FinalizeCacheError = exports2.ReserveCacheError = exports2.ValidationError = void 0; - var core14 = __importStar4(require_core()); + var core15 = __importStar4(require_core()); var path20 = __importStar4(require("path")); var utils = __importStar4(require_cacheUtils()); var cacheHttpClient = __importStar4(require_cacheHttpClient()); @@ -80445,7 +80445,7 @@ var require_cache3 = __commonJS({ function restoreCache4(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { return __awaiter4(this, void 0, void 0, function* () { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core14.debug(`Cache service version: ${cacheServiceVersion}`); + core15.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); switch (cacheServiceVersion) { case "v2": @@ -80461,8 +80461,8 @@ var require_cache3 = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core14.debug("Resolved Keys:"); - core14.debug(JSON.stringify(keys)); + core15.debug("Resolved Keys:"); + core15.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -80480,19 +80480,19 @@ var require_cache3 = __commonJS({ return void 0; } if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { - core14.info("Lookup only - skipping download"); + core15.info("Lookup only - skipping download"); return cacheEntry.cacheKey; } archivePath = path20.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core14.debug(`Archive Path: ${archivePath}`); + core15.debug(`Archive Path: ${archivePath}`); yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); - if (core14.isDebug()) { + if (core15.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core14.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + core15.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); yield (0, tar_1.extractTar)(archivePath, compressionMethod); - core14.info("Cache restored successfully"); + core15.info("Cache restored successfully"); return cacheEntry.cacheKey; } catch (error2) { const typedError = error2; @@ -80500,16 +80500,16 @@ var require_cache3 = __commonJS({ throw error2; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core14.error(`Failed to restore: ${error2.message}`); + core15.error(`Failed to restore: ${error2.message}`); } else { - core14.warning(`Failed to restore: ${error2.message}`); + core15.warning(`Failed to restore: ${error2.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error2) { - core14.debug(`Failed to delete archive: ${error2}`); + core15.debug(`Failed to delete archive: ${error2}`); } } return void 0; @@ -80520,8 +80520,8 @@ var require_cache3 = __commonJS({ options = Object.assign(Object.assign({}, options), { useAzureSdk: true }); restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core14.debug("Resolved Keys:"); - core14.debug(JSON.stringify(keys)); + core15.debug("Resolved Keys:"); + core15.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -80539,30 +80539,30 @@ var require_cache3 = __commonJS({ }; const response = yield twirpClient.GetCacheEntryDownloadURL(request); if (!response.ok) { - core14.debug(`Cache not found for version ${request.version} of keys: ${keys.join(", ")}`); + core15.debug(`Cache not found for version ${request.version} of keys: ${keys.join(", ")}`); return void 0; } const isRestoreKeyMatch = request.key !== response.matchedKey; if (isRestoreKeyMatch) { - core14.info(`Cache hit for restore-key: ${response.matchedKey}`); + core15.info(`Cache hit for restore-key: ${response.matchedKey}`); } else { - core14.info(`Cache hit for: ${response.matchedKey}`); + core15.info(`Cache hit for: ${response.matchedKey}`); } if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { - core14.info("Lookup only - skipping download"); + core15.info("Lookup only - skipping download"); return response.matchedKey; } archivePath = path20.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core14.debug(`Archive path: ${archivePath}`); - core14.debug(`Starting download of archive to: ${archivePath}`); + core15.debug(`Archive path: ${archivePath}`); + core15.debug(`Starting download of archive to: ${archivePath}`); yield cacheHttpClient.downloadCache(response.signedDownloadUrl, archivePath, options); const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core14.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); - if (core14.isDebug()) { + core15.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + if (core15.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } yield (0, tar_1.extractTar)(archivePath, compressionMethod); - core14.info("Cache restored successfully"); + core15.info("Cache restored successfully"); return response.matchedKey; } catch (error2) { const typedError = error2; @@ -80570,9 +80570,9 @@ var require_cache3 = __commonJS({ throw error2; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core14.error(`Failed to restore: ${error2.message}`); + core15.error(`Failed to restore: ${error2.message}`); } else { - core14.warning(`Failed to restore: ${error2.message}`); + core15.warning(`Failed to restore: ${error2.message}`); } } } finally { @@ -80581,7 +80581,7 @@ var require_cache3 = __commonJS({ yield utils.unlinkFile(archivePath); } } catch (error2) { - core14.debug(`Failed to delete archive: ${error2}`); + core15.debug(`Failed to delete archive: ${error2}`); } } return void 0; @@ -80590,7 +80590,7 @@ var require_cache3 = __commonJS({ function saveCache4(paths, key, options, enableCrossOsArchive = false) { return __awaiter4(this, void 0, void 0, function* () { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core14.debug(`Cache service version: ${cacheServiceVersion}`); + core15.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); checkKey(key); switch (cacheServiceVersion) { @@ -80609,26 +80609,26 @@ var require_cache3 = __commonJS({ const compressionMethod = yield utils.getCompressionMethod(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core14.debug("Cache Paths:"); - core14.debug(`${JSON.stringify(cachePaths)}`); + core15.debug("Cache Paths:"); + core15.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = path20.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core14.debug(`Archive Path: ${archivePath}`); + core15.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core14.isDebug()) { + if (core15.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const fileSizeLimit = 10 * 1024 * 1024 * 1024; const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core14.debug(`File Size: ${archiveFileSize}`); + core15.debug(`File Size: ${archiveFileSize}`); if 
(archiveFileSize > fileSizeLimit && !(0, config_1.isGhes)()) { throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); } - core14.debug("Reserving Cache"); + core15.debug("Reserving Cache"); const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, { compressionMethod, enableCrossOsArchive, @@ -80641,26 +80641,26 @@ var require_cache3 = __commonJS({ } else { throw new ReserveCacheError2(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? void 0 : _e.message}`); } - core14.debug(`Saving Cache (ID: ${cacheId})`); + core15.debug(`Saving Cache (ID: ${cacheId})`); yield cacheHttpClient.saveCache(cacheId, archivePath, "", options); } catch (error2) { const typedError = error2; if (typedError.name === ValidationError.name) { throw error2; } else if (typedError.name === ReserveCacheError2.name) { - core14.info(`Failed to save: ${typedError.message}`); + core15.info(`Failed to save: ${typedError.message}`); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core14.error(`Failed to save: ${typedError.message}`); + core15.error(`Failed to save: ${typedError.message}`); } else { - core14.warning(`Failed to save: ${typedError.message}`); + core15.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error2) { - core14.debug(`Failed to delete archive: ${error2}`); + core15.debug(`Failed to delete archive: ${error2}`); } } return cacheId; @@ -80673,23 +80673,23 @@ var require_cache3 = __commonJS({ const twirpClient = cacheTwirpClient.internalCacheTwirpClient(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core14.debug("Cache Paths:"); - core14.debug(`${JSON.stringify(cachePaths)}`); + core15.debug("Cache Paths:"); + core15.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = path20.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core14.debug(`Archive Path: ${archivePath}`); + core15.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core14.isDebug()) { + if (core15.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core14.debug(`File Size: ${archiveFileSize}`); + core15.debug(`File Size: ${archiveFileSize}`); options.archiveSizeBytes = archiveFileSize; - core14.debug("Reserving Cache"); + core15.debug("Reserving Cache"); const version = utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive); const request = { key, @@ -80700,16 +80700,16 @@ var require_cache3 = __commonJS({ const response = yield twirpClient.CreateCacheEntry(request); if (!response.ok) { if (response.message) { - core14.warning(`Cache reservation failed: ${response.message}`); + core15.warning(`Cache reservation failed: ${response.message}`); } throw new Error(response.message || "Response was not ok"); } signedUploadUrl = 
response.signedUploadUrl; } catch (error2) { - core14.debug(`Failed to reserve cache: ${error2}`); + core15.debug(`Failed to reserve cache: ${error2}`); throw new ReserveCacheError2(`Unable to reserve cache with key ${key}, another job may be creating this cache.`); } - core14.debug(`Attempting to upload cache located at: ${archivePath}`); + core15.debug(`Attempting to upload cache located at: ${archivePath}`); yield cacheHttpClient.saveCache(cacheId, archivePath, signedUploadUrl, options); const finalizeRequest = { key, @@ -80717,7 +80717,7 @@ var require_cache3 = __commonJS({ sizeBytes: `${archiveFileSize}` }; const finalizeResponse = yield twirpClient.FinalizeCacheEntryUpload(finalizeRequest); - core14.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); + core15.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); if (!finalizeResponse.ok) { if (finalizeResponse.message) { throw new FinalizeCacheError(finalizeResponse.message); @@ -80730,21 +80730,21 @@ var require_cache3 = __commonJS({ if (typedError.name === ValidationError.name) { throw error2; } else if (typedError.name === ReserveCacheError2.name) { - core14.info(`Failed to save: ${typedError.message}`); + core15.info(`Failed to save: ${typedError.message}`); } else if (typedError.name === FinalizeCacheError.name) { - core14.warning(typedError.message); + core15.warning(typedError.message); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core14.error(`Failed to save: ${typedError.message}`); + core15.error(`Failed to save: ${typedError.message}`); } else { - core14.warning(`Failed to save: ${typedError.message}`); + core15.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error2) { - core14.debug(`Failed to delete archive: ${error2}`); + core15.debug(`Failed to delete archive: ${error2}`); } } return cacheId; @@ -80786,7 +80786,7 @@ var require_internal_glob_options_helper2 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getOptions = void 0; - var core14 = __importStar4(require_core()); + var core15 = __importStar4(require_core()); function getOptions(copy) { const result = { followSymbolicLinks: true, @@ -80798,23 +80798,23 @@ var require_internal_glob_options_helper2 = __commonJS({ if (copy) { if (typeof copy.followSymbolicLinks === "boolean") { result.followSymbolicLinks = copy.followSymbolicLinks; - core14.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); + core15.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); } if (typeof copy.implicitDescendants === "boolean") { result.implicitDescendants = copy.implicitDescendants; - core14.debug(`implicitDescendants '${result.implicitDescendants}'`); + core15.debug(`implicitDescendants '${result.implicitDescendants}'`); } if (typeof copy.matchDirectories === "boolean") { result.matchDirectories = copy.matchDirectories; - core14.debug(`matchDirectories '${result.matchDirectories}'`); + core15.debug(`matchDirectories '${result.matchDirectories}'`); } if (typeof copy.omitBrokenSymbolicLinks === "boolean") { result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; - core14.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); + core15.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); } if (typeof copy.excludeHiddenFiles === "boolean") { result.excludeHiddenFiles = copy.excludeHiddenFiles; - 
core14.debug(`excludeHiddenFiles '${result.excludeHiddenFiles}'`); + core15.debug(`excludeHiddenFiles '${result.excludeHiddenFiles}'`); } } return result; @@ -81498,7 +81498,7 @@ var require_internal_globber2 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; - var core14 = __importStar4(require_core()); + var core15 = __importStar4(require_core()); var fs18 = __importStar4(require("fs")); var globOptionsHelper = __importStar4(require_internal_glob_options_helper2()); var path20 = __importStar4(require("path")); @@ -81551,7 +81551,7 @@ var require_internal_globber2 = __commonJS({ } const stack = []; for (const searchPath of patternHelper.getSearchPaths(patterns)) { - core14.debug(`Search path '${searchPath}'`); + core15.debug(`Search path '${searchPath}'`); try { yield __await4(fs18.promises.lstat(searchPath)); } catch (err) { @@ -81626,7 +81626,7 @@ var require_internal_globber2 = __commonJS({ } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { - core14.debug(`Broken symlink '${item.path}'`); + core15.debug(`Broken symlink '${item.path}'`); return void 0; } throw new Error(`No information found for the path '${item.path}'. This may indicate a broken symbolic link.`); @@ -81642,7 +81642,7 @@ var require_internal_globber2 = __commonJS({ traversalChain.pop(); } if (traversalChain.some((x) => x === realPath)) { - core14.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); + core15.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); return void 0; } traversalChain.push(realPath); @@ -81735,7 +81735,7 @@ var require_internal_hash_files = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.hashFiles = void 0; var crypto2 = __importStar4(require("crypto")); - var core14 = __importStar4(require_core()); + var core15 = __importStar4(require_core()); var fs18 = __importStar4(require("fs")); var stream2 = __importStar4(require("stream")); var util = __importStar4(require("util")); @@ -81744,7 +81744,7 @@ var require_internal_hash_files = __commonJS({ var _a, e_1, _b, _c; var _d; return __awaiter4(this, void 0, void 0, function* () { - const writeDelegate = verbose ? core14.info : core14.debug; + const writeDelegate = verbose ? core15.info : core15.debug; let hasMatch = false; const githubWorkspace = currentWorkspace ? currentWorkspace : (_d = process.env["GITHUB_WORKSPACE"]) !== null && _d !== void 0 ? 
_d : process.cwd(); const result = crypto2.createHash("sha256"); @@ -82049,7 +82049,7 @@ var require_retry_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.RetryHelper = void 0; - var core14 = __importStar4(require_core()); + var core15 = __importStar4(require_core()); var RetryHelper = class { constructor(maxAttempts, minSeconds, maxSeconds) { if (maxAttempts < 1) { @@ -82072,10 +82072,10 @@ var require_retry_helper = __commonJS({ if (isRetryable && !isRetryable(err)) { throw err; } - core14.info(err.message); + core15.info(err.message); } const seconds = this.getSleepAmount(); - core14.info(`Waiting ${seconds} seconds before trying again`); + core15.info(`Waiting ${seconds} seconds before trying again`); yield this.sleep(seconds); attempt++; } @@ -82155,7 +82155,7 @@ var require_tool_cache = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.evaluateVersions = exports2.isExplicitVersion = exports2.findFromManifest = exports2.getManifestFromRepo = exports2.findAllVersions = exports2.find = exports2.cacheFile = exports2.cacheDir = exports2.extractZip = exports2.extractXar = exports2.extractTar = exports2.extract7z = exports2.downloadTool = exports2.HTTPError = void 0; - var core14 = __importStar4(require_core()); + var core15 = __importStar4(require_core()); var io7 = __importStar4(require_io()); var crypto2 = __importStar4(require("crypto")); var fs18 = __importStar4(require("fs")); @@ -82184,8 +82184,8 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { dest = dest || path20.join(_getTempDirectory(), crypto2.randomUUID()); yield io7.mkdirP(path20.dirname(dest)); - core14.debug(`Downloading ${url}`); - core14.debug(`Destination ${dest}`); + core15.debug(`Downloading ${url}`); + core15.debug(`Destination ${dest}`); const maxAttempts = 3; const minSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS", 10); const maxSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS", 20); @@ -82212,7 +82212,7 @@ var require_tool_cache = __commonJS({ allowRetries: false }); if (auth) { - core14.debug("set auth"); + core15.debug("set auth"); if (headers === void 0) { headers = {}; } @@ -82221,7 +82221,7 @@ var require_tool_cache = __commonJS({ const response = yield http.get(url, headers); if (response.message.statusCode !== 200) { const err = new HTTPError2(response.message.statusCode); - core14.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); + core15.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); throw err; } const pipeline = util.promisify(stream2.pipeline); @@ -82230,16 +82230,16 @@ var require_tool_cache = __commonJS({ let succeeded = false; try { yield pipeline(readStream, fs18.createWriteStream(dest)); - core14.debug("download complete"); + core15.debug("download complete"); succeeded = true; return dest; } finally { if (!succeeded) { - core14.debug("download failed"); + core15.debug("download failed"); try { yield io7.rmRF(dest); } catch (err) { - core14.debug(`Failed to delete '${dest}'. ${err.message}`); + core15.debug(`Failed to delete '${dest}'. ${err.message}`); } } } @@ -82254,7 +82254,7 @@ var require_tool_cache = __commonJS({ process.chdir(dest); if (_7zPath) { try { - const logLevel = core14.isDebug() ? "-bb1" : "-bb0"; + const logLevel = core15.isDebug() ? 
"-bb1" : "-bb0"; const args = [ "x", logLevel, @@ -82304,7 +82304,7 @@ var require_tool_cache = __commonJS({ throw new Error("parameter 'file' is required"); } dest = yield _createExtractFolder(dest); - core14.debug("Checking tar --version"); + core15.debug("Checking tar --version"); let versionOutput = ""; yield (0, exec_1.exec)("tar --version", [], { ignoreReturnCode: true, @@ -82314,7 +82314,7 @@ var require_tool_cache = __commonJS({ stderr: (data) => versionOutput += data.toString() } }); - core14.debug(versionOutput.trim()); + core15.debug(versionOutput.trim()); const isGnuTar = versionOutput.toUpperCase().includes("GNU TAR"); let args; if (flags instanceof Array) { @@ -82322,7 +82322,7 @@ var require_tool_cache = __commonJS({ } else { args = [flags]; } - if (core14.isDebug() && !flags.includes("v")) { + if (core15.isDebug() && !flags.includes("v")) { args.push("-v"); } let destArg = dest; @@ -82354,7 +82354,7 @@ var require_tool_cache = __commonJS({ args = [flags]; } args.push("-x", "-C", dest, "-f", file); - if (core14.isDebug()) { + if (core15.isDebug()) { args.push("-v"); } const xarPath = yield io7.which("xar", true); @@ -82399,7 +82399,7 @@ var require_tool_cache = __commonJS({ "-Command", pwshCommand ]; - core14.debug(`Using pwsh at path: ${pwshPath}`); + core15.debug(`Using pwsh at path: ${pwshPath}`); yield (0, exec_1.exec)(`"${pwshPath}"`, args); } else { const powershellCommand = [ @@ -82419,7 +82419,7 @@ var require_tool_cache = __commonJS({ powershellCommand ]; const powershellPath = yield io7.which("powershell", true); - core14.debug(`Using powershell at path: ${powershellPath}`); + core15.debug(`Using powershell at path: ${powershellPath}`); yield (0, exec_1.exec)(`"${powershellPath}"`, args); } }); @@ -82428,7 +82428,7 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { const unzipPath = yield io7.which("unzip", true); const args = [file]; - if (!core14.isDebug()) { + if (!core15.isDebug()) { args.unshift("-q"); } args.unshift("-o"); @@ -82439,8 +82439,8 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { version = semver9.clean(version) || version; arch2 = arch2 || os5.arch(); - core14.debug(`Caching tool ${tool} ${version} ${arch2}`); - core14.debug(`source dir: ${sourceDir}`); + core15.debug(`Caching tool ${tool} ${version} ${arch2}`); + core15.debug(`source dir: ${sourceDir}`); if (!fs18.statSync(sourceDir).isDirectory()) { throw new Error("sourceDir is not a directory"); } @@ -82458,14 +82458,14 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { version = semver9.clean(version) || version; arch2 = arch2 || os5.arch(); - core14.debug(`Caching tool ${tool} ${version} ${arch2}`); - core14.debug(`source file: ${sourceFile}`); + core15.debug(`Caching tool ${tool} ${version} ${arch2}`); + core15.debug(`source file: ${sourceFile}`); if (!fs18.statSync(sourceFile).isFile()) { throw new Error("sourceFile is not a file"); } const destFolder = yield _createToolPath(tool, version, arch2); const destPath = path20.join(destFolder, targetFile); - core14.debug(`destination file ${destPath}`); + core15.debug(`destination file ${destPath}`); yield io7.cp(sourceFile, destPath); _completeToolPath(tool, version, arch2); return destFolder; @@ -82489,12 +82489,12 @@ var require_tool_cache = __commonJS({ if (versionSpec) { versionSpec = semver9.clean(versionSpec) || ""; const cachePath = path20.join(_getCacheDirectory(), toolName, versionSpec, arch2); - 
core14.debug(`checking cache: ${cachePath}`); + core15.debug(`checking cache: ${cachePath}`); if (fs18.existsSync(cachePath) && fs18.existsSync(`${cachePath}.complete`)) { - core14.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); + core15.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); toolPath = cachePath; } else { - core14.debug("not found"); + core15.debug("not found"); } } return toolPath; @@ -82525,7 +82525,7 @@ var require_tool_cache = __commonJS({ const http = new httpm.HttpClient("tool-cache"); const headers = {}; if (auth) { - core14.debug("set auth"); + core15.debug("set auth"); headers.authorization = auth; } const response = yield http.getJson(treeUrl, headers); @@ -82546,7 +82546,7 @@ var require_tool_cache = __commonJS({ try { releases = JSON.parse(versionsRaw); } catch (_a) { - core14.debug("Invalid json"); + core15.debug("Invalid json"); } } return releases; @@ -82572,7 +82572,7 @@ var require_tool_cache = __commonJS({ function _createToolPath(tool, version, arch2) { return __awaiter4(this, void 0, void 0, function* () { const folderPath = path20.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || ""); - core14.debug(`destination ${folderPath}`); + core15.debug(`destination ${folderPath}`); const markerPath = `${folderPath}.complete`; yield io7.rmRF(folderPath); yield io7.rmRF(markerPath); @@ -82584,19 +82584,19 @@ var require_tool_cache = __commonJS({ const folderPath = path20.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || ""); const markerPath = `${folderPath}.complete`; fs18.writeFileSync(markerPath, ""); - core14.debug("finished caching tool"); + core15.debug("finished caching tool"); } function isExplicitVersion(versionSpec) { const c = semver9.clean(versionSpec) || ""; - core14.debug(`isExplicit: ${c}`); + core15.debug(`isExplicit: ${c}`); const valid3 = semver9.valid(c) != null; - core14.debug(`explicit? ${valid3}`); + core15.debug(`explicit? ${valid3}`); return valid3; } exports2.isExplicitVersion = isExplicitVersion; function evaluateVersions(versions, versionSpec) { let version = ""; - core14.debug(`evaluating ${versions.length} versions`); + core15.debug(`evaluating ${versions.length} versions`); versions = versions.sort((a, b) => { if (semver9.gt(a, b)) { return 1; @@ -82612,9 +82612,9 @@ var require_tool_cache = __commonJS({ } } if (version) { - core14.debug(`matched: ${version}`); + core15.debug(`matched: ${version}`); } else { - core14.debug("match not found"); + core15.debug("match not found"); } return version; } @@ -83193,7 +83193,7 @@ var require_follow_redirects = __commonJS({ // src/init-action.ts var fs17 = __toESM(require("fs")); var path19 = __toESM(require("path")); -var core13 = __toESM(require_core()); +var core14 = __toESM(require_core()); var io6 = __toESM(require_io()); var semver8 = __toESM(require_semver2()); @@ -89916,8 +89916,9 @@ function flushDiagnostics(config) { } // src/init.ts -var fs15 = __toESM(require("fs")); -var path17 = __toESM(require("path")); +var fs16 = __toESM(require("fs")); +var path18 = __toESM(require("path")); +var core12 = __toESM(require_core()); var toolrunner4 = __toESM(require_toolrunner()); var io5 = __toESM(require_io()); @@ -91654,7 +91655,193 @@ async function getJobRunUuidSarifOptions(codeql) { ) ? 
[`--sarif-run-property=jobRunUuid=${jobRunUuid}`] : []; } +// src/workflow.ts +var fs15 = __toESM(require("fs")); +var path17 = __toESM(require("path")); +var import_zlib = __toESM(require("zlib")); +var core11 = __toESM(require_core()); +function toCodedErrors(errors) { + return Object.entries(errors).reduce( + (acc, [code, message]) => { + acc[code] = { message, code }; + return acc; + }, + {} + ); +} +var WorkflowErrors = toCodedErrors({ + MissingPushHook: `Please specify an on.push hook to analyze and see code scanning alerts from the default branch on the Security tab.`, + CheckoutWrongHead: `git checkout HEAD^2 is no longer necessary. Please remove this step as Code Scanning recommends analyzing the merge commit for best results.`, + InconsistentActionVersion: `Not all workflow steps that use \`github/codeql-action\` actions use the same version. Please ensure that all such steps use the same version to avoid compatibility issues.` +}); +async function groupLanguagesByExtractor(languages, codeql) { + const resolveResult = await codeql.betterResolveLanguages(); + if (!resolveResult.aliases) { + return void 0; + } + const aliases = resolveResult.aliases; + const languagesByExtractor = {}; + for (const language of languages) { + const extractorName = aliases[language] || language; + if (!languagesByExtractor[extractorName]) { + languagesByExtractor[extractorName] = []; + } + languagesByExtractor[extractorName].push(language); + } + return languagesByExtractor; +} +async function getWorkflowErrors(doc, codeql) { + const errors = []; + const jobName = process.env.GITHUB_JOB; + if (jobName) { + const job = doc?.jobs?.[jobName]; + if (job?.strategy?.matrix?.language) { + const matrixLanguages = job.strategy.matrix.language; + if (Array.isArray(matrixLanguages)) { + const matrixLanguagesByExtractor = await groupLanguagesByExtractor( + matrixLanguages, + codeql + ); + if (matrixLanguagesByExtractor !== void 0) { + for (const [extractor, languages] of Object.entries( + matrixLanguagesByExtractor + )) { + if (languages.length > 1) { + errors.push({ + message: `CodeQL language '${extractor}' is referenced by more than one entry in the 'language' matrix parameter for job '${jobName}'. This may result in duplicate alerts. 
Please edit the 'language' matrix parameter to keep only one of the following: ${languages.map((language) => `'${language}'`).join(", ")}.`, + code: "DuplicateLanguageInMatrix" + }); + } + } + } + } + } + const steps = job?.steps; + if (Array.isArray(steps)) { + for (const step of steps) { + if (step?.run === "git checkout HEAD^2") { + errors.push(WorkflowErrors.CheckoutWrongHead); + break; + } + } + } + } + const codeqlStepRefs = []; + for (const job of Object.values(doc?.jobs || {})) { + if (Array.isArray(job.steps)) { + for (const step of job.steps) { + if (step.uses?.startsWith("github/codeql-action/")) { + const parts = step.uses.split("@"); + if (parts.length >= 2) { + codeqlStepRefs.push(parts[parts.length - 1]); + } + } + } + } + } + if (codeqlStepRefs.length > 0 && !codeqlStepRefs.every((ref) => ref === codeqlStepRefs[0])) { + errors.push(WorkflowErrors.InconsistentActionVersion); + } + const hasPushTrigger = hasWorkflowTrigger("push", doc); + const hasPullRequestTrigger = hasWorkflowTrigger("pull_request", doc); + const hasWorkflowCallTrigger = hasWorkflowTrigger("workflow_call", doc); + if (hasPullRequestTrigger && !hasPushTrigger && !hasWorkflowCallTrigger) { + errors.push(WorkflowErrors.MissingPushHook); + } + return errors; +} +function hasWorkflowTrigger(triggerName, doc) { + if (!doc.on) { + return false; + } + if (typeof doc.on === "string") { + return doc.on === triggerName; + } + if (Array.isArray(doc.on)) { + return doc.on.includes(triggerName); + } + return Object.prototype.hasOwnProperty.call(doc.on, triggerName); +} +async function validateWorkflow(codeql, logger) { + let workflow; + try { + workflow = await getWorkflow(logger); + } catch (e) { + return `error: getWorkflow() failed: ${String(e)}`; + } + let workflowErrors; + try { + workflowErrors = await getWorkflowErrors(workflow, codeql); + } catch (e) { + return `error: getWorkflowErrors() failed: ${String(e)}`; + } + if (workflowErrors.length > 0) { + let message; + try { + message = formatWorkflowErrors(workflowErrors); + } catch (e) { + return `error: formatWorkflowErrors() failed: ${String(e)}`; + } + core11.warning(message); + } + return formatWorkflowCause(workflowErrors); +} +function formatWorkflowErrors(errors) { + const issuesWere = errors.length === 1 ? "issue was" : "issues were"; + const errorsList = errors.map((e) => e.message).join(" "); + return `${errors.length} ${issuesWere} detected with this workflow: ${errorsList}`; +} +function formatWorkflowCause(errors) { + if (errors.length === 0) { + return void 0; + } + return errors.map((e) => e.code).join(","); +} +async function getWorkflow(logger) { + const maybeWorkflow = process.env["CODE_SCANNING_WORKFLOW_FILE"]; + if (maybeWorkflow) { + logger.debug( + "Using the workflow specified by the CODE_SCANNING_WORKFLOW_FILE environment variable." 
+ ); + return load( + import_zlib.default.gunzipSync(Buffer.from(maybeWorkflow, "base64")).toString() + ); + } + const workflowPath = await getWorkflowAbsolutePath(logger); + return load(fs15.readFileSync(workflowPath, "utf-8")); +} +async function getWorkflowAbsolutePath(logger) { + const relativePath = await getWorkflowRelativePath(); + const absolutePath = path17.join( + getRequiredEnvParam("GITHUB_WORKSPACE"), + relativePath + ); + if (fs15.existsSync(absolutePath)) { + logger.debug( + `Derived the following absolute path for the currently executing workflow: ${absolutePath}.` + ); + return absolutePath; + } + throw new Error( + `Expected to find a code scanning workflow file at ${absolutePath}, but no such file existed. This can happen if the currently running workflow checks out a branch that doesn't contain the corresponding workflow file.` + ); +} + // src/init.ts +async function checkWorkflow(logger, codeql) { + if (process.env["CODEQL_ACTION_SKIP_WORKFLOW_VALIDATION" /* SKIP_WORKFLOW_VALIDATION */] !== "true") { + core12.startGroup("Validating workflow"); + const validateWorkflowResult = await validateWorkflow(codeql, logger); + if (validateWorkflowResult === void 0) { + logger.info("Detected no issues with the code scanning workflow."); + } else { + logger.warning( + `Unable to validate code scanning workflow: ${validateWorkflowResult}` + ); + } + core12.endGroup(); + } +} async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger) { logger.startGroup("Setup CodeQL tools"); const { @@ -91689,7 +91876,7 @@ async function initConfig2(features, inputs) { }); } async function runDatabaseInitCluster(databaseInitEnvironment, codeql, config, sourceRoot, processName, qlconfigFile, logger) { - fs15.mkdirSync(config.dbLocation, { recursive: true }); + fs16.mkdirSync(config.dbLocation, { recursive: true }); await wrapEnvironment( databaseInitEnvironment, async () => await codeql.databaseInitCluster( @@ -91724,25 +91911,25 @@ async function checkPacksForOverlayCompatibility(codeql, config, logger) { } function checkPackForOverlayCompatibility(packDir, codeQlOverlayVersion, logger) { try { - let qlpackPath = path17.join(packDir, "qlpack.yml"); - if (!fs15.existsSync(qlpackPath)) { - qlpackPath = path17.join(packDir, "codeql-pack.yml"); + let qlpackPath = path18.join(packDir, "qlpack.yml"); + if (!fs16.existsSync(qlpackPath)) { + qlpackPath = path18.join(packDir, "codeql-pack.yml"); } const qlpackContents = load( - fs15.readFileSync(qlpackPath, "utf8") + fs16.readFileSync(qlpackPath, "utf8") ); if (!qlpackContents.buildMetadata) { return true; } - const packInfoPath = path17.join(packDir, ".packinfo"); - if (!fs15.existsSync(packInfoPath)) { + const packInfoPath = path18.join(packDir, ".packinfo"); + if (!fs16.existsSync(packInfoPath)) { logger.warning( `The query pack at ${packDir} does not have a .packinfo file, so it cannot support overlay analysis. 
Recompiling the query pack with the latest CodeQL CLI should solve this problem.` ); return false; } const packInfoFileContents = JSON.parse( - fs15.readFileSync(packInfoPath, "utf8") + fs16.readFileSync(packInfoPath, "utf8") ); const packOverlayVersion = packInfoFileContents.overlayVersion; if (typeof packOverlayVersion !== "number") { @@ -91767,7 +91954,7 @@ function checkPackForOverlayCompatibility(packDir, codeQlOverlayVersion, logger) } async function checkInstallPython311(languages, codeql) { if (languages.includes("python" /* python */) && process.platform === "win32" && !(await codeql.getVersion()).features?.supportsPython312) { - const script = path17.resolve( + const script = path18.resolve( __dirname, "../python-setup", "check_python12.ps1" @@ -91777,8 +91964,8 @@ async function checkInstallPython311(languages, codeql) { ]).exec(); } } -function cleanupDatabaseClusterDirectory(config, logger, options = {}, rmSync2 = fs15.rmSync) { - if (fs15.existsSync(config.dbLocation) && (fs15.statSync(config.dbLocation).isFile() || fs15.readdirSync(config.dbLocation).length > 0)) { +function cleanupDatabaseClusterDirectory(config, logger, options = {}, rmSync2 = fs16.rmSync) { + if (fs16.existsSync(config.dbLocation) && (fs16.statSync(config.dbLocation).isFile() || fs16.readdirSync(config.dbLocation).length > 0)) { if (!options.disableExistingDirectoryWarning) { logger.warning( `The database cluster directory ${config.dbLocation} must be empty. Attempting to clean it up.` @@ -91812,7 +91999,7 @@ function cleanupDatabaseClusterDirectory(config, logger, options = {}, rmSync2 = // src/status-report.ts var os4 = __toESM(require("os")); -var core11 = __toESM(require_core()); +var core13 = __toESM(require_core()); function isFirstPartyAnalysis(actionName) { if (actionName !== "upload-sarif" /* UploadSarif */) { return true; @@ -91828,12 +92015,12 @@ function getActionsStatus(error2, otherFailureCause) { } function setJobStatusIfUnsuccessful(actionStatus) { if (actionStatus === "user-error") { - core11.exportVariable( + core13.exportVariable( "CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */, process.env["CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */] ?? "JOB_STATUS_CONFIGURATION_ERROR" /* ConfigErrorStatus */ ); } else if (actionStatus === "failure" || actionStatus === "aborted") { - core11.exportVariable( + core13.exportVariable( "CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */, process.env["CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */] ?? 
"JOB_STATUS_FAILURE" /* FailureStatus */ ); @@ -91852,14 +92039,14 @@ async function createStatusReportBase(actionName, status, actionStartedAt, confi let workflowStartedAt = process.env["CODEQL_WORKFLOW_STARTED_AT" /* WORKFLOW_STARTED_AT */]; if (workflowStartedAt === void 0) { workflowStartedAt = actionStartedAt.toISOString(); - core11.exportVariable("CODEQL_WORKFLOW_STARTED_AT" /* WORKFLOW_STARTED_AT */, workflowStartedAt); + core13.exportVariable("CODEQL_WORKFLOW_STARTED_AT" /* WORKFLOW_STARTED_AT */, workflowStartedAt); } const runnerOs = getRequiredEnvParam("RUNNER_OS"); const codeQlCliVersion = getCachedCodeQlVersion(); const actionRef = process.env["GITHUB_ACTION_REF"] || ""; const testingEnvironment = getTestingEnvironment(); if (testingEnvironment) { - core11.exportVariable("CODEQL_ACTION_TESTING_ENVIRONMENT" /* TESTING_ENVIRONMENT */, testingEnvironment); + core13.exportVariable("CODEQL_ACTION_TESTING_ENVIRONMENT" /* TESTING_ENVIRONMENT */, testingEnvironment); } const isSteadyStateDefaultSetupRun = process.env["CODE_SCANNING_IS_STEADY_STATE_DEFAULT_SETUP"] === "true"; const statusReport = { @@ -91940,9 +92127,9 @@ var INCOMPATIBLE_MSG = "CodeQL Action version is incompatible with the API endpo async function sendStatusReport(statusReport) { setJobStatusIfUnsuccessful(statusReport.status); const statusReportJSON = JSON.stringify(statusReport); - core11.debug(`Sending status report: ${statusReportJSON}`); + core13.debug(`Sending status report: ${statusReportJSON}`); if (isInTestMode()) { - core11.debug("In test mode. Status reports are not uploaded."); + core13.debug("In test mode. Status reports are not uploaded."); return; } const nwo = getRepositoryNwo(); @@ -91962,28 +92149,28 @@ async function sendStatusReport(statusReport) { switch (httpError.status) { case 403: if (getWorkflowEventName() === "push" && process.env["GITHUB_ACTOR"] === "dependabot[bot]") { - core11.warning( + core13.warning( `Workflows triggered by Dependabot on the "push" event run with read-only access. Uploading CodeQL results requires write access. To use CodeQL with Dependabot, please ensure you are using the "pull_request" event for this workflow and avoid triggering on the "push" event for Dependabot branches. See ${"https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/customizing-your-advanced-setup-for-code-scanning#scanning-on-push" /* SCANNING_ON_PUSH */} for more information on how to configure these events.` ); } else { - core11.warning( + core13.warning( `This run of the CodeQL Action does not have permission to access the CodeQL Action API endpoints. This could be because the Action is running on a pull request from a fork. If not, please ensure the workflow has at least the 'security-events: read' permission. 
Details: ${httpError.message}` ); } return; case 404: - core11.warning(httpError.message); + core13.warning(httpError.message); return; case 422: if (getRequiredEnvParam("GITHUB_SERVER_URL") !== GITHUB_DOTCOM_URL) { - core11.debug(INCOMPATIBLE_MSG); + core13.debug(INCOMPATIBLE_MSG); } else { - core11.debug(OUT_OF_DATE_MSG); + core13.debug(OUT_OF_DATE_MSG); } return; } } - core11.warning( + core13.warning( `An unexpected error occurred when sending a status report: ${getErrorMessage( e )}` @@ -92037,178 +92224,6 @@ async function createInitWithConfigStatusReport(config, initStatusReport, config }; } -// src/workflow.ts -var fs16 = __toESM(require("fs")); -var path18 = __toESM(require("path")); -var import_zlib = __toESM(require("zlib")); -var core12 = __toESM(require_core()); -function toCodedErrors(errors) { - return Object.entries(errors).reduce( - (acc, [code, message]) => { - acc[code] = { message, code }; - return acc; - }, - {} - ); -} -var WorkflowErrors = toCodedErrors({ - MissingPushHook: `Please specify an on.push hook to analyze and see code scanning alerts from the default branch on the Security tab.`, - CheckoutWrongHead: `git checkout HEAD^2 is no longer necessary. Please remove this step as Code Scanning recommends analyzing the merge commit for best results.`, - InconsistentActionVersion: `Not all workflow steps that use \`github/codeql-action\` actions use the same version. Please ensure that all such steps use the same version to avoid compatibility issues.` -}); -async function groupLanguagesByExtractor(languages, codeql) { - const resolveResult = await codeql.betterResolveLanguages(); - if (!resolveResult.aliases) { - return void 0; - } - const aliases = resolveResult.aliases; - const languagesByExtractor = {}; - for (const language of languages) { - const extractorName = aliases[language] || language; - if (!languagesByExtractor[extractorName]) { - languagesByExtractor[extractorName] = []; - } - languagesByExtractor[extractorName].push(language); - } - return languagesByExtractor; -} -async function getWorkflowErrors(doc, codeql) { - const errors = []; - const jobName = process.env.GITHUB_JOB; - if (jobName) { - const job = doc?.jobs?.[jobName]; - if (job?.strategy?.matrix?.language) { - const matrixLanguages = job.strategy.matrix.language; - if (Array.isArray(matrixLanguages)) { - const matrixLanguagesByExtractor = await groupLanguagesByExtractor( - matrixLanguages, - codeql - ); - if (matrixLanguagesByExtractor !== void 0) { - for (const [extractor, languages] of Object.entries( - matrixLanguagesByExtractor - )) { - if (languages.length > 1) { - errors.push({ - message: `CodeQL language '${extractor}' is referenced by more than one entry in the 'language' matrix parameter for job '${jobName}'. This may result in duplicate alerts. 
Please edit the 'language' matrix parameter to keep only one of the following: ${languages.map((language) => `'${language}'`).join(", ")}.`, - code: "DuplicateLanguageInMatrix" - }); - } - } - } - } - } - const steps = job?.steps; - if (Array.isArray(steps)) { - for (const step of steps) { - if (step?.run === "git checkout HEAD^2") { - errors.push(WorkflowErrors.CheckoutWrongHead); - break; - } - } - } - } - const codeqlStepRefs = []; - for (const job of Object.values(doc?.jobs || {})) { - if (Array.isArray(job.steps)) { - for (const step of job.steps) { - if (step.uses?.startsWith("github/codeql-action/")) { - const parts = step.uses.split("@"); - if (parts.length >= 2) { - codeqlStepRefs.push(parts[parts.length - 1]); - } - } - } - } - } - if (codeqlStepRefs.length > 0 && !codeqlStepRefs.every((ref) => ref === codeqlStepRefs[0])) { - errors.push(WorkflowErrors.InconsistentActionVersion); - } - const hasPushTrigger = hasWorkflowTrigger("push", doc); - const hasPullRequestTrigger = hasWorkflowTrigger("pull_request", doc); - const hasWorkflowCallTrigger = hasWorkflowTrigger("workflow_call", doc); - if (hasPullRequestTrigger && !hasPushTrigger && !hasWorkflowCallTrigger) { - errors.push(WorkflowErrors.MissingPushHook); - } - return errors; -} -function hasWorkflowTrigger(triggerName, doc) { - if (!doc.on) { - return false; - } - if (typeof doc.on === "string") { - return doc.on === triggerName; - } - if (Array.isArray(doc.on)) { - return doc.on.includes(triggerName); - } - return Object.prototype.hasOwnProperty.call(doc.on, triggerName); -} -async function validateWorkflow(codeql, logger) { - let workflow; - try { - workflow = await getWorkflow(logger); - } catch (e) { - return `error: getWorkflow() failed: ${String(e)}`; - } - let workflowErrors; - try { - workflowErrors = await getWorkflowErrors(workflow, codeql); - } catch (e) { - return `error: getWorkflowErrors() failed: ${String(e)}`; - } - if (workflowErrors.length > 0) { - let message; - try { - message = formatWorkflowErrors(workflowErrors); - } catch (e) { - return `error: formatWorkflowErrors() failed: ${String(e)}`; - } - core12.warning(message); - } - return formatWorkflowCause(workflowErrors); -} -function formatWorkflowErrors(errors) { - const issuesWere = errors.length === 1 ? "issue was" : "issues were"; - const errorsList = errors.map((e) => e.message).join(" "); - return `${errors.length} ${issuesWere} detected with this workflow: ${errorsList}`; -} -function formatWorkflowCause(errors) { - if (errors.length === 0) { - return void 0; - } - return errors.map((e) => e.code).join(","); -} -async function getWorkflow(logger) { - const maybeWorkflow = process.env["CODE_SCANNING_WORKFLOW_FILE"]; - if (maybeWorkflow) { - logger.debug( - "Using the workflow specified by the CODE_SCANNING_WORKFLOW_FILE environment variable." 
- ); - return load( - import_zlib.default.gunzipSync(Buffer.from(maybeWorkflow, "base64")).toString() - ); - } - const workflowPath = await getWorkflowAbsolutePath(logger); - return load(fs16.readFileSync(workflowPath, "utf-8")); -} -async function getWorkflowAbsolutePath(logger) { - const relativePath = await getWorkflowRelativePath(); - const absolutePath = path18.join( - getRequiredEnvParam("GITHUB_WORKSPACE"), - relativePath - ); - if (fs16.existsSync(absolutePath)) { - logger.debug( - `Derived the following absolute path for the currently executing workflow: ${absolutePath}.` - ); - return absolutePath; - } - throw new Error( - `Expected to find a code scanning workflow file at ${absolutePath}, but no such file existed. This can happen if the currently running workflow checks out a branch that doesn't contain the corresponding workflow file.` - ); -} - // src/init-action.ts async function sendStartingStatusReport(startedAt, config, logger) { const statusReportBase = await createStatusReportBase( @@ -92305,8 +92320,8 @@ async function run() { const repositoryProperties = enableRepoProps ? await loadPropertiesFromApi(gitHubVersion, logger, repositoryNwo) : {}; const jobRunUuid = v4_default(); logger.info(`Job run UUID is ${jobRunUuid}.`); - core13.exportVariable("JOB_RUN_UUID" /* JOB_RUN_UUID */, jobRunUuid); - core13.exportVariable("CODEQL_ACTION_INIT_HAS_RUN" /* INIT_ACTION_HAS_RUN */, "true"); + core14.exportVariable("JOB_RUN_UUID" /* JOB_RUN_UUID */, jobRunUuid); + core14.exportVariable("CODEQL_ACTION_INIT_HAS_RUN" /* INIT_ACTION_HAS_RUN */, "true"); const configFile = getOptionalInput("config-file"); const sourceRoot = path19.resolve( getRequiredEnvParam("GITHUB_WORKSPACE"), @@ -92345,18 +92360,7 @@ async function run() { toolsVersion = initCodeQLResult.toolsVersion; toolsSource = initCodeQLResult.toolsSource; zstdAvailability = initCodeQLResult.zstdAvailability; - if (process.env["CODEQL_ACTION_SKIP_WORKFLOW_VALIDATION" /* SKIP_WORKFLOW_VALIDATION */] !== "true") { - core13.startGroup("Validating workflow"); - const validateWorkflowResult = await validateWorkflow(codeql, logger); - if (validateWorkflowResult === void 0) { - logger.info("Detected no issues with the code scanning workflow."); - } else { - logger.warning( - `Unable to validate code scanning workflow: ${validateWorkflowResult}` - ); - } - core13.endGroup(); - } + await checkWorkflow(logger, codeql); if ( // Only enable the experimental features env variable for Rust analysis if the user has explicitly // requested rust - don't enable it via language autodetection. @@ -92371,7 +92375,7 @@ async function run() { ); } if (semver8.lt(actualVer, publicPreview)) { - core13.exportVariable("CODEQL_ENABLE_EXPERIMENTAL_FEATURES" /* EXPERIMENTAL_FEATURES */, "true"); + core14.exportVariable("CODEQL_ENABLE_EXPERIMENTAL_FEATURES" /* EXPERIMENTAL_FEATURES */, "true"); logger.info("Experimental Rust analysis enabled"); } } @@ -92391,7 +92395,7 @@ async function run() { // - The `init` Action is passed `debug: true`. // - Actions step debugging is enabled (e.g. by [enabling debug logging for a rerun](https://docs.github.com/en/actions/managing-workflow-runs/re-running-workflows-and-jobs#re-running-all-the-jobs-in-a-workflow), // or by setting the `ACTIONS_STEP_DEBUG` secret to `true`). 
- debugMode: getOptionalInput("debug") === "true" || core13.isDebug(), + debugMode: getOptionalInput("debug") === "true" || core14.isDebug(), debugArtifactName: getOptionalInput("debug-artifact-name") || DEFAULT_DEBUG_ARTIFACT_NAME, debugDatabaseName: getOptionalInput("debug-database-name") || DEFAULT_DEBUG_DATABASE_NAME, repository: repositoryNwo, @@ -92408,7 +92412,7 @@ async function run() { await checkInstallPython311(config.languages, codeql); } catch (unwrappedError) { const error2 = wrapError(unwrappedError); - core13.setFailed(error2.message); + core14.setFailed(error2.message); const statusReportBase = await createStatusReportBase( "init" /* Init */, error2 instanceof ConfigurationError ? "user-error" : "aborted", @@ -92468,8 +92472,8 @@ async function run() { } const goFlags = process.env["GOFLAGS"]; if (goFlags) { - core13.exportVariable("GOFLAGS", goFlags); - core13.warning( + core14.exportVariable("GOFLAGS", goFlags); + core14.warning( "Passing the GOFLAGS env parameter to the init action is deprecated. Please move this to the analyze action." ); } @@ -92493,7 +92497,7 @@ async function run() { "bin" ); fs17.mkdirSync(tempBinPath, { recursive: true }); - core13.addPath(tempBinPath); + core14.addPath(tempBinPath); const goWrapperPath = path19.resolve(tempBinPath, "go"); fs17.writeFileSync( goWrapperPath, @@ -92502,14 +92506,14 @@ async function run() { exec ${goBinaryPath} "$@"` ); fs17.chmodSync(goWrapperPath, "755"); - core13.exportVariable("CODEQL_ACTION_GO_BINARY" /* GO_BINARY_LOCATION */, goWrapperPath); + core14.exportVariable("CODEQL_ACTION_GO_BINARY" /* GO_BINARY_LOCATION */, goWrapperPath); } catch (e) { logger.warning( `Analyzing Go on Linux, but failed to install wrapper script. Tracing custom builds may fail: ${e}` ); } } else { - core13.exportVariable("CODEQL_ACTION_GO_BINARY" /* GO_BINARY_LOCATION */, goBinaryPath); + core14.exportVariable("CODEQL_ACTION_GO_BINARY" /* GO_BINARY_LOCATION */, goBinaryPath); } } catch (e) { logger.warning( @@ -92536,20 +92540,20 @@ exec ${goBinaryPath} "$@"` } } } - core13.exportVariable( + core14.exportVariable( "CODEQL_RAM", process.env["CODEQL_RAM"] || getMemoryFlagValue(getOptionalInput("ram"), logger).toString() ); - core13.exportVariable( + core14.exportVariable( "CODEQL_THREADS", process.env["CODEQL_THREADS"] || getThreadsFlagValue(getOptionalInput("threads"), logger).toString() ); if (await features.getValue("disable_kotlin_analysis_enabled" /* DisableKotlinAnalysisEnabled */)) { - core13.exportVariable("CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN", "true"); + core14.exportVariable("CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN", "true"); } const kotlinLimitVar = "CODEQL_EXTRACTOR_KOTLIN_OVERRIDE_MAXIMUM_VERSION_LIMIT"; if (await codeQlVersionAtLeast(codeql, "2.20.3") && !await codeQlVersionAtLeast(codeql, "2.20.4")) { - core13.exportVariable(kotlinLimitVar, "2.1.20"); + core14.exportVariable(kotlinLimitVar, "2.1.20"); } if (config.languages.includes("cpp" /* cpp */)) { const envVar = "CODEQL_EXTRACTOR_CPP_TRAP_CACHING"; @@ -92559,10 +92563,10 @@ exec ${goBinaryPath} "$@"` ); } else if (getTrapCachingEnabled() && await codeQlVersionAtLeast(codeql, "2.17.5")) { logger.info("Enabling CodeQL C++ TRAP caching support"); - core13.exportVariable(envVar, "true"); + core14.exportVariable(envVar, "true"); } else { logger.info("Disabling CodeQL C++ TRAP caching support"); - core13.exportVariable(envVar, "false"); + core14.exportVariable(envVar, "false"); } } const minimizeJavaJars = await features.getValue( @@ -92578,7 +92582,7 @@ exec 
${goBinaryPath} "$@"` } if (await codeQlVersionAtLeast(codeql, "2.17.1")) { } else { - core13.exportVariable( + core14.exportVariable( "CODEQL_EXTRACTOR_PYTHON_DISABLE_LIBRARY_EXTRACTION", "true" ); @@ -92604,7 +92608,7 @@ exec ${goBinaryPath} "$@"` "python_default_is_to_not_extract_stdlib" /* PythonDefaultIsToNotExtractStdlib */, codeql )) { - core13.exportVariable("CODEQL_EXTRACTOR_PYTHON_EXTRACT_STDLIB", "true"); + core14.exportVariable("CODEQL_EXTRACTOR_PYTHON_EXTRACT_STDLIB", "true"); } } if (process.env["CODEQL_EXTRACTOR_JAVA_OPTION_MINIMIZE_DEPENDENCY_JARS" /* JAVA_EXTRACTOR_MINIMIZE_DEPENDENCY_JARS */]) { @@ -92612,7 +92616,7 @@ exec ${goBinaryPath} "$@"` `${"CODEQL_EXTRACTOR_JAVA_OPTION_MINIMIZE_DEPENDENCY_JARS" /* JAVA_EXTRACTOR_MINIMIZE_DEPENDENCY_JARS */} is already set to '${process.env["CODEQL_EXTRACTOR_JAVA_OPTION_MINIMIZE_DEPENDENCY_JARS" /* JAVA_EXTRACTOR_MINIMIZE_DEPENDENCY_JARS */]}', so the Action will not override it.` ); } else if (minimizeJavaJars && config.dependencyCachingEnabled && config.buildMode === "none" /* None */ && config.languages.includes("java" /* java */)) { - core13.exportVariable( + core14.exportVariable( "CODEQL_EXTRACTOR_JAVA_OPTION_MINIMIZE_DEPENDENCY_JARS" /* JAVA_EXTRACTOR_MINIMIZE_DEPENDENCY_JARS */, "true" ); @@ -92656,15 +92660,15 @@ exec ${goBinaryPath} "$@"` const tracerConfig = await getCombinedTracerConfig(codeql, config); if (tracerConfig !== void 0) { for (const [key, value] of Object.entries(tracerConfig.env)) { - core13.exportVariable(key, value); + core14.exportVariable(key, value); } } flushDiagnostics(config); - core13.setOutput("codeql-path", config.codeQLCmd); - core13.setOutput("codeql-version", (await codeql.getVersion()).version); + core14.setOutput("codeql-path", config.codeQLCmd); + core14.setOutput("codeql-version", (await codeql.getVersion()).version); } catch (unwrappedError) { const error2 = wrapError(unwrappedError); - core13.setFailed(error2.message); + core14.setFailed(error2.message); await sendCompletedStatusReport( startedAt, config, @@ -92727,7 +92731,7 @@ async function runWrapper() { try { await run(); } catch (error2) { - core13.setFailed(`init action failed: ${getErrorMessage(error2)}`); + core14.setFailed(`init action failed: ${getErrorMessage(error2)}`); } await checkForTimeout(); } diff --git a/lib/setup-codeql-action.js b/lib/setup-codeql-action.js index 37e3f6121a..7fefd35163 100644 --- a/lib/setup-codeql-action.js +++ b/lib/setup-codeql-action.js @@ -19743,10 +19743,10 @@ Support boolean input list: \`true | True | TRUE | false | False | FALSE\``); (0, command_1.issueCommand)("error", (0, utils_1.toCommandProperties)(properties), message instanceof Error ? message.toString() : message); } exports2.error = error2; - function warning6(message, properties = {}) { + function warning7(message, properties = {}) { (0, command_1.issueCommand)("warning", (0, utils_1.toCommandProperties)(properties), message instanceof Error ? message.toString() : message); } - exports2.warning = warning6; + exports2.warning = warning7; function notice(message, properties = {}) { (0, command_1.issueCommand)("notice", (0, utils_1.toCommandProperties)(properties), message instanceof Error ? 
message.toString() : message); } @@ -19755,22 +19755,22 @@ Support boolean input list: \`true | True | TRUE | false | False | FALSE\``); process.stdout.write(message + os3.EOL); } exports2.info = info4; - function startGroup3(name) { + function startGroup4(name) { (0, command_1.issue)("group", name); } - exports2.startGroup = startGroup3; - function endGroup3() { + exports2.startGroup = startGroup4; + function endGroup4() { (0, command_1.issue)("endgroup"); } - exports2.endGroup = endGroup3; + exports2.endGroup = endGroup4; function group(name, fn) { return __awaiter4(this, void 0, void 0, function* () { - startGroup3(name); + startGroup4(name); let result; try { result = yield fn(); } finally { - endGroup3(); + endGroup4(); } return result; }); @@ -34015,7 +34015,7 @@ var require_internal_glob_options_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getOptions = void 0; - var core13 = __importStar4(require_core()); + var core15 = __importStar4(require_core()); function getOptions(copy) { const result = { followSymbolicLinks: true, @@ -34025,15 +34025,15 @@ var require_internal_glob_options_helper = __commonJS({ if (copy) { if (typeof copy.followSymbolicLinks === "boolean") { result.followSymbolicLinks = copy.followSymbolicLinks; - core13.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); + core15.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); } if (typeof copy.implicitDescendants === "boolean") { result.implicitDescendants = copy.implicitDescendants; - core13.debug(`implicitDescendants '${result.implicitDescendants}'`); + core15.debug(`implicitDescendants '${result.implicitDescendants}'`); } if (typeof copy.omitBrokenSymbolicLinks === "boolean") { result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; - core13.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); + core15.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); } } return result; @@ -35487,7 +35487,7 @@ var require_internal_globber = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; - var core13 = __importStar4(require_core()); + var core15 = __importStar4(require_core()); var fs11 = __importStar4(require("fs")); var globOptionsHelper = __importStar4(require_internal_glob_options_helper()); var path12 = __importStar4(require("path")); @@ -35538,7 +35538,7 @@ var require_internal_globber = __commonJS({ } const stack = []; for (const searchPath of patternHelper.getSearchPaths(patterns)) { - core13.debug(`Search path '${searchPath}'`); + core15.debug(`Search path '${searchPath}'`); try { yield __await4(fs11.promises.lstat(searchPath)); } catch (err) { @@ -35610,7 +35610,7 @@ var require_internal_globber = __commonJS({ } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { - core13.debug(`Broken symlink '${item.path}'`); + core15.debug(`Broken symlink '${item.path}'`); return void 0; } throw new Error(`No information found for the path '${item.path}'. 
This may indicate a broken symbolic link.`); @@ -35626,7 +35626,7 @@ var require_internal_globber = __commonJS({ traversalChain.pop(); } if (traversalChain.some((x) => x === realPath)) { - core13.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); + core15.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); return void 0; } traversalChain.push(realPath); @@ -36950,7 +36950,7 @@ var require_cacheUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getRuntimeToken = exports2.getCacheVersion = exports2.assertDefined = exports2.getGnuTarPathOnWindows = exports2.getCacheFileName = exports2.getCompressionMethod = exports2.unlinkFile = exports2.resolvePaths = exports2.getArchiveFileSizeInBytes = exports2.createTempDirectory = void 0; - var core13 = __importStar4(require_core()); + var core15 = __importStar4(require_core()); var exec2 = __importStar4(require_exec()); var glob = __importStar4(require_glob()); var io6 = __importStar4(require_io()); @@ -37003,7 +37003,7 @@ var require_cacheUtils = __commonJS({ _e = false; const file = _c; const relativeFile = path12.relative(workspace, file).replace(new RegExp(`\\${path12.sep}`, "g"), "/"); - core13.debug(`Matched: ${relativeFile}`); + core15.debug(`Matched: ${relativeFile}`); if (relativeFile === "") { paths.push("."); } else { @@ -37033,7 +37033,7 @@ var require_cacheUtils = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { let versionOutput = ""; additionalArgs.push("--version"); - core13.debug(`Checking ${app} ${additionalArgs.join(" ")}`); + core15.debug(`Checking ${app} ${additionalArgs.join(" ")}`); try { yield exec2.exec(`${app}`, additionalArgs, { ignoreReturnCode: true, @@ -37044,10 +37044,10 @@ var require_cacheUtils = __commonJS({ } }); } catch (err) { - core13.debug(err.message); + core15.debug(err.message); } versionOutput = versionOutput.trim(); - core13.debug(versionOutput); + core15.debug(versionOutput); return versionOutput; }); } @@ -37055,7 +37055,7 @@ var require_cacheUtils = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { const versionOutput = yield getVersion("zstd", ["--quiet"]); const version = semver8.clean(versionOutput); - core13.debug(`zstd version: ${version}`); + core15.debug(`zstd version: ${version}`); if (versionOutput === "") { return constants_1.CompressionMethod.Gzip; } else { @@ -72367,7 +72367,7 @@ var require_uploadUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.uploadCacheArchiveSDK = exports2.UploadProgress = void 0; - var core13 = __importStar4(require_core()); + var core15 = __importStar4(require_core()); var storage_blob_1 = require_dist7(); var errors_1 = require_errors2(); var UploadProgress = class { @@ -72409,7 +72409,7 @@ var require_uploadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const uploadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core13.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); + core15.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -72464,14 +72464,14 @@ var require_uploadUtils = __commonJS({ }; try { uploadProgress.startDisplayTimer(); - core13.debug(`BlobClient: 
${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); + core15.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); const response = yield blockBlobClient.uploadFile(archivePath, uploadOptions); if (response._response.status >= 400) { throw new errors_1.InvalidResponseError(`uploadCacheArchiveSDK: upload failed with status code ${response._response.status}`); } return response; } catch (error2) { - core13.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error2.message}`); + core15.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error2.message}`); throw error2; } finally { uploadProgress.stopDisplayTimer(); @@ -72542,7 +72542,7 @@ var require_requestUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.retryHttpClientResponse = exports2.retryTypedResponse = exports2.retry = exports2.isRetryableStatusCode = exports2.isServerErrorStatusCode = exports2.isSuccessStatusCode = void 0; - var core13 = __importStar4(require_core()); + var core15 = __importStar4(require_core()); var http_client_1 = require_lib(); var constants_1 = require_constants10(); function isSuccessStatusCode(statusCode) { @@ -72603,9 +72603,9 @@ var require_requestUtils = __commonJS({ isRetryable = isRetryableStatusCode(statusCode); errorMessage = `Cache service responded with ${statusCode}`; } - core13.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); + core15.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); if (!isRetryable) { - core13.debug(`${name} - Error is not retryable`); + core15.debug(`${name} - Error is not retryable`); break; } yield sleep(delay2); @@ -72710,7 +72710,7 @@ var require_downloadUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.downloadCacheStorageSDK = exports2.downloadCacheHttpClientConcurrent = exports2.downloadCacheHttpClient = exports2.DownloadProgress = void 0; - var core13 = __importStar4(require_core()); + var core15 = __importStar4(require_core()); var http_client_1 = require_lib(); var storage_blob_1 = require_dist7(); var buffer = __importStar4(require("buffer")); @@ -72748,7 +72748,7 @@ var require_downloadUtils = __commonJS({ this.segmentIndex = this.segmentIndex + 1; this.segmentSize = segmentSize; this.receivedBytes = 0; - core13.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); + core15.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); } /** * Sets the number of bytes received for the current segment. 
@@ -72782,7 +72782,7 @@ var require_downloadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const downloadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core13.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); + core15.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -72832,7 +72832,7 @@ var require_downloadUtils = __commonJS({ })); downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => { downloadResponse.message.destroy(); - core13.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); + core15.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); }); yield pipeResponseToStream(downloadResponse, writeStream); const contentLengthHeader = downloadResponse.message.headers["content-length"]; @@ -72843,7 +72843,7 @@ var require_downloadUtils = __commonJS({ throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`); } } else { - core13.debug("Unable to validate download, no Content-Length header"); + core15.debug("Unable to validate download, no Content-Length header"); } }); } @@ -72963,7 +72963,7 @@ var require_downloadUtils = __commonJS({ const properties = yield client.getProperties(); const contentLength = (_a = properties.contentLength) !== null && _a !== void 0 ? _a : -1; if (contentLength < 0) { - core13.debug("Unable to determine content length, downloading file with http-client..."); + core15.debug("Unable to determine content length, downloading file with http-client..."); yield downloadCacheHttpClient(archiveLocation, archivePath); } else { const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH); @@ -73043,7 +73043,7 @@ var require_options = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getDownloadOptions = exports2.getUploadOptions = void 0; - var core13 = __importStar4(require_core()); + var core15 = __importStar4(require_core()); function getUploadOptions(copy) { const result = { useAzureSdk: false, @@ -73063,9 +73063,9 @@ var require_options = __commonJS({ } result.uploadConcurrency = !isNaN(Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) ? Math.min(32, Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) : result.uploadConcurrency; result.uploadChunkSize = !isNaN(Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"])) ? 
Math.min(128 * 1024 * 1024, Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"]) * 1024 * 1024) : result.uploadChunkSize; - core13.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core13.debug(`Upload concurrency: ${result.uploadConcurrency}`); - core13.debug(`Upload chunk size: ${result.uploadChunkSize}`); + core15.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core15.debug(`Upload concurrency: ${result.uploadConcurrency}`); + core15.debug(`Upload chunk size: ${result.uploadChunkSize}`); return result; } exports2.getUploadOptions = getUploadOptions; @@ -73102,12 +73102,12 @@ var require_options = __commonJS({ if (segmentDownloadTimeoutMins && !isNaN(Number(segmentDownloadTimeoutMins)) && isFinite(Number(segmentDownloadTimeoutMins))) { result.segmentTimeoutInMs = Number(segmentDownloadTimeoutMins) * 60 * 1e3; } - core13.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core13.debug(`Download concurrency: ${result.downloadConcurrency}`); - core13.debug(`Request timeout (ms): ${result.timeoutInMs}`); - core13.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); - core13.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); - core13.debug(`Lookup only: ${result.lookupOnly}`); + core15.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core15.debug(`Download concurrency: ${result.downloadConcurrency}`); + core15.debug(`Request timeout (ms): ${result.timeoutInMs}`); + core15.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); + core15.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); + core15.debug(`Lookup only: ${result.lookupOnly}`); return result; } exports2.getDownloadOptions = getDownloadOptions; @@ -73287,7 +73287,7 @@ var require_cacheHttpClient = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.saveCache = exports2.reserveCache = exports2.downloadCache = exports2.getCacheEntry = void 0; - var core13 = __importStar4(require_core()); + var core15 = __importStar4(require_core()); var http_client_1 = require_lib(); var auth_1 = require_auth(); var fs11 = __importStar4(require("fs")); @@ -73305,7 +73305,7 @@ var require_cacheHttpClient = __commonJS({ throw new Error("Cache Service Url not found, unable to restore cache."); } const url = `${baseUrl}_apis/artifactcache/${resource}`; - core13.debug(`Resource Url: ${url}`); + core15.debug(`Resource Url: ${url}`); return url; } function createAcceptHeader(type2, apiVersion) { @@ -73333,7 +73333,7 @@ var require_cacheHttpClient = __commonJS({ return httpClient.getJson(getCacheApiUrl(resource)); })); if (response.statusCode === 204) { - if (core13.isDebug()) { + if (core15.isDebug()) { yield printCachesListForDiagnostics(keys[0], httpClient, version); } return null; @@ -73346,9 +73346,9 @@ var require_cacheHttpClient = __commonJS({ if (!cacheDownloadUrl) { throw new Error("Cache not found."); } - core13.setSecret(cacheDownloadUrl); - core13.debug(`Cache Result:`); - core13.debug(JSON.stringify(cacheResult)); + core15.setSecret(cacheDownloadUrl); + core15.debug(`Cache Result:`); + core15.debug(JSON.stringify(cacheResult)); return cacheResult; }); } @@ -73363,10 +73363,10 @@ var require_cacheHttpClient = __commonJS({ const cacheListResult = response.result; const totalCount = cacheListResult === null || cacheListResult === void 0 ? 
void 0 : cacheListResult.totalCount; if (totalCount && totalCount > 0) { - core13.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key + core15.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key Other caches with similar key:`); for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) { - core13.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); + core15.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); } } } @@ -73411,7 +73411,7 @@ Other caches with similar key:`); } function uploadChunk(httpClient, resourceUrl, openStream, start, end) { return __awaiter4(this, void 0, void 0, function* () { - core13.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); + core15.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); const additionalHeaders = { "Content-Type": "application/octet-stream", "Content-Range": getContentRange(start, end) @@ -73433,7 +73433,7 @@ Other caches with similar key:`); const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency); const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize); const parallelUploads = [...new Array(concurrency).keys()]; - core13.debug("Awaiting all uploads"); + core15.debug("Awaiting all uploads"); let offset = 0; try { yield Promise.all(parallelUploads.map(() => __awaiter4(this, void 0, void 0, function* () { @@ -73476,16 +73476,16 @@ Other caches with similar key:`); yield (0, uploadUtils_1.uploadCacheArchiveSDK)(signedUploadURL, archivePath, options); } else { const httpClient = createHttpClient(); - core13.debug("Upload cache"); + core15.debug("Upload cache"); yield uploadFile(httpClient, cacheId, archivePath, options); - core13.debug("Commiting cache"); + core15.debug("Commiting cache"); const cacheSize = utils.getArchiveFileSizeInBytes(archivePath); - core13.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); + core15.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); const commitCacheResponse = yield commitCache(httpClient, 
cacheId, cacheSize); if (!(0, requestUtils_1.isSuccessStatusCode)(commitCacheResponse.statusCode)) { throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`); } - core13.info("Cache saved successfully"); + core15.info("Cache saved successfully"); } }); } @@ -78937,7 +78937,7 @@ var require_cache3 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.saveCache = exports2.restoreCache = exports2.isFeatureAvailable = exports2.FinalizeCacheError = exports2.ReserveCacheError = exports2.ValidationError = void 0; - var core13 = __importStar4(require_core()); + var core15 = __importStar4(require_core()); var path12 = __importStar4(require("path")); var utils = __importStar4(require_cacheUtils()); var cacheHttpClient = __importStar4(require_cacheHttpClient()); @@ -78997,7 +78997,7 @@ var require_cache3 = __commonJS({ function restoreCache3(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { return __awaiter4(this, void 0, void 0, function* () { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core13.debug(`Cache service version: ${cacheServiceVersion}`); + core15.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); switch (cacheServiceVersion) { case "v2": @@ -79013,8 +79013,8 @@ var require_cache3 = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core13.debug("Resolved Keys:"); - core13.debug(JSON.stringify(keys)); + core15.debug("Resolved Keys:"); + core15.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -79032,19 +79032,19 @@ var require_cache3 = __commonJS({ return void 0; } if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { - core13.info("Lookup only - skipping download"); + core15.info("Lookup only - skipping download"); return cacheEntry.cacheKey; } archivePath = path12.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core13.debug(`Archive Path: ${archivePath}`); + core15.debug(`Archive Path: ${archivePath}`); yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); - if (core13.isDebug()) { + if (core15.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core13.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + core15.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); yield (0, tar_1.extractTar)(archivePath, compressionMethod); - core13.info("Cache restored successfully"); + core15.info("Cache restored successfully"); return cacheEntry.cacheKey; } catch (error2) { const typedError = error2; @@ -79052,16 +79052,16 @@ var require_cache3 = __commonJS({ throw error2; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core13.error(`Failed to restore: ${error2.message}`); + core15.error(`Failed to restore: ${error2.message}`); } else { - core13.warning(`Failed to restore: ${error2.message}`); + core15.warning(`Failed to restore: ${error2.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error2) { - core13.debug(`Failed to delete archive: ${error2}`); + core15.debug(`Failed to delete archive: ${error2}`); } } return void 0; @@ -79072,8 +79072,8 @@ var require_cache3 = __commonJS({ options = Object.assign(Object.assign({}, options), { useAzureSdk: true }); restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core13.debug("Resolved Keys:"); - core13.debug(JSON.stringify(keys)); + core15.debug("Resolved Keys:"); + core15.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -79091,30 +79091,30 @@ var require_cache3 = __commonJS({ }; const response = yield twirpClient.GetCacheEntryDownloadURL(request); if (!response.ok) { - core13.debug(`Cache not found for version ${request.version} of keys: ${keys.join(", ")}`); + core15.debug(`Cache not found for version ${request.version} of keys: ${keys.join(", ")}`); return void 0; } const isRestoreKeyMatch = request.key !== response.matchedKey; if (isRestoreKeyMatch) { - core13.info(`Cache hit for restore-key: ${response.matchedKey}`); + core15.info(`Cache hit for restore-key: ${response.matchedKey}`); } else { - core13.info(`Cache hit for: ${response.matchedKey}`); + core15.info(`Cache hit for: ${response.matchedKey}`); } if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { - core13.info("Lookup only - skipping download"); + core15.info("Lookup only - skipping download"); return response.matchedKey; } archivePath = path12.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core13.debug(`Archive path: ${archivePath}`); - core13.debug(`Starting download of archive to: ${archivePath}`); + core15.debug(`Archive path: ${archivePath}`); + core15.debug(`Starting download of archive to: ${archivePath}`); yield cacheHttpClient.downloadCache(response.signedDownloadUrl, archivePath, options); const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core13.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); - if (core13.isDebug()) { + core15.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + if (core15.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } yield (0, tar_1.extractTar)(archivePath, compressionMethod); - core13.info("Cache restored successfully"); + core15.info("Cache restored successfully"); return response.matchedKey; } catch (error2) { const typedError = error2; @@ -79122,9 +79122,9 @@ var require_cache3 = __commonJS({ throw error2; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core13.error(`Failed to restore: ${error2.message}`); + core15.error(`Failed to restore: ${error2.message}`); } else { - core13.warning(`Failed to restore: ${error2.message}`); + core15.warning(`Failed to restore: ${error2.message}`); } } } finally { @@ -79133,7 +79133,7 @@ var require_cache3 = __commonJS({ yield utils.unlinkFile(archivePath); } } catch (error2) { - core13.debug(`Failed to delete archive: ${error2}`); + core15.debug(`Failed to delete archive: ${error2}`); } } return void 0; @@ -79142,7 +79142,7 @@ var require_cache3 = __commonJS({ function saveCache3(paths, key, options, enableCrossOsArchive = false) { return __awaiter4(this, void 0, void 0, function* () { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core13.debug(`Cache service version: ${cacheServiceVersion}`); + core15.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); checkKey(key); switch (cacheServiceVersion) { @@ -79161,26 +79161,26 @@ var require_cache3 = __commonJS({ const compressionMethod = yield utils.getCompressionMethod(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core13.debug("Cache Paths:"); - core13.debug(`${JSON.stringify(cachePaths)}`); + core15.debug("Cache Paths:"); + core15.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = path12.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core13.debug(`Archive Path: ${archivePath}`); + core15.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core13.isDebug()) { + if (core15.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const fileSizeLimit = 10 * 1024 * 1024 * 1024; const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core13.debug(`File Size: ${archiveFileSize}`); + core15.debug(`File Size: ${archiveFileSize}`); if 
(archiveFileSize > fileSizeLimit && !(0, config_1.isGhes)()) { throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); } - core13.debug("Reserving Cache"); + core15.debug("Reserving Cache"); const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, { compressionMethod, enableCrossOsArchive, @@ -79193,26 +79193,26 @@ var require_cache3 = __commonJS({ } else { throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? void 0 : _e.message}`); } - core13.debug(`Saving Cache (ID: ${cacheId})`); + core15.debug(`Saving Cache (ID: ${cacheId})`); yield cacheHttpClient.saveCache(cacheId, archivePath, "", options); } catch (error2) { const typedError = error2; if (typedError.name === ValidationError.name) { throw error2; } else if (typedError.name === ReserveCacheError.name) { - core13.info(`Failed to save: ${typedError.message}`); + core15.info(`Failed to save: ${typedError.message}`); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core13.error(`Failed to save: ${typedError.message}`); + core15.error(`Failed to save: ${typedError.message}`); } else { - core13.warning(`Failed to save: ${typedError.message}`); + core15.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error2) { - core13.debug(`Failed to delete archive: ${error2}`); + core15.debug(`Failed to delete archive: ${error2}`); } } return cacheId; @@ -79225,23 +79225,23 @@ var require_cache3 = __commonJS({ const twirpClient = cacheTwirpClient.internalCacheTwirpClient(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core13.debug("Cache Paths:"); - core13.debug(`${JSON.stringify(cachePaths)}`); + core15.debug("Cache Paths:"); + core15.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = path12.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core13.debug(`Archive Path: ${archivePath}`); + core15.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core13.isDebug()) { + if (core15.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core13.debug(`File Size: ${archiveFileSize}`); + core15.debug(`File Size: ${archiveFileSize}`); options.archiveSizeBytes = archiveFileSize; - core13.debug("Reserving Cache"); + core15.debug("Reserving Cache"); const version = utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive); const request = { key, @@ -79252,16 +79252,16 @@ var require_cache3 = __commonJS({ const response = yield twirpClient.CreateCacheEntry(request); if (!response.ok) { if (response.message) { - core13.warning(`Cache reservation failed: ${response.message}`); + core15.warning(`Cache reservation failed: ${response.message}`); } throw new Error(response.message || "Response was not ok"); } signedUploadUrl = 
response.signedUploadUrl; } catch (error2) { - core13.debug(`Failed to reserve cache: ${error2}`); + core15.debug(`Failed to reserve cache: ${error2}`); throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache.`); } - core13.debug(`Attempting to upload cache located at: ${archivePath}`); + core15.debug(`Attempting to upload cache located at: ${archivePath}`); yield cacheHttpClient.saveCache(cacheId, archivePath, signedUploadUrl, options); const finalizeRequest = { key, @@ -79269,7 +79269,7 @@ var require_cache3 = __commonJS({ sizeBytes: `${archiveFileSize}` }; const finalizeResponse = yield twirpClient.FinalizeCacheEntryUpload(finalizeRequest); - core13.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); + core15.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); if (!finalizeResponse.ok) { if (finalizeResponse.message) { throw new FinalizeCacheError(finalizeResponse.message); @@ -79282,21 +79282,21 @@ var require_cache3 = __commonJS({ if (typedError.name === ValidationError.name) { throw error2; } else if (typedError.name === ReserveCacheError.name) { - core13.info(`Failed to save: ${typedError.message}`); + core15.info(`Failed to save: ${typedError.message}`); } else if (typedError.name === FinalizeCacheError.name) { - core13.warning(typedError.message); + core15.warning(typedError.message); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core13.error(`Failed to save: ${typedError.message}`); + core15.error(`Failed to save: ${typedError.message}`); } else { - core13.warning(`Failed to save: ${typedError.message}`); + core15.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error2) { - core13.debug(`Failed to delete archive: ${error2}`); + core15.debug(`Failed to delete archive: ${error2}`); } } return cacheId; @@ -80801,7 +80801,7 @@ var require_retry_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.RetryHelper = void 0; - var core13 = __importStar4(require_core()); + var core15 = __importStar4(require_core()); var RetryHelper = class { constructor(maxAttempts, minSeconds, maxSeconds) { if (maxAttempts < 1) { @@ -80824,10 +80824,10 @@ var require_retry_helper = __commonJS({ if (isRetryable && !isRetryable(err)) { throw err; } - core13.info(err.message); + core15.info(err.message); } const seconds = this.getSleepAmount(); - core13.info(`Waiting ${seconds} seconds before trying again`); + core15.info(`Waiting ${seconds} seconds before trying again`); yield this.sleep(seconds); attempt++; } @@ -80907,7 +80907,7 @@ var require_tool_cache = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.evaluateVersions = exports2.isExplicitVersion = exports2.findFromManifest = exports2.getManifestFromRepo = exports2.findAllVersions = exports2.find = exports2.cacheFile = exports2.cacheDir = exports2.extractZip = exports2.extractXar = exports2.extractTar = exports2.extract7z = exports2.downloadTool = exports2.HTTPError = void 0; - var core13 = __importStar4(require_core()); + var core15 = __importStar4(require_core()); var io6 = __importStar4(require_io()); var crypto = __importStar4(require("crypto")); var fs11 = __importStar4(require("fs")); @@ -80936,8 +80936,8 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { dest = dest || 
path12.join(_getTempDirectory(), crypto.randomUUID()); yield io6.mkdirP(path12.dirname(dest)); - core13.debug(`Downloading ${url}`); - core13.debug(`Destination ${dest}`); + core15.debug(`Downloading ${url}`); + core15.debug(`Destination ${dest}`); const maxAttempts = 3; const minSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS", 10); const maxSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS", 20); @@ -80964,7 +80964,7 @@ var require_tool_cache = __commonJS({ allowRetries: false }); if (auth) { - core13.debug("set auth"); + core15.debug("set auth"); if (headers === void 0) { headers = {}; } @@ -80973,7 +80973,7 @@ var require_tool_cache = __commonJS({ const response = yield http.get(url, headers); if (response.message.statusCode !== 200) { const err = new HTTPError2(response.message.statusCode); - core13.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); + core15.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); throw err; } const pipeline = util.promisify(stream2.pipeline); @@ -80982,16 +80982,16 @@ var require_tool_cache = __commonJS({ let succeeded = false; try { yield pipeline(readStream, fs11.createWriteStream(dest)); - core13.debug("download complete"); + core15.debug("download complete"); succeeded = true; return dest; } finally { if (!succeeded) { - core13.debug("download failed"); + core15.debug("download failed"); try { yield io6.rmRF(dest); } catch (err) { - core13.debug(`Failed to delete '${dest}'. ${err.message}`); + core15.debug(`Failed to delete '${dest}'. ${err.message}`); } } } @@ -81006,7 +81006,7 @@ var require_tool_cache = __commonJS({ process.chdir(dest); if (_7zPath) { try { - const logLevel = core13.isDebug() ? "-bb1" : "-bb0"; + const logLevel = core15.isDebug() ? 
"-bb1" : "-bb0"; const args = [ "x", logLevel, @@ -81056,7 +81056,7 @@ var require_tool_cache = __commonJS({ throw new Error("parameter 'file' is required"); } dest = yield _createExtractFolder(dest); - core13.debug("Checking tar --version"); + core15.debug("Checking tar --version"); let versionOutput = ""; yield (0, exec_1.exec)("tar --version", [], { ignoreReturnCode: true, @@ -81066,7 +81066,7 @@ var require_tool_cache = __commonJS({ stderr: (data) => versionOutput += data.toString() } }); - core13.debug(versionOutput.trim()); + core15.debug(versionOutput.trim()); const isGnuTar = versionOutput.toUpperCase().includes("GNU TAR"); let args; if (flags instanceof Array) { @@ -81074,7 +81074,7 @@ var require_tool_cache = __commonJS({ } else { args = [flags]; } - if (core13.isDebug() && !flags.includes("v")) { + if (core15.isDebug() && !flags.includes("v")) { args.push("-v"); } let destArg = dest; @@ -81106,7 +81106,7 @@ var require_tool_cache = __commonJS({ args = [flags]; } args.push("-x", "-C", dest, "-f", file); - if (core13.isDebug()) { + if (core15.isDebug()) { args.push("-v"); } const xarPath = yield io6.which("xar", true); @@ -81151,7 +81151,7 @@ var require_tool_cache = __commonJS({ "-Command", pwshCommand ]; - core13.debug(`Using pwsh at path: ${pwshPath}`); + core15.debug(`Using pwsh at path: ${pwshPath}`); yield (0, exec_1.exec)(`"${pwshPath}"`, args); } else { const powershellCommand = [ @@ -81171,7 +81171,7 @@ var require_tool_cache = __commonJS({ powershellCommand ]; const powershellPath = yield io6.which("powershell", true); - core13.debug(`Using powershell at path: ${powershellPath}`); + core15.debug(`Using powershell at path: ${powershellPath}`); yield (0, exec_1.exec)(`"${powershellPath}"`, args); } }); @@ -81180,7 +81180,7 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { const unzipPath = yield io6.which("unzip", true); const args = [file]; - if (!core13.isDebug()) { + if (!core15.isDebug()) { args.unshift("-q"); } args.unshift("-o"); @@ -81191,8 +81191,8 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { version = semver8.clean(version) || version; arch2 = arch2 || os3.arch(); - core13.debug(`Caching tool ${tool} ${version} ${arch2}`); - core13.debug(`source dir: ${sourceDir}`); + core15.debug(`Caching tool ${tool} ${version} ${arch2}`); + core15.debug(`source dir: ${sourceDir}`); if (!fs11.statSync(sourceDir).isDirectory()) { throw new Error("sourceDir is not a directory"); } @@ -81210,14 +81210,14 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { version = semver8.clean(version) || version; arch2 = arch2 || os3.arch(); - core13.debug(`Caching tool ${tool} ${version} ${arch2}`); - core13.debug(`source file: ${sourceFile}`); + core15.debug(`Caching tool ${tool} ${version} ${arch2}`); + core15.debug(`source file: ${sourceFile}`); if (!fs11.statSync(sourceFile).isFile()) { throw new Error("sourceFile is not a file"); } const destFolder = yield _createToolPath(tool, version, arch2); const destPath = path12.join(destFolder, targetFile); - core13.debug(`destination file ${destPath}`); + core15.debug(`destination file ${destPath}`); yield io6.cp(sourceFile, destPath); _completeToolPath(tool, version, arch2); return destFolder; @@ -81241,12 +81241,12 @@ var require_tool_cache = __commonJS({ if (versionSpec) { versionSpec = semver8.clean(versionSpec) || ""; const cachePath = path12.join(_getCacheDirectory(), toolName, versionSpec, arch2); - 
core13.debug(`checking cache: ${cachePath}`); + core15.debug(`checking cache: ${cachePath}`); if (fs11.existsSync(cachePath) && fs11.existsSync(`${cachePath}.complete`)) { - core13.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); + core15.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); toolPath = cachePath; } else { - core13.debug("not found"); + core15.debug("not found"); } } return toolPath; @@ -81277,7 +81277,7 @@ var require_tool_cache = __commonJS({ const http = new httpm.HttpClient("tool-cache"); const headers = {}; if (auth) { - core13.debug("set auth"); + core15.debug("set auth"); headers.authorization = auth; } const response = yield http.getJson(treeUrl, headers); @@ -81298,7 +81298,7 @@ var require_tool_cache = __commonJS({ try { releases = JSON.parse(versionsRaw); } catch (_a) { - core13.debug("Invalid json"); + core15.debug("Invalid json"); } } return releases; @@ -81324,7 +81324,7 @@ var require_tool_cache = __commonJS({ function _createToolPath(tool, version, arch2) { return __awaiter4(this, void 0, void 0, function* () { const folderPath = path12.join(_getCacheDirectory(), tool, semver8.clean(version) || version, arch2 || ""); - core13.debug(`destination ${folderPath}`); + core15.debug(`destination ${folderPath}`); const markerPath = `${folderPath}.complete`; yield io6.rmRF(folderPath); yield io6.rmRF(markerPath); @@ -81336,19 +81336,19 @@ var require_tool_cache = __commonJS({ const folderPath = path12.join(_getCacheDirectory(), tool, semver8.clean(version) || version, arch2 || ""); const markerPath = `${folderPath}.complete`; fs11.writeFileSync(markerPath, ""); - core13.debug("finished caching tool"); + core15.debug("finished caching tool"); } function isExplicitVersion(versionSpec) { const c = semver8.clean(versionSpec) || ""; - core13.debug(`isExplicit: ${c}`); + core15.debug(`isExplicit: ${c}`); const valid3 = semver8.valid(c) != null; - core13.debug(`explicit? ${valid3}`); + core15.debug(`explicit? ${valid3}`); return valid3; } exports2.isExplicitVersion = isExplicitVersion; function evaluateVersions(versions, versionSpec) { let version = ""; - core13.debug(`evaluating ${versions.length} versions`); + core15.debug(`evaluating ${versions.length} versions`); versions = versions.sort((a, b) => { if (semver8.gt(a, b)) { return 1; @@ -81364,9 +81364,9 @@ var require_tool_cache = __commonJS({ } } if (version) { - core13.debug(`matched: ${version}`); + core15.debug(`matched: ${version}`); } else { - core13.debug("match not found"); + core15.debug("match not found"); } return version; } @@ -81943,7 +81943,7 @@ var require_follow_redirects = __commonJS({ }); // src/setup-codeql-action.ts -var core12 = __toESM(require_core()); +var core14 = __toESM(require_core()); // node_modules/uuid/dist-node/stringify.js var byteToHex = []; @@ -86807,6 +86807,7 @@ var GitHubFeatureFlags = class { }; // src/init.ts +var core12 = __toESM(require_core()); var toolrunner4 = __toESM(require_toolrunner()); var io5 = __toESM(require_io()); @@ -88589,6 +88590,23 @@ async function getJobRunUuidSarifOptions(codeql) { ) ? 
[`--sarif-run-property=jobRunUuid=${jobRunUuid}`] : []; } +// src/workflow.ts +var core11 = __toESM(require_core()); +function toCodedErrors(errors) { + return Object.entries(errors).reduce( + (acc, [code, message]) => { + acc[code] = { message, code }; + return acc; + }, + {} + ); +} +var WorkflowErrors = toCodedErrors({ + MissingPushHook: `Please specify an on.push hook to analyze and see code scanning alerts from the default branch on the Security tab.`, + CheckoutWrongHead: `git checkout HEAD^2 is no longer necessary. Please remove this step as Code Scanning recommends analyzing the merge commit for best results.`, + InconsistentActionVersion: `Not all workflow steps that use \`github/codeql-action\` actions use the same version. Please ensure that all such steps use the same version to avoid compatibility issues.` +}); + // src/init.ts async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger) { logger.startGroup("Setup CodeQL tools"); @@ -88621,7 +88639,7 @@ async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVe // src/status-report.ts var os2 = __toESM(require("os")); -var core11 = __toESM(require_core()); +var core13 = __toESM(require_core()); function isFirstPartyAnalysis(actionName) { if (actionName !== "upload-sarif" /* UploadSarif */) { return true; @@ -88637,12 +88655,12 @@ function getActionsStatus(error2, otherFailureCause) { } function setJobStatusIfUnsuccessful(actionStatus) { if (actionStatus === "user-error") { - core11.exportVariable( + core13.exportVariable( "CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */, process.env["CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */] ?? "JOB_STATUS_CONFIGURATION_ERROR" /* ConfigErrorStatus */ ); } else if (actionStatus === "failure" || actionStatus === "aborted") { - core11.exportVariable( + core13.exportVariable( "CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */, process.env["CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */] ?? "JOB_STATUS_FAILURE" /* FailureStatus */ ); @@ -88661,14 +88679,14 @@ async function createStatusReportBase(actionName, status, actionStartedAt, confi let workflowStartedAt = process.env["CODEQL_WORKFLOW_STARTED_AT" /* WORKFLOW_STARTED_AT */]; if (workflowStartedAt === void 0) { workflowStartedAt = actionStartedAt.toISOString(); - core11.exportVariable("CODEQL_WORKFLOW_STARTED_AT" /* WORKFLOW_STARTED_AT */, workflowStartedAt); + core13.exportVariable("CODEQL_WORKFLOW_STARTED_AT" /* WORKFLOW_STARTED_AT */, workflowStartedAt); } const runnerOs = getRequiredEnvParam("RUNNER_OS"); const codeQlCliVersion = getCachedCodeQlVersion(); const actionRef = process.env["GITHUB_ACTION_REF"] || ""; const testingEnvironment = getTestingEnvironment(); if (testingEnvironment) { - core11.exportVariable("CODEQL_ACTION_TESTING_ENVIRONMENT" /* TESTING_ENVIRONMENT */, testingEnvironment); + core13.exportVariable("CODEQL_ACTION_TESTING_ENVIRONMENT" /* TESTING_ENVIRONMENT */, testingEnvironment); } const isSteadyStateDefaultSetupRun = process.env["CODE_SCANNING_IS_STEADY_STATE_DEFAULT_SETUP"] === "true"; const statusReport = { @@ -88749,9 +88767,9 @@ var INCOMPATIBLE_MSG = "CodeQL Action version is incompatible with the API endpo async function sendStatusReport(statusReport) { setJobStatusIfUnsuccessful(statusReport.status); const statusReportJSON = JSON.stringify(statusReport); - core11.debug(`Sending status report: ${statusReportJSON}`); + core13.debug(`Sending status report: ${statusReportJSON}`); if (isInTestMode()) { - core11.debug("In test mode. 
Status reports are not uploaded."); + core13.debug("In test mode. Status reports are not uploaded."); return; } const nwo = getRepositoryNwo(); @@ -88771,28 +88789,28 @@ async function sendStatusReport(statusReport) { switch (httpError.status) { case 403: if (getWorkflowEventName() === "push" && process.env["GITHUB_ACTOR"] === "dependabot[bot]") { - core11.warning( + core13.warning( `Workflows triggered by Dependabot on the "push" event run with read-only access. Uploading CodeQL results requires write access. To use CodeQL with Dependabot, please ensure you are using the "pull_request" event for this workflow and avoid triggering on the "push" event for Dependabot branches. See ${"https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/customizing-your-advanced-setup-for-code-scanning#scanning-on-push" /* SCANNING_ON_PUSH */} for more information on how to configure these events.` ); } else { - core11.warning( + core13.warning( `This run of the CodeQL Action does not have permission to access the CodeQL Action API endpoints. This could be because the Action is running on a pull request from a fork. If not, please ensure the workflow has at least the 'security-events: read' permission. Details: ${httpError.message}` ); } return; case 404: - core11.warning(httpError.message); + core13.warning(httpError.message); return; case 422: if (getRequiredEnvParam("GITHUB_SERVER_URL") !== GITHUB_DOTCOM_URL) { - core11.debug(INCOMPATIBLE_MSG); + core13.debug(INCOMPATIBLE_MSG); } else { - core11.debug(OUT_OF_DATE_MSG); + core13.debug(OUT_OF_DATE_MSG); } return; } } - core11.warning( + core13.warning( `An unexpected error occurred when sending a status report: ${getErrorMessage( e )}` @@ -88858,7 +88876,7 @@ async function run() { ); const jobRunUuid = v4_default(); logger.info(`Job run UUID is ${jobRunUuid}.`); - core12.exportVariable("JOB_RUN_UUID" /* JOB_RUN_UUID */, jobRunUuid); + core14.exportVariable("JOB_RUN_UUID" /* JOB_RUN_UUID */, jobRunUuid); try { const statusReportBase = await createStatusReportBase( "setup-codeql" /* SetupCodeQL */, @@ -88888,12 +88906,12 @@ async function run() { toolsDownloadStatusReport = initCodeQLResult.toolsDownloadStatusReport; toolsVersion = initCodeQLResult.toolsVersion; toolsSource = initCodeQLResult.toolsSource; - core12.setOutput("codeql-path", codeql.getPath()); - core12.setOutput("codeql-version", (await codeql.getVersion()).version); - core12.exportVariable("CODEQL_ACTION_SETUP_CODEQL_HAS_RUN" /* SETUP_CODEQL_ACTION_HAS_RUN */, "true"); + core14.setOutput("codeql-path", codeql.getPath()); + core14.setOutput("codeql-version", (await codeql.getVersion()).version); + core14.exportVariable("CODEQL_ACTION_SETUP_CODEQL_HAS_RUN" /* SETUP_CODEQL_ACTION_HAS_RUN */, "true"); } catch (unwrappedError) { const error2 = wrapError(unwrappedError); - core12.setFailed(error2.message); + core14.setFailed(error2.message); const statusReportBase = await createStatusReportBase( "setup-codeql" /* SetupCodeQL */, error2 instanceof ConfigurationError ? 
"user-error" : "failure", @@ -88922,7 +88940,7 @@ async function runWrapper() { try { await run(); } catch (error2) { - core12.setFailed(`setup-codeql action failed: ${getErrorMessage(error2)}`); + core14.setFailed(`setup-codeql action failed: ${getErrorMessage(error2)}`); } await checkForTimeout(); } diff --git a/lib/upload-lib.js b/lib/upload-lib.js index b5f901089d..0bd4083269 100644 --- a/lib/upload-lib.js +++ b/lib/upload-lib.js @@ -19743,10 +19743,10 @@ Support boolean input list: \`true | True | TRUE | false | False | FALSE\``); (0, command_1.issueCommand)("error", (0, utils_1.toCommandProperties)(properties), message instanceof Error ? message.toString() : message); } exports2.error = error2; - function warning6(message, properties = {}) { + function warning7(message, properties = {}) { (0, command_1.issueCommand)("warning", (0, utils_1.toCommandProperties)(properties), message instanceof Error ? message.toString() : message); } - exports2.warning = warning6; + exports2.warning = warning7; function notice(message, properties = {}) { (0, command_1.issueCommand)("notice", (0, utils_1.toCommandProperties)(properties), message instanceof Error ? message.toString() : message); } @@ -19755,22 +19755,22 @@ Support boolean input list: \`true | True | TRUE | false | False | FALSE\``); process.stdout.write(message + os2.EOL); } exports2.info = info4; - function startGroup3(name) { + function startGroup4(name) { (0, command_1.issue)("group", name); } - exports2.startGroup = startGroup3; - function endGroup3() { + exports2.startGroup = startGroup4; + function endGroup4() { (0, command_1.issue)("endgroup"); } - exports2.endGroup = endGroup3; + exports2.endGroup = endGroup4; function group(name, fn) { return __awaiter4(this, void 0, void 0, function* () { - startGroup3(name); + startGroup4(name); let result; try { result = yield fn(); } finally { - endGroup3(); + endGroup4(); } return result; }); @@ -35312,7 +35312,7 @@ var require_internal_glob_options_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getOptions = void 0; - var core12 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); function getOptions(copy) { const result = { followSymbolicLinks: true, @@ -35322,15 +35322,15 @@ var require_internal_glob_options_helper = __commonJS({ if (copy) { if (typeof copy.followSymbolicLinks === "boolean") { result.followSymbolicLinks = copy.followSymbolicLinks; - core12.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); + core14.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); } if (typeof copy.implicitDescendants === "boolean") { result.implicitDescendants = copy.implicitDescendants; - core12.debug(`implicitDescendants '${result.implicitDescendants}'`); + core14.debug(`implicitDescendants '${result.implicitDescendants}'`); } if (typeof copy.omitBrokenSymbolicLinks === "boolean") { result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; - core12.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); + core14.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); } } return result; @@ -36784,7 +36784,7 @@ var require_internal_globber = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; - var core12 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var fs14 = __importStar4(require("fs")); var globOptionsHelper = __importStar4(require_internal_glob_options_helper()); var path15 = 
__importStar4(require("path")); @@ -36835,7 +36835,7 @@ var require_internal_globber = __commonJS({ } const stack = []; for (const searchPath of patternHelper.getSearchPaths(patterns)) { - core12.debug(`Search path '${searchPath}'`); + core14.debug(`Search path '${searchPath}'`); try { yield __await4(fs14.promises.lstat(searchPath)); } catch (err) { @@ -36907,7 +36907,7 @@ var require_internal_globber = __commonJS({ } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { - core12.debug(`Broken symlink '${item.path}'`); + core14.debug(`Broken symlink '${item.path}'`); return void 0; } throw new Error(`No information found for the path '${item.path}'. This may indicate a broken symbolic link.`); @@ -36923,7 +36923,7 @@ var require_internal_globber = __commonJS({ traversalChain.pop(); } if (traversalChain.some((x) => x === realPath)) { - core12.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); + core14.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); return void 0; } traversalChain.push(realPath); @@ -38247,7 +38247,7 @@ var require_cacheUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getRuntimeToken = exports2.getCacheVersion = exports2.assertDefined = exports2.getGnuTarPathOnWindows = exports2.getCacheFileName = exports2.getCompressionMethod = exports2.unlinkFile = exports2.resolvePaths = exports2.getArchiveFileSizeInBytes = exports2.createTempDirectory = void 0; - var core12 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var exec2 = __importStar4(require_exec()); var glob = __importStar4(require_glob()); var io6 = __importStar4(require_io()); @@ -38300,7 +38300,7 @@ var require_cacheUtils = __commonJS({ _e = false; const file = _c; const relativeFile = path15.relative(workspace, file).replace(new RegExp(`\\${path15.sep}`, "g"), "/"); - core12.debug(`Matched: ${relativeFile}`); + core14.debug(`Matched: ${relativeFile}`); if (relativeFile === "") { paths.push("."); } else { @@ -38330,7 +38330,7 @@ var require_cacheUtils = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { let versionOutput = ""; additionalArgs.push("--version"); - core12.debug(`Checking ${app} ${additionalArgs.join(" ")}`); + core14.debug(`Checking ${app} ${additionalArgs.join(" ")}`); try { yield exec2.exec(`${app}`, additionalArgs, { ignoreReturnCode: true, @@ -38341,10 +38341,10 @@ var require_cacheUtils = __commonJS({ } }); } catch (err) { - core12.debug(err.message); + core14.debug(err.message); } versionOutput = versionOutput.trim(); - core12.debug(versionOutput); + core14.debug(versionOutput); return versionOutput; }); } @@ -38352,7 +38352,7 @@ var require_cacheUtils = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { const versionOutput = yield getVersion("zstd", ["--quiet"]); const version = semver8.clean(versionOutput); - core12.debug(`zstd version: ${version}`); + core14.debug(`zstd version: ${version}`); if (versionOutput === "") { return constants_1.CompressionMethod.Gzip; } else { @@ -73664,7 +73664,7 @@ var require_uploadUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.uploadCacheArchiveSDK = exports2.UploadProgress = void 0; - var core12 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var storage_blob_1 = require_dist7(); var errors_1 = require_errors2(); var UploadProgress = class { @@ -73706,7 +73706,7 @@ var 
require_uploadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const uploadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core12.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); + core14.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -73761,14 +73761,14 @@ var require_uploadUtils = __commonJS({ }; try { uploadProgress.startDisplayTimer(); - core12.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); + core14.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); const response = yield blockBlobClient.uploadFile(archivePath, uploadOptions); if (response._response.status >= 400) { throw new errors_1.InvalidResponseError(`uploadCacheArchiveSDK: upload failed with status code ${response._response.status}`); } return response; } catch (error2) { - core12.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error2.message}`); + core14.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error2.message}`); throw error2; } finally { uploadProgress.stopDisplayTimer(); @@ -73839,7 +73839,7 @@ var require_requestUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.retryHttpClientResponse = exports2.retryTypedResponse = exports2.retry = exports2.isRetryableStatusCode = exports2.isServerErrorStatusCode = exports2.isSuccessStatusCode = void 0; - var core12 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var http_client_1 = require_lib(); var constants_1 = require_constants10(); function isSuccessStatusCode(statusCode) { @@ -73900,9 +73900,9 @@ var require_requestUtils = __commonJS({ isRetryable = isRetryableStatusCode(statusCode); errorMessage = `Cache service responded with ${statusCode}`; } - core12.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); + core14.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); if (!isRetryable) { - core12.debug(`${name} - Error is not retryable`); + core14.debug(`${name} - Error is not retryable`); break; } yield sleep(delay2); @@ -74007,7 +74007,7 @@ var require_downloadUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.downloadCacheStorageSDK = exports2.downloadCacheHttpClientConcurrent = exports2.downloadCacheHttpClient = exports2.DownloadProgress = void 0; - var core12 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var http_client_1 = require_lib(); var storage_blob_1 = require_dist7(); var buffer = __importStar4(require("buffer")); @@ -74045,7 +74045,7 @@ var require_downloadUtils = __commonJS({ this.segmentIndex = this.segmentIndex + 1; this.segmentSize = segmentSize; this.receivedBytes = 0; - core12.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); + core14.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); } /** * Sets the number of bytes received for the current segment. 
@@ -74079,7 +74079,7 @@ var require_downloadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const downloadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core12.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); + core14.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -74129,7 +74129,7 @@ var require_downloadUtils = __commonJS({ })); downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => { downloadResponse.message.destroy(); - core12.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); + core14.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); }); yield pipeResponseToStream(downloadResponse, writeStream); const contentLengthHeader = downloadResponse.message.headers["content-length"]; @@ -74140,7 +74140,7 @@ var require_downloadUtils = __commonJS({ throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`); } } else { - core12.debug("Unable to validate download, no Content-Length header"); + core14.debug("Unable to validate download, no Content-Length header"); } }); } @@ -74260,7 +74260,7 @@ var require_downloadUtils = __commonJS({ const properties = yield client.getProperties(); const contentLength = (_a = properties.contentLength) !== null && _a !== void 0 ? _a : -1; if (contentLength < 0) { - core12.debug("Unable to determine content length, downloading file with http-client..."); + core14.debug("Unable to determine content length, downloading file with http-client..."); yield downloadCacheHttpClient(archiveLocation, archivePath); } else { const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH); @@ -74340,7 +74340,7 @@ var require_options = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getDownloadOptions = exports2.getUploadOptions = void 0; - var core12 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); function getUploadOptions(copy) { const result = { useAzureSdk: false, @@ -74360,9 +74360,9 @@ var require_options = __commonJS({ } result.uploadConcurrency = !isNaN(Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) ? Math.min(32, Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) : result.uploadConcurrency; result.uploadChunkSize = !isNaN(Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"])) ? 
Math.min(128 * 1024 * 1024, Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"]) * 1024 * 1024) : result.uploadChunkSize; - core12.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core12.debug(`Upload concurrency: ${result.uploadConcurrency}`); - core12.debug(`Upload chunk size: ${result.uploadChunkSize}`); + core14.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core14.debug(`Upload concurrency: ${result.uploadConcurrency}`); + core14.debug(`Upload chunk size: ${result.uploadChunkSize}`); return result; } exports2.getUploadOptions = getUploadOptions; @@ -74399,12 +74399,12 @@ var require_options = __commonJS({ if (segmentDownloadTimeoutMins && !isNaN(Number(segmentDownloadTimeoutMins)) && isFinite(Number(segmentDownloadTimeoutMins))) { result.segmentTimeoutInMs = Number(segmentDownloadTimeoutMins) * 60 * 1e3; } - core12.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core12.debug(`Download concurrency: ${result.downloadConcurrency}`); - core12.debug(`Request timeout (ms): ${result.timeoutInMs}`); - core12.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); - core12.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); - core12.debug(`Lookup only: ${result.lookupOnly}`); + core14.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core14.debug(`Download concurrency: ${result.downloadConcurrency}`); + core14.debug(`Request timeout (ms): ${result.timeoutInMs}`); + core14.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); + core14.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); + core14.debug(`Lookup only: ${result.lookupOnly}`); return result; } exports2.getDownloadOptions = getDownloadOptions; @@ -74584,7 +74584,7 @@ var require_cacheHttpClient = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.saveCache = exports2.reserveCache = exports2.downloadCache = exports2.getCacheEntry = void 0; - var core12 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var http_client_1 = require_lib(); var auth_1 = require_auth(); var fs14 = __importStar4(require("fs")); @@ -74602,7 +74602,7 @@ var require_cacheHttpClient = __commonJS({ throw new Error("Cache Service Url not found, unable to restore cache."); } const url2 = `${baseUrl}_apis/artifactcache/${resource}`; - core12.debug(`Resource Url: ${url2}`); + core14.debug(`Resource Url: ${url2}`); return url2; } function createAcceptHeader(type2, apiVersion) { @@ -74630,7 +74630,7 @@ var require_cacheHttpClient = __commonJS({ return httpClient.getJson(getCacheApiUrl(resource)); })); if (response.statusCode === 204) { - if (core12.isDebug()) { + if (core14.isDebug()) { yield printCachesListForDiagnostics(keys[0], httpClient, version); } return null; @@ -74643,9 +74643,9 @@ var require_cacheHttpClient = __commonJS({ if (!cacheDownloadUrl) { throw new Error("Cache not found."); } - core12.setSecret(cacheDownloadUrl); - core12.debug(`Cache Result:`); - core12.debug(JSON.stringify(cacheResult)); + core14.setSecret(cacheDownloadUrl); + core14.debug(`Cache Result:`); + core14.debug(JSON.stringify(cacheResult)); return cacheResult; }); } @@ -74660,10 +74660,10 @@ var require_cacheHttpClient = __commonJS({ const cacheListResult = response.result; const totalCount = cacheListResult === null || cacheListResult === void 0 ? 
void 0 : cacheListResult.totalCount; if (totalCount && totalCount > 0) { - core12.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key + core14.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key Other caches with similar key:`); for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) { - core12.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); + core14.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); } } } @@ -74708,7 +74708,7 @@ Other caches with similar key:`); } function uploadChunk(httpClient, resourceUrl, openStream, start, end) { return __awaiter4(this, void 0, void 0, function* () { - core12.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); + core14.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); const additionalHeaders = { "Content-Type": "application/octet-stream", "Content-Range": getContentRange(start, end) @@ -74730,7 +74730,7 @@ Other caches with similar key:`); const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency); const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize); const parallelUploads = [...new Array(concurrency).keys()]; - core12.debug("Awaiting all uploads"); + core14.debug("Awaiting all uploads"); let offset = 0; try { yield Promise.all(parallelUploads.map(() => __awaiter4(this, void 0, void 0, function* () { @@ -74773,16 +74773,16 @@ Other caches with similar key:`); yield (0, uploadUtils_1.uploadCacheArchiveSDK)(signedUploadURL, archivePath, options); } else { const httpClient = createHttpClient(); - core12.debug("Upload cache"); + core14.debug("Upload cache"); yield uploadFile(httpClient, cacheId, archivePath, options); - core12.debug("Commiting cache"); + core14.debug("Commiting cache"); const cacheSize = utils.getArchiveFileSizeInBytes(archivePath); - core12.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); + core14.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); const commitCacheResponse = yield commitCache(httpClient, 
cacheId, cacheSize); if (!(0, requestUtils_1.isSuccessStatusCode)(commitCacheResponse.statusCode)) { throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`); } - core12.info("Cache saved successfully"); + core14.info("Cache saved successfully"); } }); } @@ -80234,7 +80234,7 @@ var require_cache3 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.saveCache = exports2.restoreCache = exports2.isFeatureAvailable = exports2.FinalizeCacheError = exports2.ReserveCacheError = exports2.ValidationError = void 0; - var core12 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var path15 = __importStar4(require("path")); var utils = __importStar4(require_cacheUtils()); var cacheHttpClient = __importStar4(require_cacheHttpClient()); @@ -80294,7 +80294,7 @@ var require_cache3 = __commonJS({ function restoreCache3(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { return __awaiter4(this, void 0, void 0, function* () { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core12.debug(`Cache service version: ${cacheServiceVersion}`); + core14.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); switch (cacheServiceVersion) { case "v2": @@ -80310,8 +80310,8 @@ var require_cache3 = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core12.debug("Resolved Keys:"); - core12.debug(JSON.stringify(keys)); + core14.debug("Resolved Keys:"); + core14.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -80329,19 +80329,19 @@ var require_cache3 = __commonJS({ return void 0; } if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { - core12.info("Lookup only - skipping download"); + core14.info("Lookup only - skipping download"); return cacheEntry.cacheKey; } archivePath = path15.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core12.debug(`Archive Path: ${archivePath}`); + core14.debug(`Archive Path: ${archivePath}`); yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); - if (core12.isDebug()) { + if (core14.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core12.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + core14.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); yield (0, tar_1.extractTar)(archivePath, compressionMethod); - core12.info("Cache restored successfully"); + core14.info("Cache restored successfully"); return cacheEntry.cacheKey; } catch (error2) { const typedError = error2; @@ -80349,16 +80349,16 @@ var require_cache3 = __commonJS({ throw error2; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core12.error(`Failed to restore: ${error2.message}`); + core14.error(`Failed to restore: ${error2.message}`); } else { - core12.warning(`Failed to restore: ${error2.message}`); + core14.warning(`Failed to restore: ${error2.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error2) { - core12.debug(`Failed to delete archive: ${error2}`); + core14.debug(`Failed to delete archive: ${error2}`); } } return void 0; @@ -80369,8 +80369,8 @@ var require_cache3 = __commonJS({ options = Object.assign(Object.assign({}, options), { useAzureSdk: true }); restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core12.debug("Resolved Keys:"); - core12.debug(JSON.stringify(keys)); + core14.debug("Resolved Keys:"); + core14.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -80388,30 +80388,30 @@ var require_cache3 = __commonJS({ }; const response = yield twirpClient.GetCacheEntryDownloadURL(request); if (!response.ok) { - core12.debug(`Cache not found for version ${request.version} of keys: ${keys.join(", ")}`); + core14.debug(`Cache not found for version ${request.version} of keys: ${keys.join(", ")}`); return void 0; } const isRestoreKeyMatch = request.key !== response.matchedKey; if (isRestoreKeyMatch) { - core12.info(`Cache hit for restore-key: ${response.matchedKey}`); + core14.info(`Cache hit for restore-key: ${response.matchedKey}`); } else { - core12.info(`Cache hit for: ${response.matchedKey}`); + core14.info(`Cache hit for: ${response.matchedKey}`); } if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { - core12.info("Lookup only - skipping download"); + core14.info("Lookup only - skipping download"); return response.matchedKey; } archivePath = path15.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core12.debug(`Archive path: ${archivePath}`); - core12.debug(`Starting download of archive to: ${archivePath}`); + core14.debug(`Archive path: ${archivePath}`); + core14.debug(`Starting download of archive to: ${archivePath}`); yield cacheHttpClient.downloadCache(response.signedDownloadUrl, archivePath, options); const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core12.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); - if (core12.isDebug()) { + core14.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + if (core14.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } yield (0, tar_1.extractTar)(archivePath, compressionMethod); - core12.info("Cache restored successfully"); + core14.info("Cache restored successfully"); return response.matchedKey; } catch (error2) { const typedError = error2; @@ -80419,9 +80419,9 @@ var require_cache3 = __commonJS({ throw error2; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core12.error(`Failed to restore: ${error2.message}`); + core14.error(`Failed to restore: ${error2.message}`); } else { - core12.warning(`Failed to restore: ${error2.message}`); + core14.warning(`Failed to restore: ${error2.message}`); } } } finally { @@ -80430,7 +80430,7 @@ var require_cache3 = __commonJS({ yield utils.unlinkFile(archivePath); } } catch (error2) { - core12.debug(`Failed to delete archive: ${error2}`); + core14.debug(`Failed to delete archive: ${error2}`); } } return void 0; @@ -80439,7 +80439,7 @@ var require_cache3 = __commonJS({ function saveCache3(paths, key, options, enableCrossOsArchive = false) { return __awaiter4(this, void 0, void 0, function* () { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core12.debug(`Cache service version: ${cacheServiceVersion}`); + core14.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); checkKey(key); switch (cacheServiceVersion) { @@ -80458,26 +80458,26 @@ var require_cache3 = __commonJS({ const compressionMethod = yield utils.getCompressionMethod(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core12.debug("Cache Paths:"); - core12.debug(`${JSON.stringify(cachePaths)}`); + core14.debug("Cache Paths:"); + core14.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = path15.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core12.debug(`Archive Path: ${archivePath}`); + core14.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core12.isDebug()) { + if (core14.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const fileSizeLimit = 10 * 1024 * 1024 * 1024; const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core12.debug(`File Size: ${archiveFileSize}`); + core14.debug(`File Size: ${archiveFileSize}`); if 
(archiveFileSize > fileSizeLimit && !(0, config_1.isGhes)()) { throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); } - core12.debug("Reserving Cache"); + core14.debug("Reserving Cache"); const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, { compressionMethod, enableCrossOsArchive, @@ -80490,26 +80490,26 @@ var require_cache3 = __commonJS({ } else { throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? void 0 : _e.message}`); } - core12.debug(`Saving Cache (ID: ${cacheId})`); + core14.debug(`Saving Cache (ID: ${cacheId})`); yield cacheHttpClient.saveCache(cacheId, archivePath, "", options); } catch (error2) { const typedError = error2; if (typedError.name === ValidationError.name) { throw error2; } else if (typedError.name === ReserveCacheError.name) { - core12.info(`Failed to save: ${typedError.message}`); + core14.info(`Failed to save: ${typedError.message}`); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core12.error(`Failed to save: ${typedError.message}`); + core14.error(`Failed to save: ${typedError.message}`); } else { - core12.warning(`Failed to save: ${typedError.message}`); + core14.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error2) { - core12.debug(`Failed to delete archive: ${error2}`); + core14.debug(`Failed to delete archive: ${error2}`); } } return cacheId; @@ -80522,23 +80522,23 @@ var require_cache3 = __commonJS({ const twirpClient = cacheTwirpClient.internalCacheTwirpClient(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core12.debug("Cache Paths:"); - core12.debug(`${JSON.stringify(cachePaths)}`); + core14.debug("Cache Paths:"); + core14.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = path15.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core12.debug(`Archive Path: ${archivePath}`); + core14.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core12.isDebug()) { + if (core14.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core12.debug(`File Size: ${archiveFileSize}`); + core14.debug(`File Size: ${archiveFileSize}`); options.archiveSizeBytes = archiveFileSize; - core12.debug("Reserving Cache"); + core14.debug("Reserving Cache"); const version = utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive); const request = { key, @@ -80549,16 +80549,16 @@ var require_cache3 = __commonJS({ const response = yield twirpClient.CreateCacheEntry(request); if (!response.ok) { if (response.message) { - core12.warning(`Cache reservation failed: ${response.message}`); + core14.warning(`Cache reservation failed: ${response.message}`); } throw new Error(response.message || "Response was not ok"); } signedUploadUrl = 
response.signedUploadUrl; } catch (error2) { - core12.debug(`Failed to reserve cache: ${error2}`); + core14.debug(`Failed to reserve cache: ${error2}`); throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache.`); } - core12.debug(`Attempting to upload cache located at: ${archivePath}`); + core14.debug(`Attempting to upload cache located at: ${archivePath}`); yield cacheHttpClient.saveCache(cacheId, archivePath, signedUploadUrl, options); const finalizeRequest = { key, @@ -80566,7 +80566,7 @@ var require_cache3 = __commonJS({ sizeBytes: `${archiveFileSize}` }; const finalizeResponse = yield twirpClient.FinalizeCacheEntryUpload(finalizeRequest); - core12.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); + core14.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); if (!finalizeResponse.ok) { if (finalizeResponse.message) { throw new FinalizeCacheError(finalizeResponse.message); @@ -80579,21 +80579,21 @@ var require_cache3 = __commonJS({ if (typedError.name === ValidationError.name) { throw error2; } else if (typedError.name === ReserveCacheError.name) { - core12.info(`Failed to save: ${typedError.message}`); + core14.info(`Failed to save: ${typedError.message}`); } else if (typedError.name === FinalizeCacheError.name) { - core12.warning(typedError.message); + core14.warning(typedError.message); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core12.error(`Failed to save: ${typedError.message}`); + core14.error(`Failed to save: ${typedError.message}`); } else { - core12.warning(`Failed to save: ${typedError.message}`); + core14.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error2) { - core12.debug(`Failed to delete archive: ${error2}`); + core14.debug(`Failed to delete archive: ${error2}`); } } return cacheId; @@ -80801,7 +80801,7 @@ var require_retry_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.RetryHelper = void 0; - var core12 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var RetryHelper = class { constructor(maxAttempts, minSeconds, maxSeconds) { if (maxAttempts < 1) { @@ -80824,10 +80824,10 @@ var require_retry_helper = __commonJS({ if (isRetryable && !isRetryable(err)) { throw err; } - core12.info(err.message); + core14.info(err.message); } const seconds = this.getSleepAmount(); - core12.info(`Waiting ${seconds} seconds before trying again`); + core14.info(`Waiting ${seconds} seconds before trying again`); yield this.sleep(seconds); attempt++; } @@ -80907,7 +80907,7 @@ var require_tool_cache = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.evaluateVersions = exports2.isExplicitVersion = exports2.findFromManifest = exports2.getManifestFromRepo = exports2.findAllVersions = exports2.find = exports2.cacheFile = exports2.cacheDir = exports2.extractZip = exports2.extractXar = exports2.extractTar = exports2.extract7z = exports2.downloadTool = exports2.HTTPError = void 0; - var core12 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var io6 = __importStar4(require_io()); var crypto = __importStar4(require("crypto")); var fs14 = __importStar4(require("fs")); @@ -80936,8 +80936,8 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { dest = dest || 
path15.join(_getTempDirectory(), crypto.randomUUID()); yield io6.mkdirP(path15.dirname(dest)); - core12.debug(`Downloading ${url2}`); - core12.debug(`Destination ${dest}`); + core14.debug(`Downloading ${url2}`); + core14.debug(`Destination ${dest}`); const maxAttempts = 3; const minSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS", 10); const maxSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS", 20); @@ -80964,7 +80964,7 @@ var require_tool_cache = __commonJS({ allowRetries: false }); if (auth) { - core12.debug("set auth"); + core14.debug("set auth"); if (headers === void 0) { headers = {}; } @@ -80973,7 +80973,7 @@ var require_tool_cache = __commonJS({ const response = yield http.get(url2, headers); if (response.message.statusCode !== 200) { const err = new HTTPError2(response.message.statusCode); - core12.debug(`Failed to download from "${url2}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); + core14.debug(`Failed to download from "${url2}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); throw err; } const pipeline = util.promisify(stream2.pipeline); @@ -80982,16 +80982,16 @@ var require_tool_cache = __commonJS({ let succeeded = false; try { yield pipeline(readStream, fs14.createWriteStream(dest)); - core12.debug("download complete"); + core14.debug("download complete"); succeeded = true; return dest; } finally { if (!succeeded) { - core12.debug("download failed"); + core14.debug("download failed"); try { yield io6.rmRF(dest); } catch (err) { - core12.debug(`Failed to delete '${dest}'. ${err.message}`); + core14.debug(`Failed to delete '${dest}'. ${err.message}`); } } } @@ -81006,7 +81006,7 @@ var require_tool_cache = __commonJS({ process.chdir(dest); if (_7zPath) { try { - const logLevel = core12.isDebug() ? "-bb1" : "-bb0"; + const logLevel = core14.isDebug() ? 
"-bb1" : "-bb0"; const args = [ "x", logLevel, @@ -81056,7 +81056,7 @@ var require_tool_cache = __commonJS({ throw new Error("parameter 'file' is required"); } dest = yield _createExtractFolder(dest); - core12.debug("Checking tar --version"); + core14.debug("Checking tar --version"); let versionOutput = ""; yield (0, exec_1.exec)("tar --version", [], { ignoreReturnCode: true, @@ -81066,7 +81066,7 @@ var require_tool_cache = __commonJS({ stderr: (data) => versionOutput += data.toString() } }); - core12.debug(versionOutput.trim()); + core14.debug(versionOutput.trim()); const isGnuTar = versionOutput.toUpperCase().includes("GNU TAR"); let args; if (flags instanceof Array) { @@ -81074,7 +81074,7 @@ var require_tool_cache = __commonJS({ } else { args = [flags]; } - if (core12.isDebug() && !flags.includes("v")) { + if (core14.isDebug() && !flags.includes("v")) { args.push("-v"); } let destArg = dest; @@ -81106,7 +81106,7 @@ var require_tool_cache = __commonJS({ args = [flags]; } args.push("-x", "-C", dest, "-f", file); - if (core12.isDebug()) { + if (core14.isDebug()) { args.push("-v"); } const xarPath = yield io6.which("xar", true); @@ -81151,7 +81151,7 @@ var require_tool_cache = __commonJS({ "-Command", pwshCommand ]; - core12.debug(`Using pwsh at path: ${pwshPath}`); + core14.debug(`Using pwsh at path: ${pwshPath}`); yield (0, exec_1.exec)(`"${pwshPath}"`, args); } else { const powershellCommand = [ @@ -81171,7 +81171,7 @@ var require_tool_cache = __commonJS({ powershellCommand ]; const powershellPath = yield io6.which("powershell", true); - core12.debug(`Using powershell at path: ${powershellPath}`); + core14.debug(`Using powershell at path: ${powershellPath}`); yield (0, exec_1.exec)(`"${powershellPath}"`, args); } }); @@ -81180,7 +81180,7 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { const unzipPath = yield io6.which("unzip", true); const args = [file]; - if (!core12.isDebug()) { + if (!core14.isDebug()) { args.unshift("-q"); } args.unshift("-o"); @@ -81191,8 +81191,8 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { version = semver8.clean(version) || version; arch2 = arch2 || os2.arch(); - core12.debug(`Caching tool ${tool} ${version} ${arch2}`); - core12.debug(`source dir: ${sourceDir}`); + core14.debug(`Caching tool ${tool} ${version} ${arch2}`); + core14.debug(`source dir: ${sourceDir}`); if (!fs14.statSync(sourceDir).isDirectory()) { throw new Error("sourceDir is not a directory"); } @@ -81210,14 +81210,14 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { version = semver8.clean(version) || version; arch2 = arch2 || os2.arch(); - core12.debug(`Caching tool ${tool} ${version} ${arch2}`); - core12.debug(`source file: ${sourceFile}`); + core14.debug(`Caching tool ${tool} ${version} ${arch2}`); + core14.debug(`source file: ${sourceFile}`); if (!fs14.statSync(sourceFile).isFile()) { throw new Error("sourceFile is not a file"); } const destFolder = yield _createToolPath(tool, version, arch2); const destPath = path15.join(destFolder, targetFile); - core12.debug(`destination file ${destPath}`); + core14.debug(`destination file ${destPath}`); yield io6.cp(sourceFile, destPath); _completeToolPath(tool, version, arch2); return destFolder; @@ -81241,12 +81241,12 @@ var require_tool_cache = __commonJS({ if (versionSpec) { versionSpec = semver8.clean(versionSpec) || ""; const cachePath = path15.join(_getCacheDirectory(), toolName, versionSpec, arch2); - 
core12.debug(`checking cache: ${cachePath}`); + core14.debug(`checking cache: ${cachePath}`); if (fs14.existsSync(cachePath) && fs14.existsSync(`${cachePath}.complete`)) { - core12.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); + core14.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); toolPath = cachePath; } else { - core12.debug("not found"); + core14.debug("not found"); } } return toolPath; @@ -81277,7 +81277,7 @@ var require_tool_cache = __commonJS({ const http = new httpm.HttpClient("tool-cache"); const headers = {}; if (auth) { - core12.debug("set auth"); + core14.debug("set auth"); headers.authorization = auth; } const response = yield http.getJson(treeUrl, headers); @@ -81298,7 +81298,7 @@ var require_tool_cache = __commonJS({ try { releases = JSON.parse(versionsRaw); } catch (_a) { - core12.debug("Invalid json"); + core14.debug("Invalid json"); } } return releases; @@ -81324,7 +81324,7 @@ var require_tool_cache = __commonJS({ function _createToolPath(tool, version, arch2) { return __awaiter4(this, void 0, void 0, function* () { const folderPath = path15.join(_getCacheDirectory(), tool, semver8.clean(version) || version, arch2 || ""); - core12.debug(`destination ${folderPath}`); + core14.debug(`destination ${folderPath}`); const markerPath = `${folderPath}.complete`; yield io6.rmRF(folderPath); yield io6.rmRF(markerPath); @@ -81336,19 +81336,19 @@ var require_tool_cache = __commonJS({ const folderPath = path15.join(_getCacheDirectory(), tool, semver8.clean(version) || version, arch2 || ""); const markerPath = `${folderPath}.complete`; fs14.writeFileSync(markerPath, ""); - core12.debug("finished caching tool"); + core14.debug("finished caching tool"); } function isExplicitVersion(versionSpec) { const c = semver8.clean(versionSpec) || ""; - core12.debug(`isExplicit: ${c}`); + core14.debug(`isExplicit: ${c}`); const valid3 = semver8.valid(c) != null; - core12.debug(`explicit? ${valid3}`); + core14.debug(`explicit? 
${valid3}`); return valid3; } exports2.isExplicitVersion = isExplicitVersion; function evaluateVersions(versions, versionSpec) { let version = ""; - core12.debug(`evaluating ${versions.length} versions`); + core14.debug(`evaluating ${versions.length} versions`); versions = versions.sort((a, b) => { if (semver8.gt(a, b)) { return 1; @@ -81364,9 +81364,9 @@ var require_tool_cache = __commonJS({ } } if (version) { - core12.debug(`matched: ${version}`); + core14.debug(`matched: ${version}`); } else { - core12.debug("match not found"); + core14.debug("match not found"); } return version; } @@ -84866,7 +84866,7 @@ var fs13 = __toESM(require("fs")); var path14 = __toESM(require("path")); var url = __toESM(require("url")); var import_zlib = __toESM(require("zlib")); -var core11 = __toESM(require_core()); +var core13 = __toESM(require_core()); var jsonschema2 = __toESM(require_lib2()); // src/actions-util.ts @@ -92279,8 +92279,28 @@ async function addFingerprints(sarif, sourceRoot, logger) { } // src/init.ts +var core12 = __toESM(require_core()); var toolrunner4 = __toESM(require_toolrunner()); var io5 = __toESM(require_io()); + +// src/workflow.ts +var core11 = __toESM(require_core()); +function toCodedErrors(errors) { + return Object.entries(errors).reduce( + (acc, [code, message]) => { + acc[code] = { message, code }; + return acc; + }, + {} + ); +} +var WorkflowErrors = toCodedErrors({ + MissingPushHook: `Please specify an on.push hook to analyze and see code scanning alerts from the default branch on the Security tab.`, + CheckoutWrongHead: `git checkout HEAD^2 is no longer necessary. Please remove this step as Code Scanning recommends analyzing the merge commit for best results.`, + InconsistentActionVersion: `Not all workflow steps that use \`github/codeql-action\` actions use the same version. Please ensure that all such steps use the same version to avoid compatibility issues.` +}); + +// src/init.ts async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger) { logger.startGroup("Setup CodeQL tools"); const { @@ -92410,7 +92430,7 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo logger.warning( `Uploading multiple SARIF runs with the same category is deprecated ${deprecationWarningMessage}. Please update your workflow to upload a single run per category. ${deprecationMoreInformationMessage}` ); - core11.exportVariable("CODEQL_MERGE_SARIF_DEPRECATION_WARNING", "true"); + core13.exportVariable("CODEQL_MERGE_SARIF_DEPRECATION_WARNING", "true"); } return combineSarifFiles(sarifFiles, logger); } @@ -92509,13 +92529,13 @@ async function uploadPayload(payload, repositoryNwo, logger, analysis) { if (httpError !== void 0) { switch (httpError.status) { case 403: - core11.warning(httpError.message || GENERIC_403_MSG); + core13.warning(httpError.message || GENERIC_403_MSG); break; case 404: - core11.warning(httpError.message || GENERIC_404_MSG); + core13.warning(httpError.message || GENERIC_404_MSG); break; default: - core11.warning(httpError.message); + core13.warning(httpError.message); break; } } @@ -92645,9 +92665,9 @@ function validateSarifFileSchema(sarif, sarifFilePath, logger) { const warnings = (result.errors ?? 
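Editorial aside, not part of the patch: the `src/workflow.ts` module inlined in the hunk above builds its error table with `toCodedErrors`, which pairs each message with its object key so that callers and tests can match on a stable `code` rather than on full message text. Below is a minimal sketch of a consumer of that pattern; `ParsedWorkflow` and `findWorkflowErrors` are illustrative names assumed for this sketch, not exports added by the patch.

import { WorkflowErrors } from "./workflow";

// Illustrative only: a simplified workflow shape. The real parsing in the
// CodeQL Action is richer than this.
interface ParsedWorkflow {
  on?: string[] | Record<string, unknown>;
}

// Returns coded errors so tests can assert on `code` instead of comparing
// full message strings. `findWorkflowErrors` is an assumed name.
function findWorkflowErrors(
  workflow: ParsedWorkflow,
): Array<{ message: string; code: string }> {
  const errors: Array<{ message: string; code: string }> = [];
  const hasPush = Array.isArray(workflow.on)
    ? workflow.on.includes("push")
    : workflow.on !== undefined && "push" in workflow.on;
  if (!hasPush) {
    errors.push(WorkflowErrors.MissingPushHook);
  }
  return errors;
}

// e.g. in a test:
//   findWorkflowErrors({ on: {} })[0].code === "MissingPushHook"

Keying assertions to `code` keeps the tests added in this commit stable even if the user-facing message strings are later reworded.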
[]).filter( (err) => err.name === "format" && typeof err.argument === "string" && warningAttributes.includes(err.argument) ); - for (const warning6 of warnings) { + for (const warning7 of warnings) { logger.info( - `Warning: '${warning6.instance}' is not a valid URI in '${warning6.property}'.` + `Warning: '${warning7.instance}' is not a valid URI in '${warning7.property}'.` ); } if (errors.length > 0) { @@ -92946,7 +92966,7 @@ function validateUniqueCategory(sarif, sentinelPrefix) { `Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job per tool/category. The easiest fix is to specify a unique value for the \`category\` input. If .runs[].automationDetails.id is specified in the sarif file, that will take precedence over your configured \`category\`. Category: (${id ? id : "none"}) Tool: (${tool ? tool : "none"})` ); } - core11.exportVariable(sentinelEnvVar, sentinelEnvVar); + core13.exportVariable(sentinelEnvVar, sentinelEnvVar); } } function sanitize(str2) { diff --git a/lib/upload-sarif-action.js b/lib/upload-sarif-action.js index d49ad89b29..093f39d6d1 100644 --- a/lib/upload-sarif-action.js +++ b/lib/upload-sarif-action.js @@ -19743,10 +19743,10 @@ Support boolean input list: \`true | True | TRUE | false | False | FALSE\``); (0, command_1.issueCommand)("error", (0, utils_1.toCommandProperties)(properties), message instanceof Error ? message.toString() : message); } exports2.error = error2; - function warning7(message, properties = {}) { + function warning8(message, properties = {}) { (0, command_1.issueCommand)("warning", (0, utils_1.toCommandProperties)(properties), message instanceof Error ? message.toString() : message); } - exports2.warning = warning7; + exports2.warning = warning8; function notice(message, properties = {}) { (0, command_1.issueCommand)("notice", (0, utils_1.toCommandProperties)(properties), message instanceof Error ? 
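Editorial aside, not part of the patch: the `validateUniqueCategory` logic visible in the preceding hunk enforces one upload per tool/category per job by exporting a sentinel environment variable on first use. A minimal sketch of that sentinel pattern follows; the `sanitize` rule and the function name `assertUniqueUpload` are simplified assumptions for illustration.

import * as core from "@actions/core";

// Sketch of the sentinel pattern: the first upload for a given category
// exports a marker variable; a second upload in the same job sees the
// marker and aborts. Names below are illustrative.
function assertUniqueUpload(sentinelPrefix: string, category: string): void {
  // Simplified stand-in for the action's sanitization of category strings.
  const sanitized = category.replace(/[^a-zA-Z0-9_]/g, "_");
  const sentinelEnvVar = `${sentinelPrefix}_${sanitized}`;
  if (process.env[sentinelEnvVar]) {
    throw new Error(
      `Aborting upload: only one run per job per tool/category is allowed. ` +
        `Category: ${category}`,
    );
  }
  // Mark this category as uploaded for the remainder of the job.
  core.exportVariable(sentinelEnvVar, sentinelEnvVar);
}

Because `core.exportVariable` propagates the variable to all subsequent steps in the job, the guard holds across separate action invocations, not just within one process.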
message.toString() : message); } @@ -19755,22 +19755,22 @@ Support boolean input list: \`true | True | TRUE | false | False | FALSE\``); process.stdout.write(message + os3.EOL); } exports2.info = info4; - function startGroup3(name) { + function startGroup4(name) { (0, command_1.issue)("group", name); } - exports2.startGroup = startGroup3; - function endGroup3() { + exports2.startGroup = startGroup4; + function endGroup4() { (0, command_1.issue)("endgroup"); } - exports2.endGroup = endGroup3; + exports2.endGroup = endGroup4; function group(name, fn) { return __awaiter4(this, void 0, void 0, function* () { - startGroup3(name); + startGroup4(name); let result; try { result = yield fn(); } finally { - endGroup3(); + endGroup4(); } return result; }); @@ -34015,7 +34015,7 @@ var require_internal_glob_options_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getOptions = void 0; - var core14 = __importStar4(require_core()); + var core16 = __importStar4(require_core()); function getOptions(copy) { const result = { followSymbolicLinks: true, @@ -34025,15 +34025,15 @@ var require_internal_glob_options_helper = __commonJS({ if (copy) { if (typeof copy.followSymbolicLinks === "boolean") { result.followSymbolicLinks = copy.followSymbolicLinks; - core14.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); + core16.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); } if (typeof copy.implicitDescendants === "boolean") { result.implicitDescendants = copy.implicitDescendants; - core14.debug(`implicitDescendants '${result.implicitDescendants}'`); + core16.debug(`implicitDescendants '${result.implicitDescendants}'`); } if (typeof copy.omitBrokenSymbolicLinks === "boolean") { result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; - core14.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); + core16.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); } } return result; @@ -35487,7 +35487,7 @@ var require_internal_globber = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; - var core14 = __importStar4(require_core()); + var core16 = __importStar4(require_core()); var fs15 = __importStar4(require("fs")); var globOptionsHelper = __importStar4(require_internal_glob_options_helper()); var path16 = __importStar4(require("path")); @@ -35538,7 +35538,7 @@ var require_internal_globber = __commonJS({ } const stack = []; for (const searchPath of patternHelper.getSearchPaths(patterns)) { - core14.debug(`Search path '${searchPath}'`); + core16.debug(`Search path '${searchPath}'`); try { yield __await4(fs15.promises.lstat(searchPath)); } catch (err) { @@ -35610,7 +35610,7 @@ var require_internal_globber = __commonJS({ } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { - core14.debug(`Broken symlink '${item.path}'`); + core16.debug(`Broken symlink '${item.path}'`); return void 0; } throw new Error(`No information found for the path '${item.path}'. 
This may indicate a broken symbolic link.`); @@ -35626,7 +35626,7 @@ var require_internal_globber = __commonJS({ traversalChain.pop(); } if (traversalChain.some((x) => x === realPath)) { - core14.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); + core16.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); return void 0; } traversalChain.push(realPath); @@ -36950,7 +36950,7 @@ var require_cacheUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getRuntimeToken = exports2.getCacheVersion = exports2.assertDefined = exports2.getGnuTarPathOnWindows = exports2.getCacheFileName = exports2.getCompressionMethod = exports2.unlinkFile = exports2.resolvePaths = exports2.getArchiveFileSizeInBytes = exports2.createTempDirectory = void 0; - var core14 = __importStar4(require_core()); + var core16 = __importStar4(require_core()); var exec2 = __importStar4(require_exec()); var glob = __importStar4(require_glob()); var io6 = __importStar4(require_io()); @@ -37003,7 +37003,7 @@ var require_cacheUtils = __commonJS({ _e = false; const file = _c; const relativeFile = path16.relative(workspace, file).replace(new RegExp(`\\${path16.sep}`, "g"), "/"); - core14.debug(`Matched: ${relativeFile}`); + core16.debug(`Matched: ${relativeFile}`); if (relativeFile === "") { paths.push("."); } else { @@ -37033,7 +37033,7 @@ var require_cacheUtils = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { let versionOutput = ""; additionalArgs.push("--version"); - core14.debug(`Checking ${app} ${additionalArgs.join(" ")}`); + core16.debug(`Checking ${app} ${additionalArgs.join(" ")}`); try { yield exec2.exec(`${app}`, additionalArgs, { ignoreReturnCode: true, @@ -37044,10 +37044,10 @@ var require_cacheUtils = __commonJS({ } }); } catch (err) { - core14.debug(err.message); + core16.debug(err.message); } versionOutput = versionOutput.trim(); - core14.debug(versionOutput); + core16.debug(versionOutput); return versionOutput; }); } @@ -37055,7 +37055,7 @@ var require_cacheUtils = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { const versionOutput = yield getVersion("zstd", ["--quiet"]); const version = semver8.clean(versionOutput); - core14.debug(`zstd version: ${version}`); + core16.debug(`zstd version: ${version}`); if (versionOutput === "") { return constants_1.CompressionMethod.Gzip; } else { @@ -72367,7 +72367,7 @@ var require_uploadUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.uploadCacheArchiveSDK = exports2.UploadProgress = void 0; - var core14 = __importStar4(require_core()); + var core16 = __importStar4(require_core()); var storage_blob_1 = require_dist7(); var errors_1 = require_errors2(); var UploadProgress = class { @@ -72409,7 +72409,7 @@ var require_uploadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const uploadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core14.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); + core16.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -72464,14 +72464,14 @@ var require_uploadUtils = __commonJS({ }; try { uploadProgress.startDisplayTimer(); - core14.debug(`BlobClient: 
${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); + core16.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); const response = yield blockBlobClient.uploadFile(archivePath, uploadOptions); if (response._response.status >= 400) { throw new errors_1.InvalidResponseError(`uploadCacheArchiveSDK: upload failed with status code ${response._response.status}`); } return response; } catch (error2) { - core14.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error2.message}`); + core16.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error2.message}`); throw error2; } finally { uploadProgress.stopDisplayTimer(); @@ -72542,7 +72542,7 @@ var require_requestUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.retryHttpClientResponse = exports2.retryTypedResponse = exports2.retry = exports2.isRetryableStatusCode = exports2.isServerErrorStatusCode = exports2.isSuccessStatusCode = void 0; - var core14 = __importStar4(require_core()); + var core16 = __importStar4(require_core()); var http_client_1 = require_lib(); var constants_1 = require_constants10(); function isSuccessStatusCode(statusCode) { @@ -72603,9 +72603,9 @@ var require_requestUtils = __commonJS({ isRetryable = isRetryableStatusCode(statusCode); errorMessage = `Cache service responded with ${statusCode}`; } - core14.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); + core16.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); if (!isRetryable) { - core14.debug(`${name} - Error is not retryable`); + core16.debug(`${name} - Error is not retryable`); break; } yield sleep(delay2); @@ -72710,7 +72710,7 @@ var require_downloadUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.downloadCacheStorageSDK = exports2.downloadCacheHttpClientConcurrent = exports2.downloadCacheHttpClient = exports2.DownloadProgress = void 0; - var core14 = __importStar4(require_core()); + var core16 = __importStar4(require_core()); var http_client_1 = require_lib(); var storage_blob_1 = require_dist7(); var buffer = __importStar4(require("buffer")); @@ -72748,7 +72748,7 @@ var require_downloadUtils = __commonJS({ this.segmentIndex = this.segmentIndex + 1; this.segmentSize = segmentSize; this.receivedBytes = 0; - core14.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); + core16.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); } /** * Sets the number of bytes received for the current segment. 
@@ -72782,7 +72782,7 @@ var require_downloadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const downloadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core14.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); + core16.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -72832,7 +72832,7 @@ var require_downloadUtils = __commonJS({ })); downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => { downloadResponse.message.destroy(); - core14.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); + core16.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); }); yield pipeResponseToStream(downloadResponse, writeStream); const contentLengthHeader = downloadResponse.message.headers["content-length"]; @@ -72843,7 +72843,7 @@ var require_downloadUtils = __commonJS({ throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`); } } else { - core14.debug("Unable to validate download, no Content-Length header"); + core16.debug("Unable to validate download, no Content-Length header"); } }); } @@ -72963,7 +72963,7 @@ var require_downloadUtils = __commonJS({ const properties = yield client.getProperties(); const contentLength = (_a = properties.contentLength) !== null && _a !== void 0 ? _a : -1; if (contentLength < 0) { - core14.debug("Unable to determine content length, downloading file with http-client..."); + core16.debug("Unable to determine content length, downloading file with http-client..."); yield downloadCacheHttpClient(archiveLocation, archivePath); } else { const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH); @@ -73043,7 +73043,7 @@ var require_options = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getDownloadOptions = exports2.getUploadOptions = void 0; - var core14 = __importStar4(require_core()); + var core16 = __importStar4(require_core()); function getUploadOptions(copy) { const result = { useAzureSdk: false, @@ -73063,9 +73063,9 @@ var require_options = __commonJS({ } result.uploadConcurrency = !isNaN(Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) ? Math.min(32, Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) : result.uploadConcurrency; result.uploadChunkSize = !isNaN(Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"])) ? 
Math.min(128 * 1024 * 1024, Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"]) * 1024 * 1024) : result.uploadChunkSize; - core14.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core14.debug(`Upload concurrency: ${result.uploadConcurrency}`); - core14.debug(`Upload chunk size: ${result.uploadChunkSize}`); + core16.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core16.debug(`Upload concurrency: ${result.uploadConcurrency}`); + core16.debug(`Upload chunk size: ${result.uploadChunkSize}`); return result; } exports2.getUploadOptions = getUploadOptions; @@ -73102,12 +73102,12 @@ var require_options = __commonJS({ if (segmentDownloadTimeoutMins && !isNaN(Number(segmentDownloadTimeoutMins)) && isFinite(Number(segmentDownloadTimeoutMins))) { result.segmentTimeoutInMs = Number(segmentDownloadTimeoutMins) * 60 * 1e3; } - core14.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core14.debug(`Download concurrency: ${result.downloadConcurrency}`); - core14.debug(`Request timeout (ms): ${result.timeoutInMs}`); - core14.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); - core14.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); - core14.debug(`Lookup only: ${result.lookupOnly}`); + core16.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core16.debug(`Download concurrency: ${result.downloadConcurrency}`); + core16.debug(`Request timeout (ms): ${result.timeoutInMs}`); + core16.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); + core16.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); + core16.debug(`Lookup only: ${result.lookupOnly}`); return result; } exports2.getDownloadOptions = getDownloadOptions; @@ -73287,7 +73287,7 @@ var require_cacheHttpClient = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.saveCache = exports2.reserveCache = exports2.downloadCache = exports2.getCacheEntry = void 0; - var core14 = __importStar4(require_core()); + var core16 = __importStar4(require_core()); var http_client_1 = require_lib(); var auth_1 = require_auth(); var fs15 = __importStar4(require("fs")); @@ -73305,7 +73305,7 @@ var require_cacheHttpClient = __commonJS({ throw new Error("Cache Service Url not found, unable to restore cache."); } const url2 = `${baseUrl}_apis/artifactcache/${resource}`; - core14.debug(`Resource Url: ${url2}`); + core16.debug(`Resource Url: ${url2}`); return url2; } function createAcceptHeader(type2, apiVersion) { @@ -73333,7 +73333,7 @@ var require_cacheHttpClient = __commonJS({ return httpClient.getJson(getCacheApiUrl(resource)); })); if (response.statusCode === 204) { - if (core14.isDebug()) { + if (core16.isDebug()) { yield printCachesListForDiagnostics(keys[0], httpClient, version); } return null; @@ -73346,9 +73346,9 @@ var require_cacheHttpClient = __commonJS({ if (!cacheDownloadUrl) { throw new Error("Cache not found."); } - core14.setSecret(cacheDownloadUrl); - core14.debug(`Cache Result:`); - core14.debug(JSON.stringify(cacheResult)); + core16.setSecret(cacheDownloadUrl); + core16.debug(`Cache Result:`); + core16.debug(JSON.stringify(cacheResult)); return cacheResult; }); } @@ -73363,10 +73363,10 @@ var require_cacheHttpClient = __commonJS({ const cacheListResult = response.result; const totalCount = cacheListResult === null || cacheListResult === void 0 ? 
void 0 : cacheListResult.totalCount; if (totalCount && totalCount > 0) { - core14.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key + core16.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key Other caches with similar key:`); for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) { - core14.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); + core16.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); } } } @@ -73411,7 +73411,7 @@ Other caches with similar key:`); } function uploadChunk(httpClient, resourceUrl, openStream, start, end) { return __awaiter4(this, void 0, void 0, function* () { - core14.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); + core16.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); const additionalHeaders = { "Content-Type": "application/octet-stream", "Content-Range": getContentRange(start, end) @@ -73433,7 +73433,7 @@ Other caches with similar key:`); const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency); const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize); const parallelUploads = [...new Array(concurrency).keys()]; - core14.debug("Awaiting all uploads"); + core16.debug("Awaiting all uploads"); let offset = 0; try { yield Promise.all(parallelUploads.map(() => __awaiter4(this, void 0, void 0, function* () { @@ -73476,16 +73476,16 @@ Other caches with similar key:`); yield (0, uploadUtils_1.uploadCacheArchiveSDK)(signedUploadURL, archivePath, options); } else { const httpClient = createHttpClient(); - core14.debug("Upload cache"); + core16.debug("Upload cache"); yield uploadFile(httpClient, cacheId, archivePath, options); - core14.debug("Commiting cache"); + core16.debug("Commiting cache"); const cacheSize = utils.getArchiveFileSizeInBytes(archivePath); - core14.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); + core16.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); const commitCacheResponse = yield commitCache(httpClient, 
cacheId, cacheSize); if (!(0, requestUtils_1.isSuccessStatusCode)(commitCacheResponse.statusCode)) { throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`); } - core14.info("Cache saved successfully"); + core16.info("Cache saved successfully"); } }); } @@ -78937,7 +78937,7 @@ var require_cache3 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.saveCache = exports2.restoreCache = exports2.isFeatureAvailable = exports2.FinalizeCacheError = exports2.ReserveCacheError = exports2.ValidationError = void 0; - var core14 = __importStar4(require_core()); + var core16 = __importStar4(require_core()); var path16 = __importStar4(require("path")); var utils = __importStar4(require_cacheUtils()); var cacheHttpClient = __importStar4(require_cacheHttpClient()); @@ -78997,7 +78997,7 @@ var require_cache3 = __commonJS({ function restoreCache3(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { return __awaiter4(this, void 0, void 0, function* () { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core14.debug(`Cache service version: ${cacheServiceVersion}`); + core16.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); switch (cacheServiceVersion) { case "v2": @@ -79013,8 +79013,8 @@ var require_cache3 = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core14.debug("Resolved Keys:"); - core14.debug(JSON.stringify(keys)); + core16.debug("Resolved Keys:"); + core16.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -79032,19 +79032,19 @@ var require_cache3 = __commonJS({ return void 0; } if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { - core14.info("Lookup only - skipping download"); + core16.info("Lookup only - skipping download"); return cacheEntry.cacheKey; } archivePath = path16.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core14.debug(`Archive Path: ${archivePath}`); + core16.debug(`Archive Path: ${archivePath}`); yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); - if (core14.isDebug()) { + if (core16.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core14.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + core16.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); yield (0, tar_1.extractTar)(archivePath, compressionMethod); - core14.info("Cache restored successfully"); + core16.info("Cache restored successfully"); return cacheEntry.cacheKey; } catch (error2) { const typedError = error2; @@ -79052,16 +79052,16 @@ var require_cache3 = __commonJS({ throw error2; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core14.error(`Failed to restore: ${error2.message}`); + core16.error(`Failed to restore: ${error2.message}`); } else { - core14.warning(`Failed to restore: ${error2.message}`); + core16.warning(`Failed to restore: ${error2.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error2) { - core14.debug(`Failed to delete archive: ${error2}`); + core16.debug(`Failed to delete archive: ${error2}`); } } return void 0; @@ -79072,8 +79072,8 @@ var require_cache3 = __commonJS({ options = Object.assign(Object.assign({}, options), { useAzureSdk: true }); restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core14.debug("Resolved Keys:"); - core14.debug(JSON.stringify(keys)); + core16.debug("Resolved Keys:"); + core16.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -79091,30 +79091,30 @@ var require_cache3 = __commonJS({ }; const response = yield twirpClient.GetCacheEntryDownloadURL(request); if (!response.ok) { - core14.debug(`Cache not found for version ${request.version} of keys: ${keys.join(", ")}`); + core16.debug(`Cache not found for version ${request.version} of keys: ${keys.join(", ")}`); return void 0; } const isRestoreKeyMatch = request.key !== response.matchedKey; if (isRestoreKeyMatch) { - core14.info(`Cache hit for restore-key: ${response.matchedKey}`); + core16.info(`Cache hit for restore-key: ${response.matchedKey}`); } else { - core14.info(`Cache hit for: ${response.matchedKey}`); + core16.info(`Cache hit for: ${response.matchedKey}`); } if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { - core14.info("Lookup only - skipping download"); + core16.info("Lookup only - skipping download"); return response.matchedKey; } archivePath = path16.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core14.debug(`Archive path: ${archivePath}`); - core14.debug(`Starting download of archive to: ${archivePath}`); + core16.debug(`Archive path: ${archivePath}`); + core16.debug(`Starting download of archive to: ${archivePath}`); yield cacheHttpClient.downloadCache(response.signedDownloadUrl, archivePath, options); const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core14.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); - if (core14.isDebug()) { + core16.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + if (core16.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } yield (0, tar_1.extractTar)(archivePath, compressionMethod); - core14.info("Cache restored successfully"); + core16.info("Cache restored successfully"); return response.matchedKey; } catch (error2) { const typedError = error2; @@ -79122,9 +79122,9 @@ var require_cache3 = __commonJS({ throw error2; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core14.error(`Failed to restore: ${error2.message}`); + core16.error(`Failed to restore: ${error2.message}`); } else { - core14.warning(`Failed to restore: ${error2.message}`); + core16.warning(`Failed to restore: ${error2.message}`); } } } finally { @@ -79133,7 +79133,7 @@ var require_cache3 = __commonJS({ yield utils.unlinkFile(archivePath); } } catch (error2) { - core14.debug(`Failed to delete archive: ${error2}`); + core16.debug(`Failed to delete archive: ${error2}`); } } return void 0; @@ -79142,7 +79142,7 @@ var require_cache3 = __commonJS({ function saveCache3(paths, key, options, enableCrossOsArchive = false) { return __awaiter4(this, void 0, void 0, function* () { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core14.debug(`Cache service version: ${cacheServiceVersion}`); + core16.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); checkKey(key); switch (cacheServiceVersion) { @@ -79161,26 +79161,26 @@ var require_cache3 = __commonJS({ const compressionMethod = yield utils.getCompressionMethod(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core14.debug("Cache Paths:"); - core14.debug(`${JSON.stringify(cachePaths)}`); + core16.debug("Cache Paths:"); + core16.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = path16.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core14.debug(`Archive Path: ${archivePath}`); + core16.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core14.isDebug()) { + if (core16.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const fileSizeLimit = 10 * 1024 * 1024 * 1024; const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core14.debug(`File Size: ${archiveFileSize}`); + core16.debug(`File Size: ${archiveFileSize}`); if 
(archiveFileSize > fileSizeLimit && !(0, config_1.isGhes)()) { throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); } - core14.debug("Reserving Cache"); + core16.debug("Reserving Cache"); const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, { compressionMethod, enableCrossOsArchive, @@ -79193,26 +79193,26 @@ var require_cache3 = __commonJS({ } else { throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? void 0 : _e.message}`); } - core14.debug(`Saving Cache (ID: ${cacheId})`); + core16.debug(`Saving Cache (ID: ${cacheId})`); yield cacheHttpClient.saveCache(cacheId, archivePath, "", options); } catch (error2) { const typedError = error2; if (typedError.name === ValidationError.name) { throw error2; } else if (typedError.name === ReserveCacheError.name) { - core14.info(`Failed to save: ${typedError.message}`); + core16.info(`Failed to save: ${typedError.message}`); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core14.error(`Failed to save: ${typedError.message}`); + core16.error(`Failed to save: ${typedError.message}`); } else { - core14.warning(`Failed to save: ${typedError.message}`); + core16.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error2) { - core14.debug(`Failed to delete archive: ${error2}`); + core16.debug(`Failed to delete archive: ${error2}`); } } return cacheId; @@ -79225,23 +79225,23 @@ var require_cache3 = __commonJS({ const twirpClient = cacheTwirpClient.internalCacheTwirpClient(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core14.debug("Cache Paths:"); - core14.debug(`${JSON.stringify(cachePaths)}`); + core16.debug("Cache Paths:"); + core16.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = path16.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core14.debug(`Archive Path: ${archivePath}`); + core16.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core14.isDebug()) { + if (core16.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core14.debug(`File Size: ${archiveFileSize}`); + core16.debug(`File Size: ${archiveFileSize}`); options.archiveSizeBytes = archiveFileSize; - core14.debug("Reserving Cache"); + core16.debug("Reserving Cache"); const version = utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive); const request = { key, @@ -79252,16 +79252,16 @@ var require_cache3 = __commonJS({ const response = yield twirpClient.CreateCacheEntry(request); if (!response.ok) { if (response.message) { - core14.warning(`Cache reservation failed: ${response.message}`); + core16.warning(`Cache reservation failed: ${response.message}`); } throw new Error(response.message || "Response was not ok"); } signedUploadUrl = 
response.signedUploadUrl; } catch (error2) { - core14.debug(`Failed to reserve cache: ${error2}`); + core16.debug(`Failed to reserve cache: ${error2}`); throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache.`); } - core14.debug(`Attempting to upload cache located at: ${archivePath}`); + core16.debug(`Attempting to upload cache located at: ${archivePath}`); yield cacheHttpClient.saveCache(cacheId, archivePath, signedUploadUrl, options); const finalizeRequest = { key, @@ -79269,7 +79269,7 @@ var require_cache3 = __commonJS({ sizeBytes: `${archiveFileSize}` }; const finalizeResponse = yield twirpClient.FinalizeCacheEntryUpload(finalizeRequest); - core14.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); + core16.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); if (!finalizeResponse.ok) { if (finalizeResponse.message) { throw new FinalizeCacheError(finalizeResponse.message); @@ -79282,21 +79282,21 @@ var require_cache3 = __commonJS({ if (typedError.name === ValidationError.name) { throw error2; } else if (typedError.name === ReserveCacheError.name) { - core14.info(`Failed to save: ${typedError.message}`); + core16.info(`Failed to save: ${typedError.message}`); } else if (typedError.name === FinalizeCacheError.name) { - core14.warning(typedError.message); + core16.warning(typedError.message); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core14.error(`Failed to save: ${typedError.message}`); + core16.error(`Failed to save: ${typedError.message}`); } else { - core14.warning(`Failed to save: ${typedError.message}`); + core16.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error2) { - core14.debug(`Failed to delete archive: ${error2}`); + core16.debug(`Failed to delete archive: ${error2}`); } } return cacheId; @@ -80801,7 +80801,7 @@ var require_retry_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.RetryHelper = void 0; - var core14 = __importStar4(require_core()); + var core16 = __importStar4(require_core()); var RetryHelper = class { constructor(maxAttempts, minSeconds, maxSeconds) { if (maxAttempts < 1) { @@ -80824,10 +80824,10 @@ var require_retry_helper = __commonJS({ if (isRetryable && !isRetryable(err)) { throw err; } - core14.info(err.message); + core16.info(err.message); } const seconds = this.getSleepAmount(); - core14.info(`Waiting ${seconds} seconds before trying again`); + core16.info(`Waiting ${seconds} seconds before trying again`); yield this.sleep(seconds); attempt++; } @@ -80907,7 +80907,7 @@ var require_tool_cache = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.evaluateVersions = exports2.isExplicitVersion = exports2.findFromManifest = exports2.getManifestFromRepo = exports2.findAllVersions = exports2.find = exports2.cacheFile = exports2.cacheDir = exports2.extractZip = exports2.extractXar = exports2.extractTar = exports2.extract7z = exports2.downloadTool = exports2.HTTPError = void 0; - var core14 = __importStar4(require_core()); + var core16 = __importStar4(require_core()); var io6 = __importStar4(require_io()); var crypto = __importStar4(require("crypto")); var fs15 = __importStar4(require("fs")); @@ -80936,8 +80936,8 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { dest = dest || 
path16.join(_getTempDirectory(), crypto.randomUUID()); yield io6.mkdirP(path16.dirname(dest)); - core14.debug(`Downloading ${url2}`); - core14.debug(`Destination ${dest}`); + core16.debug(`Downloading ${url2}`); + core16.debug(`Destination ${dest}`); const maxAttempts = 3; const minSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS", 10); const maxSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS", 20); @@ -80964,7 +80964,7 @@ var require_tool_cache = __commonJS({ allowRetries: false }); if (auth) { - core14.debug("set auth"); + core16.debug("set auth"); if (headers === void 0) { headers = {}; } @@ -80973,7 +80973,7 @@ var require_tool_cache = __commonJS({ const response = yield http.get(url2, headers); if (response.message.statusCode !== 200) { const err = new HTTPError2(response.message.statusCode); - core14.debug(`Failed to download from "${url2}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); + core16.debug(`Failed to download from "${url2}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); throw err; } const pipeline = util.promisify(stream2.pipeline); @@ -80982,16 +80982,16 @@ var require_tool_cache = __commonJS({ let succeeded = false; try { yield pipeline(readStream, fs15.createWriteStream(dest)); - core14.debug("download complete"); + core16.debug("download complete"); succeeded = true; return dest; } finally { if (!succeeded) { - core14.debug("download failed"); + core16.debug("download failed"); try { yield io6.rmRF(dest); } catch (err) { - core14.debug(`Failed to delete '${dest}'. ${err.message}`); + core16.debug(`Failed to delete '${dest}'. ${err.message}`); } } } @@ -81006,7 +81006,7 @@ var require_tool_cache = __commonJS({ process.chdir(dest); if (_7zPath) { try { - const logLevel = core14.isDebug() ? "-bb1" : "-bb0"; + const logLevel = core16.isDebug() ? 
"-bb1" : "-bb0"; const args = [ "x", logLevel, @@ -81056,7 +81056,7 @@ var require_tool_cache = __commonJS({ throw new Error("parameter 'file' is required"); } dest = yield _createExtractFolder(dest); - core14.debug("Checking tar --version"); + core16.debug("Checking tar --version"); let versionOutput = ""; yield (0, exec_1.exec)("tar --version", [], { ignoreReturnCode: true, @@ -81066,7 +81066,7 @@ var require_tool_cache = __commonJS({ stderr: (data) => versionOutput += data.toString() } }); - core14.debug(versionOutput.trim()); + core16.debug(versionOutput.trim()); const isGnuTar = versionOutput.toUpperCase().includes("GNU TAR"); let args; if (flags instanceof Array) { @@ -81074,7 +81074,7 @@ var require_tool_cache = __commonJS({ } else { args = [flags]; } - if (core14.isDebug() && !flags.includes("v")) { + if (core16.isDebug() && !flags.includes("v")) { args.push("-v"); } let destArg = dest; @@ -81106,7 +81106,7 @@ var require_tool_cache = __commonJS({ args = [flags]; } args.push("-x", "-C", dest, "-f", file); - if (core14.isDebug()) { + if (core16.isDebug()) { args.push("-v"); } const xarPath = yield io6.which("xar", true); @@ -81151,7 +81151,7 @@ var require_tool_cache = __commonJS({ "-Command", pwshCommand ]; - core14.debug(`Using pwsh at path: ${pwshPath}`); + core16.debug(`Using pwsh at path: ${pwshPath}`); yield (0, exec_1.exec)(`"${pwshPath}"`, args); } else { const powershellCommand = [ @@ -81171,7 +81171,7 @@ var require_tool_cache = __commonJS({ powershellCommand ]; const powershellPath = yield io6.which("powershell", true); - core14.debug(`Using powershell at path: ${powershellPath}`); + core16.debug(`Using powershell at path: ${powershellPath}`); yield (0, exec_1.exec)(`"${powershellPath}"`, args); } }); @@ -81180,7 +81180,7 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { const unzipPath = yield io6.which("unzip", true); const args = [file]; - if (!core14.isDebug()) { + if (!core16.isDebug()) { args.unshift("-q"); } args.unshift("-o"); @@ -81191,8 +81191,8 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { version = semver8.clean(version) || version; arch2 = arch2 || os3.arch(); - core14.debug(`Caching tool ${tool} ${version} ${arch2}`); - core14.debug(`source dir: ${sourceDir}`); + core16.debug(`Caching tool ${tool} ${version} ${arch2}`); + core16.debug(`source dir: ${sourceDir}`); if (!fs15.statSync(sourceDir).isDirectory()) { throw new Error("sourceDir is not a directory"); } @@ -81210,14 +81210,14 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { version = semver8.clean(version) || version; arch2 = arch2 || os3.arch(); - core14.debug(`Caching tool ${tool} ${version} ${arch2}`); - core14.debug(`source file: ${sourceFile}`); + core16.debug(`Caching tool ${tool} ${version} ${arch2}`); + core16.debug(`source file: ${sourceFile}`); if (!fs15.statSync(sourceFile).isFile()) { throw new Error("sourceFile is not a file"); } const destFolder = yield _createToolPath(tool, version, arch2); const destPath = path16.join(destFolder, targetFile); - core14.debug(`destination file ${destPath}`); + core16.debug(`destination file ${destPath}`); yield io6.cp(sourceFile, destPath); _completeToolPath(tool, version, arch2); return destFolder; @@ -81241,12 +81241,12 @@ var require_tool_cache = __commonJS({ if (versionSpec) { versionSpec = semver8.clean(versionSpec) || ""; const cachePath = path16.join(_getCacheDirectory(), toolName, versionSpec, arch2); - 
core14.debug(`checking cache: ${cachePath}`); + core16.debug(`checking cache: ${cachePath}`); if (fs15.existsSync(cachePath) && fs15.existsSync(`${cachePath}.complete`)) { - core14.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); + core16.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); toolPath = cachePath; } else { - core14.debug("not found"); + core16.debug("not found"); } } return toolPath; @@ -81277,7 +81277,7 @@ var require_tool_cache = __commonJS({ const http = new httpm.HttpClient("tool-cache"); const headers = {}; if (auth) { - core14.debug("set auth"); + core16.debug("set auth"); headers.authorization = auth; } const response = yield http.getJson(treeUrl, headers); @@ -81298,7 +81298,7 @@ var require_tool_cache = __commonJS({ try { releases = JSON.parse(versionsRaw); } catch (_a) { - core14.debug("Invalid json"); + core16.debug("Invalid json"); } } return releases; @@ -81324,7 +81324,7 @@ var require_tool_cache = __commonJS({ function _createToolPath(tool, version, arch2) { return __awaiter4(this, void 0, void 0, function* () { const folderPath = path16.join(_getCacheDirectory(), tool, semver8.clean(version) || version, arch2 || ""); - core14.debug(`destination ${folderPath}`); + core16.debug(`destination ${folderPath}`); const markerPath = `${folderPath}.complete`; yield io6.rmRF(folderPath); yield io6.rmRF(markerPath); @@ -81336,19 +81336,19 @@ var require_tool_cache = __commonJS({ const folderPath = path16.join(_getCacheDirectory(), tool, semver8.clean(version) || version, arch2 || ""); const markerPath = `${folderPath}.complete`; fs15.writeFileSync(markerPath, ""); - core14.debug("finished caching tool"); + core16.debug("finished caching tool"); } function isExplicitVersion(versionSpec) { const c = semver8.clean(versionSpec) || ""; - core14.debug(`isExplicit: ${c}`); + core16.debug(`isExplicit: ${c}`); const valid3 = semver8.valid(c) != null; - core14.debug(`explicit? ${valid3}`); + core16.debug(`explicit? 
${valid3}`); return valid3; } exports2.isExplicitVersion = isExplicitVersion; function evaluateVersions(versions, versionSpec) { let version = ""; - core14.debug(`evaluating ${versions.length} versions`); + core16.debug(`evaluating ${versions.length} versions`); versions = versions.sort((a, b) => { if (semver8.gt(a, b)) { return 1; @@ -81364,9 +81364,9 @@ var require_tool_cache = __commonJS({ } } if (version) { - core14.debug(`matched: ${version}`); + core16.debug(`matched: ${version}`); } else { - core14.debug("match not found"); + core16.debug("match not found"); } return version; } @@ -84839,7 +84839,7 @@ var require_sarif_schema_2_1_0 = __commonJS({ }); // src/upload-sarif-action.ts -var core13 = __toESM(require_core()); +var core15 = __toESM(require_core()); // src/actions-util.ts var fs4 = __toESM(require("fs")); @@ -90055,7 +90055,7 @@ var fs14 = __toESM(require("fs")); var path15 = __toESM(require("path")); var url = __toESM(require("url")); var import_zlib = __toESM(require("zlib")); -var core12 = __toESM(require_core()); +var core14 = __toESM(require_core()); var jsonschema2 = __toESM(require_lib2()); // src/codeql.ts @@ -92952,8 +92952,28 @@ async function addFingerprints(sarif, sourceRoot, logger) { } // src/init.ts +var core13 = __toESM(require_core()); var toolrunner4 = __toESM(require_toolrunner()); var io5 = __toESM(require_io()); + +// src/workflow.ts +var core12 = __toESM(require_core()); +function toCodedErrors(errors) { + return Object.entries(errors).reduce( + (acc, [code, message]) => { + acc[code] = { message, code }; + return acc; + }, + {} + ); +} +var WorkflowErrors = toCodedErrors({ + MissingPushHook: `Please specify an on.push hook to analyze and see code scanning alerts from the default branch on the Security tab.`, + CheckoutWrongHead: `git checkout HEAD^2 is no longer necessary. Please remove this step as Code Scanning recommends analyzing the merge commit for best results.`, + InconsistentActionVersion: `Not all workflow steps that use \`github/codeql-action\` actions use the same version. Please ensure that all such steps use the same version to avoid compatibility issues.` +}); + +// src/init.ts async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger) { logger.startGroup("Setup CodeQL tools"); const { @@ -93083,7 +93103,7 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo logger.warning( `Uploading multiple SARIF runs with the same category is deprecated ${deprecationWarningMessage}. Please update your workflow to upload a single run per category. ${deprecationMoreInformationMessage}` ); - core12.exportVariable("CODEQL_MERGE_SARIF_DEPRECATION_WARNING", "true"); + core14.exportVariable("CODEQL_MERGE_SARIF_DEPRECATION_WARNING", "true"); } return combineSarifFiles(sarifFiles, logger); } @@ -93182,13 +93202,13 @@ async function uploadPayload(payload, repositoryNwo, logger, analysis) { if (httpError !== void 0) { switch (httpError.status) { case 403: - core12.warning(httpError.message || GENERIC_403_MSG); + core14.warning(httpError.message || GENERIC_403_MSG); break; case 404: - core12.warning(httpError.message || GENERIC_404_MSG); + core14.warning(httpError.message || GENERIC_404_MSG); break; default: - core12.warning(httpError.message); + core14.warning(httpError.message); break; } } @@ -93301,9 +93321,9 @@ function validateSarifFileSchema(sarif, sarifFilePath, logger) { const warnings = (result.errors ?? 
[]).filter( (err) => err.name === "format" && typeof err.argument === "string" && warningAttributes.includes(err.argument) ); - for (const warning7 of warnings) { + for (const warning8 of warnings) { logger.info( - `Warning: '${warning7.instance}' is not a valid URI in '${warning7.property}'.` + `Warning: '${warning8.instance}' is not a valid URI in '${warning8.property}'.` ); } if (errors.length > 0) { @@ -93572,7 +93592,7 @@ function validateUniqueCategory(sarif, sentinelPrefix) { `Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job per tool/category. The easiest fix is to specify a unique value for the \`category\` input. If .runs[].automationDetails.id is specified in the sarif file, that will take precedence over your configured \`category\`. Category: (${id ? id : "none"}) Tool: (${tool ? tool : "none"})` ); } - core12.exportVariable(sentinelEnvVar, sentinelEnvVar); + core14.exportVariable(sentinelEnvVar, sentinelEnvVar); } } function sanitize(str2) { @@ -93712,11 +93732,11 @@ async function run() { } const codeScanningResult = uploadResults["code-scanning" /* CodeScanning */]; if (codeScanningResult !== void 0) { - core13.setOutput("sarif-id", codeScanningResult.sarifID); + core15.setOutput("sarif-id", codeScanningResult.sarifID); } - core13.setOutput("sarif-ids", JSON.stringify(uploadResults)); + core15.setOutput("sarif-ids", JSON.stringify(uploadResults)); if (shouldSkipSarifUpload()) { - core13.debug( + core15.debug( "SARIF upload disabled by an environment variable. Waiting for processing is disabled." ); } else if (getRequiredInput("wait-for-processing") === "true") { @@ -93736,7 +93756,7 @@ async function run() { } catch (unwrappedError) { const error2 = isThirdPartyAnalysis("upload-sarif" /* UploadSarif */) && unwrappedError instanceof InvalidSarifUploadError ? new ConfigurationError(unwrappedError.message) : wrapError(unwrappedError); const message = error2.message; - core13.setFailed(message); + core15.setFailed(message); const errorStatusReportBase = await createStatusReportBase( "upload-sarif" /* UploadSarif */, getActionsStatus(error2), @@ -93757,7 +93777,7 @@ async function runWrapper() { try { await run(); } catch (error2) { - core13.setFailed( + core15.setFailed( `codeql/upload-sarif action failed: ${getErrorMessage(error2)}` ); } diff --git a/src/init-action.ts b/src/init-action.ts index d10c6a81a9..6984f9a379 100644 --- a/src/init-action.ts +++ b/src/init-action.ts @@ -40,6 +40,7 @@ import { loadPropertiesFromApi } from "./feature-flags/properties"; import { checkInstallPython311, checkPacksForOverlayCompatibility, + checkWorkflow, cleanupDatabaseClusterDirectory, initCodeQL, initConfig, @@ -86,7 +87,6 @@ import { getErrorMessage, BuildMode, } from "./util"; -import { validateWorkflow } from "./workflow"; /** * Sends a status report indicating that the `init` Action is starting. @@ -288,19 +288,9 @@ async function run() { toolsSource = initCodeQLResult.toolsSource; zstdAvailability = initCodeQLResult.zstdAvailability; - // Check the workflow for problems, unless `SKIP_WORKFLOW_VALIDATION` is `true`. 
- if (process.env[EnvVar.SKIP_WORKFLOW_VALIDATION] !== "true") { - core.startGroup("Validating workflow"); - const validateWorkflowResult = await validateWorkflow(codeql, logger); - if (validateWorkflowResult === undefined) { - logger.info("Detected no issues with the code scanning workflow."); - } else { - logger.warning( - `Unable to validate code scanning workflow: ${validateWorkflowResult}`, - ); - } - core.endGroup(); - } + // Check the workflow for problems. Any problems found are reported to the + // workflow log; no exceptions are thrown. + await checkWorkflow(logger, codeql); // Set CODEQL_ENABLE_EXPERIMENTAL_FEATURES for Rust if between 2.19.3 (included) and 2.22.1 (excluded) // We need to set this environment variable before initializing the config, otherwise Rust diff --git a/src/init.test.ts b/src/init.test.ts index 6640e081f3..a4d5d48aaa 100644 --- a/src/init.test.ts +++ b/src/init.test.ts @@ -2,23 +2,81 @@ import * as fs from "fs"; import path from "path"; import test, { ExecutionContext } from "ava"; +import * as sinon from "sinon"; import { createStubCodeQL } from "./codeql"; +import { EnvVar } from "./environment"; import { checkPacksForOverlayCompatibility, + checkWorkflow, cleanupDatabaseClusterDirectory, } from "./init"; import { KnownLanguage } from "./languages"; import { LoggedMessage, + checkExpectedLogMessages, createTestConfig, getRecordingLogger, setupTests, } from "./testing-utils"; import { ConfigurationError, withTmpDir } from "./util"; +import * as workflow from "./workflow"; setupTests(test); +test("checkWorkflow - validates workflow if `SKIP_WORKFLOW_VALIDATION` is not set", async (t) => { + const messages: LoggedMessage[] = []; + const codeql = createStubCodeQL({}); + + const validateWorkflow = sinon.stub(workflow, "validateWorkflow"); + validateWorkflow.resolves(undefined); + + await checkWorkflow(getRecordingLogger(messages), codeql); + + t.assert( + validateWorkflow.calledOnce, + "`checkWorkflow` unexpectedly did not call `validateWorkflow`", + ); + checkExpectedLogMessages(t, messages, [ + "Detected no issues with the code scanning workflow.", + ]); +}); + +test("checkWorkflow - logs problems with workflow validation", async (t) => { + const messages: LoggedMessage[] = []; + const codeql = createStubCodeQL({}); + + const validateWorkflow = sinon.stub(workflow, "validateWorkflow"); + validateWorkflow.resolves("problem"); + + await checkWorkflow(getRecordingLogger(messages), codeql); + + t.assert( + validateWorkflow.calledOnce, + "`checkWorkflow` unexpectedly did not call `validateWorkflow`", + ); + checkExpectedLogMessages(t, messages, [ + "Unable to validate code scanning workflow: problem", + ]); +}); + +test("checkWorkflow - skips validation if `SKIP_WORKFLOW_VALIDATION` is `true`", async (t) => { + process.env[EnvVar.SKIP_WORKFLOW_VALIDATION] = "true"; + + const messages: LoggedMessage[] = []; + const codeql = createStubCodeQL({}); + + const validateWorkflow = sinon.stub(workflow, "validateWorkflow"); + + await checkWorkflow(getRecordingLogger(messages), codeql); + + t.assert( + validateWorkflow.notCalled, + "`checkWorkflow` called `validateWorkflow` unexpectedly", + ); + t.is(messages.length, 0); +}); + test("cleanupDatabaseClusterDirectory cleans up where possible", async (t) => { await withTmpDir(async (tmpDir: string) => { const dbLocation = path.resolve(tmpDir, "dbs"); diff --git a/src/init.ts b/src/init.ts index b399ef8d9d..f0a3b1dd59 100644 --- a/src/init.ts +++ b/src/init.ts @@ -1,6 +1,7 @@ import * as fs from "fs"; import * as
path from "path"; +import * as core from "@actions/core"; import * as toolrunner from "@actions/exec/lib/toolrunner"; import * as io from "@actions/io"; import * as yaml from "js-yaml"; @@ -9,6 +10,7 @@ import { getOptionalInput, isSelfHostedRunner } from "./actions-util"; import { GitHubApiDetails } from "./api-client"; import { CodeQL, setupCodeQL } from "./codeql"; import * as configUtils from "./config-utils"; +import { EnvVar } from "./environment"; import { CodeQLDefaultVersionInfo, FeatureEnablement } from "./feature-flags"; import { KnownLanguage, Language } from "./languages"; import { Logger, withGroupAsync } from "./logging"; @@ -16,6 +18,29 @@ import { ToolsSource } from "./setup-codeql"; import { ZstdAvailability } from "./tar"; import { ToolsDownloadStatusReport } from "./tools-download"; import * as util from "./util"; +import { validateWorkflow } from "./workflow"; + +/** + * A wrapper around `validateWorkflow` which reports the outcome. + * + * @param logger The logger to use. + * @param codeql The CodeQL instance. + */ +export async function checkWorkflow(logger: Logger, codeql: CodeQL) { + // Check the workflow for problems, unless `SKIP_WORKFLOW_VALIDATION` is `true`. + if (process.env[EnvVar.SKIP_WORKFLOW_VALIDATION] !== "true") { + core.startGroup("Validating workflow"); + const validateWorkflowResult = await validateWorkflow(codeql, logger); + if (validateWorkflowResult === undefined) { + logger.info("Detected no issues with the code scanning workflow."); + } else { + logger.warning( + `Unable to validate code scanning workflow: ${validateWorkflowResult}`, + ); + } + core.endGroup(); + } +} export async function initCodeQL( toolsInput: string | undefined, From 50601762ea63865e1852fdf110a64b4e23b6dcdc Mon Sep 17 00:00:00 2001 From: "Michael B. 
Gale" Date: Mon, 27 Oct 2025 16:07:57 +0000 Subject: [PATCH 3/5] Also skip workflow validation for `dynamic` workflows --- lib/init-action.js | 2 +- src/init.test.ts | 23 +++++++++++++++++++++++ src/init.ts | 14 +++++++++++--- 3 files changed, 35 insertions(+), 4 deletions(-) diff --git a/lib/init-action.js b/lib/init-action.js index 103f85087c..415bbdd1c1 100644 --- a/lib/init-action.js +++ b/lib/init-action.js @@ -91829,7 +91829,7 @@ async function getWorkflowAbsolutePath(logger) { // src/init.ts async function checkWorkflow(logger, codeql) { - if (process.env["CODEQL_ACTION_SKIP_WORKFLOW_VALIDATION" /* SKIP_WORKFLOW_VALIDATION */] !== "true") { + if (!isDynamicWorkflow() && process.env["CODEQL_ACTION_SKIP_WORKFLOW_VALIDATION" /* SKIP_WORKFLOW_VALIDATION */] !== "true") { core12.startGroup("Validating workflow"); const validateWorkflowResult = await validateWorkflow(codeql, logger); if (validateWorkflowResult === void 0) { diff --git a/src/init.test.ts b/src/init.test.ts index a4d5d48aaa..bd267fb26b 100644 --- a/src/init.test.ts +++ b/src/init.test.ts @@ -4,6 +4,7 @@ import path from "path"; import test, { ExecutionContext } from "ava"; import * as sinon from "sinon"; +import * as actionsUtil from "./actions-util"; import { createStubCodeQL } from "./codeql"; import { EnvVar } from "./environment"; import { @@ -28,6 +29,7 @@ test("checkWorkflow - validates workflow if `SKIP_WORKFLOW_VALIDATION` is not se const messages: LoggedMessage[] = []; const codeql = createStubCodeQL({}); + sinon.stub(actionsUtil, "isDynamicWorkflow").returns(false); const validateWorkflow = sinon.stub(workflow, "validateWorkflow"); validateWorkflow.resolves(undefined); @@ -46,6 +48,7 @@ test("checkWorkflow - logs problems with workflow validation", async (t) => { const messages: LoggedMessage[] = []; const codeql = createStubCodeQL({}); + sinon.stub(actionsUtil, "isDynamicWorkflow").returns(false); const validateWorkflow = sinon.stub(workflow, "validateWorkflow"); validateWorkflow.resolves("problem"); @@ -66,6 +69,7 @@ test("checkWorkflow - skips validation if `SKIP_WORKFLOW_VALIDATION` is `true`", const messages: LoggedMessage[] = []; const codeql = createStubCodeQL({}); + sinon.stub(actionsUtil, "isDynamicWorkflow").returns(false); const validateWorkflow = sinon.stub(workflow, "validateWorkflow"); await checkWorkflow(getRecordingLogger(messages), codeql); @@ -77,6 +81,25 @@ test("checkWorkflow - skips validation if `SKIP_WORKFLOW_VALIDATION` is `true`", t.is(messages.length, 0); }); +test("checkWorkflow - skips validation for `dynamic` workflows", async (t) => { + const messages: LoggedMessage[] = []; + const codeql = createStubCodeQL({}); + + const isDynamicWorkflow = sinon + .stub(actionsUtil, "isDynamicWorkflow") + .returns(true); + const validateWorkflow = sinon.stub(workflow, "validateWorkflow"); + + await checkWorkflow(getRecordingLogger(messages), codeql); + + t.assert(isDynamicWorkflow.calledOnce); + t.assert( + validateWorkflow.notCalled, + "`checkWorkflow` called `validateWorkflow` unexpectedly", + ); + t.is(messages.length, 0); +}); + test("cleanupDatabaseClusterDirectory cleans up where possible", async (t) => { await withTmpDir(async (tmpDir: string) => { const dbLocation = path.resolve(tmpDir, "dbs"); diff --git a/src/init.ts b/src/init.ts index f0a3b1dd59..7d8df61f68 100644 --- a/src/init.ts +++ b/src/init.ts @@ -6,7 +6,11 @@ import * as toolrunner from "@actions/exec/lib/toolrunner"; import * as io from "@actions/io"; import * as yaml from "js-yaml"; -import { getOptionalInput, isSelfHostedRunner 
} from "./actions-util"; +import { + getOptionalInput, + isDynamicWorkflow, + isSelfHostedRunner, +} from "./actions-util"; import { GitHubApiDetails } from "./api-client"; import { CodeQL, setupCodeQL } from "./codeql"; import * as configUtils from "./config-utils"; @@ -27,8 +31,12 @@ import { validateWorkflow } from "./workflow"; * @param codeql The CodeQL instance. */ export async function checkWorkflow(logger: Logger, codeql: CodeQL) { - // Check the workflow for problems, unless `SKIP_WORKFLOW_VALIDATION` is `true`. - if (process.env[EnvVar.SKIP_WORKFLOW_VALIDATION] !== "true") { + // Check the workflow for problems, unless `SKIP_WORKFLOW_VALIDATION` is `true` + // or the workflow trigger is `dynamic`. + if ( + !isDynamicWorkflow() && + process.env[EnvVar.SKIP_WORKFLOW_VALIDATION] !== "true" + ) { core.startGroup("Validating workflow"); const validateWorkflowResult = await validateWorkflow(codeql, logger); if (validateWorkflowResult === undefined) { From 55c083790a0c92eadbda8a97f2f7c12ac848ac9f Mon Sep 17 00:00:00 2001 From: "Michael B. Gale" Date: Mon, 27 Oct 2025 17:01:23 +0000 Subject: [PATCH 4/5] Move `checkWorkflow` to `workflow.ts` --- lib/analyze-action.js | 376 ++++++++-------- lib/init-action-post.js | 851 ++++++++++++++++++------------------- lib/init-action.js | 785 +++++++++++++++++----------------- lib/setup-codeql-action.js | 354 ++++++++------- lib/upload-lib.js | 332 +++++++-------- lib/upload-sarif-action.js | 344 +++++++-------- src/init-action.ts | 2 +- src/init.test.ts | 81 ---- src/init.ts | 35 +- src/workflow.test.ts | 88 +++- src/workflow.ts | 37 +- 11 files changed, 1607 insertions(+), 1678 deletions(-) diff --git a/lib/analyze-action.js b/lib/analyze-action.js index 6b4092b81c..d3148efde0 100644 --- a/lib/analyze-action.js +++ b/lib/analyze-action.js @@ -19743,10 +19743,10 @@ Support boolean input list: \`true | True | TRUE | false | False | FALSE\``); (0, command_1.issueCommand)("error", (0, utils_1.toCommandProperties)(properties), message instanceof Error ? message.toString() : message); } exports2.error = error2; - function warning8(message, properties = {}) { + function warning7(message, properties = {}) { (0, command_1.issueCommand)("warning", (0, utils_1.toCommandProperties)(properties), message instanceof Error ? message.toString() : message); } - exports2.warning = warning8; + exports2.warning = warning7; function notice(message, properties = {}) { (0, command_1.issueCommand)("notice", (0, utils_1.toCommandProperties)(properties), message instanceof Error ? 
message.toString() : message); } @@ -19755,22 +19755,22 @@ Support boolean input list: \`true | True | TRUE | false | False | FALSE\``); process.stdout.write(message + os5.EOL); } exports2.info = info4; - function startGroup4(name) { + function startGroup3(name) { (0, command_1.issue)("group", name); } - exports2.startGroup = startGroup4; - function endGroup4() { + exports2.startGroup = startGroup3; + function endGroup3() { (0, command_1.issue)("endgroup"); } - exports2.endGroup = endGroup4; + exports2.endGroup = endGroup3; function group(name, fn) { return __awaiter4(this, void 0, void 0, function* () { - startGroup4(name); + startGroup3(name); let result; try { result = yield fn(); } finally { - endGroup4(); + endGroup3(); } return result; }); @@ -35312,7 +35312,7 @@ var require_internal_glob_options_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getOptions = void 0; - var core17 = __importStar4(require_core()); + var core15 = __importStar4(require_core()); function getOptions(copy) { const result = { followSymbolicLinks: true, @@ -35322,15 +35322,15 @@ var require_internal_glob_options_helper = __commonJS({ if (copy) { if (typeof copy.followSymbolicLinks === "boolean") { result.followSymbolicLinks = copy.followSymbolicLinks; - core17.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); + core15.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); } if (typeof copy.implicitDescendants === "boolean") { result.implicitDescendants = copy.implicitDescendants; - core17.debug(`implicitDescendants '${result.implicitDescendants}'`); + core15.debug(`implicitDescendants '${result.implicitDescendants}'`); } if (typeof copy.omitBrokenSymbolicLinks === "boolean") { result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; - core17.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); + core15.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); } } return result; @@ -36784,7 +36784,7 @@ var require_internal_globber = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; - var core17 = __importStar4(require_core()); + var core15 = __importStar4(require_core()); var fs20 = __importStar4(require("fs")); var globOptionsHelper = __importStar4(require_internal_glob_options_helper()); var path20 = __importStar4(require("path")); @@ -36835,7 +36835,7 @@ var require_internal_globber = __commonJS({ } const stack = []; for (const searchPath of patternHelper.getSearchPaths(patterns)) { - core17.debug(`Search path '${searchPath}'`); + core15.debug(`Search path '${searchPath}'`); try { yield __await4(fs20.promises.lstat(searchPath)); } catch (err) { @@ -36907,7 +36907,7 @@ var require_internal_globber = __commonJS({ } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { - core17.debug(`Broken symlink '${item.path}'`); + core15.debug(`Broken symlink '${item.path}'`); return void 0; } throw new Error(`No information found for the path '${item.path}'. 
This may indicate a broken symbolic link.`); @@ -36923,7 +36923,7 @@ var require_internal_globber = __commonJS({ traversalChain.pop(); } if (traversalChain.some((x) => x === realPath)) { - core17.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); + core15.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); return void 0; } traversalChain.push(realPath); @@ -38247,7 +38247,7 @@ var require_cacheUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getRuntimeToken = exports2.getCacheVersion = exports2.assertDefined = exports2.getGnuTarPathOnWindows = exports2.getCacheFileName = exports2.getCompressionMethod = exports2.unlinkFile = exports2.resolvePaths = exports2.getArchiveFileSizeInBytes = exports2.createTempDirectory = void 0; - var core17 = __importStar4(require_core()); + var core15 = __importStar4(require_core()); var exec2 = __importStar4(require_exec()); var glob2 = __importStar4(require_glob()); var io7 = __importStar4(require_io()); @@ -38300,7 +38300,7 @@ var require_cacheUtils = __commonJS({ _e = false; const file = _c; const relativeFile = path20.relative(workspace, file).replace(new RegExp(`\\${path20.sep}`, "g"), "/"); - core17.debug(`Matched: ${relativeFile}`); + core15.debug(`Matched: ${relativeFile}`); if (relativeFile === "") { paths.push("."); } else { @@ -38330,7 +38330,7 @@ var require_cacheUtils = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { let versionOutput = ""; additionalArgs.push("--version"); - core17.debug(`Checking ${app} ${additionalArgs.join(" ")}`); + core15.debug(`Checking ${app} ${additionalArgs.join(" ")}`); try { yield exec2.exec(`${app}`, additionalArgs, { ignoreReturnCode: true, @@ -38341,10 +38341,10 @@ var require_cacheUtils = __commonJS({ } }); } catch (err) { - core17.debug(err.message); + core15.debug(err.message); } versionOutput = versionOutput.trim(); - core17.debug(versionOutput); + core15.debug(versionOutput); return versionOutput; }); } @@ -38352,7 +38352,7 @@ var require_cacheUtils = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { const versionOutput = yield getVersion("zstd", ["--quiet"]); const version = semver8.clean(versionOutput); - core17.debug(`zstd version: ${version}`); + core15.debug(`zstd version: ${version}`); if (versionOutput === "") { return constants_1.CompressionMethod.Gzip; } else { @@ -73664,7 +73664,7 @@ var require_uploadUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.uploadCacheArchiveSDK = exports2.UploadProgress = void 0; - var core17 = __importStar4(require_core()); + var core15 = __importStar4(require_core()); var storage_blob_1 = require_dist7(); var errors_1 = require_errors2(); var UploadProgress = class { @@ -73706,7 +73706,7 @@ var require_uploadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const uploadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core17.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); + core15.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -73761,14 +73761,14 @@ var require_uploadUtils = __commonJS({ }; try { uploadProgress.startDisplayTimer(); - core17.debug(`BlobClient: 
${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); + core15.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); const response = yield blockBlobClient.uploadFile(archivePath, uploadOptions); if (response._response.status >= 400) { throw new errors_1.InvalidResponseError(`uploadCacheArchiveSDK: upload failed with status code ${response._response.status}`); } return response; } catch (error2) { - core17.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error2.message}`); + core15.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error2.message}`); throw error2; } finally { uploadProgress.stopDisplayTimer(); @@ -73839,7 +73839,7 @@ var require_requestUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.retryHttpClientResponse = exports2.retryTypedResponse = exports2.retry = exports2.isRetryableStatusCode = exports2.isServerErrorStatusCode = exports2.isSuccessStatusCode = void 0; - var core17 = __importStar4(require_core()); + var core15 = __importStar4(require_core()); var http_client_1 = require_lib(); var constants_1 = require_constants10(); function isSuccessStatusCode(statusCode) { @@ -73900,9 +73900,9 @@ var require_requestUtils = __commonJS({ isRetryable = isRetryableStatusCode(statusCode); errorMessage = `Cache service responded with ${statusCode}`; } - core17.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); + core15.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); if (!isRetryable) { - core17.debug(`${name} - Error is not retryable`); + core15.debug(`${name} - Error is not retryable`); break; } yield sleep(delay2); @@ -74007,7 +74007,7 @@ var require_downloadUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.downloadCacheStorageSDK = exports2.downloadCacheHttpClientConcurrent = exports2.downloadCacheHttpClient = exports2.DownloadProgress = void 0; - var core17 = __importStar4(require_core()); + var core15 = __importStar4(require_core()); var http_client_1 = require_lib(); var storage_blob_1 = require_dist7(); var buffer = __importStar4(require("buffer")); @@ -74045,7 +74045,7 @@ var require_downloadUtils = __commonJS({ this.segmentIndex = this.segmentIndex + 1; this.segmentSize = segmentSize; this.receivedBytes = 0; - core17.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); + core15.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); } /** * Sets the number of bytes received for the current segment. 
@@ -74079,7 +74079,7 @@ var require_downloadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const downloadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core17.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); + core15.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -74129,7 +74129,7 @@ var require_downloadUtils = __commonJS({ })); downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => { downloadResponse.message.destroy(); - core17.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); + core15.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); }); yield pipeResponseToStream(downloadResponse, writeStream); const contentLengthHeader = downloadResponse.message.headers["content-length"]; @@ -74140,7 +74140,7 @@ var require_downloadUtils = __commonJS({ throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`); } } else { - core17.debug("Unable to validate download, no Content-Length header"); + core15.debug("Unable to validate download, no Content-Length header"); } }); } @@ -74260,7 +74260,7 @@ var require_downloadUtils = __commonJS({ const properties = yield client.getProperties(); const contentLength = (_a = properties.contentLength) !== null && _a !== void 0 ? _a : -1; if (contentLength < 0) { - core17.debug("Unable to determine content length, downloading file with http-client..."); + core15.debug("Unable to determine content length, downloading file with http-client..."); yield downloadCacheHttpClient(archiveLocation, archivePath); } else { const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH); @@ -74340,7 +74340,7 @@ var require_options = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getDownloadOptions = exports2.getUploadOptions = void 0; - var core17 = __importStar4(require_core()); + var core15 = __importStar4(require_core()); function getUploadOptions(copy) { const result = { useAzureSdk: false, @@ -74360,9 +74360,9 @@ var require_options = __commonJS({ } result.uploadConcurrency = !isNaN(Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) ? Math.min(32, Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) : result.uploadConcurrency; result.uploadChunkSize = !isNaN(Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"])) ? 
Math.min(128 * 1024 * 1024, Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"]) * 1024 * 1024) : result.uploadChunkSize; - core17.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core17.debug(`Upload concurrency: ${result.uploadConcurrency}`); - core17.debug(`Upload chunk size: ${result.uploadChunkSize}`); + core15.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core15.debug(`Upload concurrency: ${result.uploadConcurrency}`); + core15.debug(`Upload chunk size: ${result.uploadChunkSize}`); return result; } exports2.getUploadOptions = getUploadOptions; @@ -74399,12 +74399,12 @@ var require_options = __commonJS({ if (segmentDownloadTimeoutMins && !isNaN(Number(segmentDownloadTimeoutMins)) && isFinite(Number(segmentDownloadTimeoutMins))) { result.segmentTimeoutInMs = Number(segmentDownloadTimeoutMins) * 60 * 1e3; } - core17.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core17.debug(`Download concurrency: ${result.downloadConcurrency}`); - core17.debug(`Request timeout (ms): ${result.timeoutInMs}`); - core17.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); - core17.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); - core17.debug(`Lookup only: ${result.lookupOnly}`); + core15.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core15.debug(`Download concurrency: ${result.downloadConcurrency}`); + core15.debug(`Request timeout (ms): ${result.timeoutInMs}`); + core15.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); + core15.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); + core15.debug(`Lookup only: ${result.lookupOnly}`); return result; } exports2.getDownloadOptions = getDownloadOptions; @@ -74584,7 +74584,7 @@ var require_cacheHttpClient = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.saveCache = exports2.reserveCache = exports2.downloadCache = exports2.getCacheEntry = void 0; - var core17 = __importStar4(require_core()); + var core15 = __importStar4(require_core()); var http_client_1 = require_lib(); var auth_1 = require_auth(); var fs20 = __importStar4(require("fs")); @@ -74602,7 +74602,7 @@ var require_cacheHttpClient = __commonJS({ throw new Error("Cache Service Url not found, unable to restore cache."); } const url2 = `${baseUrl}_apis/artifactcache/${resource}`; - core17.debug(`Resource Url: ${url2}`); + core15.debug(`Resource Url: ${url2}`); return url2; } function createAcceptHeader(type2, apiVersion) { @@ -74630,7 +74630,7 @@ var require_cacheHttpClient = __commonJS({ return httpClient.getJson(getCacheApiUrl(resource)); })); if (response.statusCode === 204) { - if (core17.isDebug()) { + if (core15.isDebug()) { yield printCachesListForDiagnostics(keys[0], httpClient, version); } return null; @@ -74643,9 +74643,9 @@ var require_cacheHttpClient = __commonJS({ if (!cacheDownloadUrl) { throw new Error("Cache not found."); } - core17.setSecret(cacheDownloadUrl); - core17.debug(`Cache Result:`); - core17.debug(JSON.stringify(cacheResult)); + core15.setSecret(cacheDownloadUrl); + core15.debug(`Cache Result:`); + core15.debug(JSON.stringify(cacheResult)); return cacheResult; }); } @@ -74660,10 +74660,10 @@ var require_cacheHttpClient = __commonJS({ const cacheListResult = response.result; const totalCount = cacheListResult === null || cacheListResult === void 0 ? 
void 0 : cacheListResult.totalCount; if (totalCount && totalCount > 0) { - core17.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key + core15.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key Other caches with similar key:`); for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) { - core17.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); + core15.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); } } } @@ -74708,7 +74708,7 @@ Other caches with similar key:`); } function uploadChunk(httpClient, resourceUrl, openStream, start, end) { return __awaiter4(this, void 0, void 0, function* () { - core17.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); + core15.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); const additionalHeaders = { "Content-Type": "application/octet-stream", "Content-Range": getContentRange(start, end) @@ -74730,7 +74730,7 @@ Other caches with similar key:`); const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency); const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize); const parallelUploads = [...new Array(concurrency).keys()]; - core17.debug("Awaiting all uploads"); + core15.debug("Awaiting all uploads"); let offset = 0; try { yield Promise.all(parallelUploads.map(() => __awaiter4(this, void 0, void 0, function* () { @@ -74773,16 +74773,16 @@ Other caches with similar key:`); yield (0, uploadUtils_1.uploadCacheArchiveSDK)(signedUploadURL, archivePath, options); } else { const httpClient = createHttpClient(); - core17.debug("Upload cache"); + core15.debug("Upload cache"); yield uploadFile(httpClient, cacheId, archivePath, options); - core17.debug("Commiting cache"); + core15.debug("Commiting cache"); const cacheSize = utils.getArchiveFileSizeInBytes(archivePath); - core17.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); + core15.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); const commitCacheResponse = yield commitCache(httpClient, 
cacheId, cacheSize); if (!(0, requestUtils_1.isSuccessStatusCode)(commitCacheResponse.statusCode)) { throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`); } - core17.info("Cache saved successfully"); + core15.info("Cache saved successfully"); } }); } @@ -80234,7 +80234,7 @@ var require_cache3 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.saveCache = exports2.restoreCache = exports2.isFeatureAvailable = exports2.FinalizeCacheError = exports2.ReserveCacheError = exports2.ValidationError = void 0; - var core17 = __importStar4(require_core()); + var core15 = __importStar4(require_core()); var path20 = __importStar4(require("path")); var utils = __importStar4(require_cacheUtils()); var cacheHttpClient = __importStar4(require_cacheHttpClient()); @@ -80294,7 +80294,7 @@ var require_cache3 = __commonJS({ function restoreCache4(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { return __awaiter4(this, void 0, void 0, function* () { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core17.debug(`Cache service version: ${cacheServiceVersion}`); + core15.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); switch (cacheServiceVersion) { case "v2": @@ -80310,8 +80310,8 @@ var require_cache3 = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core17.debug("Resolved Keys:"); - core17.debug(JSON.stringify(keys)); + core15.debug("Resolved Keys:"); + core15.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -80329,19 +80329,19 @@ var require_cache3 = __commonJS({ return void 0; } if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { - core17.info("Lookup only - skipping download"); + core15.info("Lookup only - skipping download"); return cacheEntry.cacheKey; } archivePath = path20.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core17.debug(`Archive Path: ${archivePath}`); + core15.debug(`Archive Path: ${archivePath}`); yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); - if (core17.isDebug()) { + if (core15.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core17.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + core15.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); yield (0, tar_1.extractTar)(archivePath, compressionMethod); - core17.info("Cache restored successfully"); + core15.info("Cache restored successfully"); return cacheEntry.cacheKey; } catch (error2) { const typedError = error2; @@ -80349,16 +80349,16 @@ var require_cache3 = __commonJS({ throw error2; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core17.error(`Failed to restore: ${error2.message}`); + core15.error(`Failed to restore: ${error2.message}`); } else { - core17.warning(`Failed to restore: ${error2.message}`); + core15.warning(`Failed to restore: ${error2.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error2) { - core17.debug(`Failed to delete archive: ${error2}`); + core15.debug(`Failed to delete archive: ${error2}`); } } return void 0; @@ -80369,8 +80369,8 @@ var require_cache3 = __commonJS({ options = Object.assign(Object.assign({}, options), { useAzureSdk: true }); restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core17.debug("Resolved Keys:"); - core17.debug(JSON.stringify(keys)); + core15.debug("Resolved Keys:"); + core15.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -80388,30 +80388,30 @@ var require_cache3 = __commonJS({ }; const response = yield twirpClient.GetCacheEntryDownloadURL(request); if (!response.ok) { - core17.debug(`Cache not found for version ${request.version} of keys: ${keys.join(", ")}`); + core15.debug(`Cache not found for version ${request.version} of keys: ${keys.join(", ")}`); return void 0; } const isRestoreKeyMatch = request.key !== response.matchedKey; if (isRestoreKeyMatch) { - core17.info(`Cache hit for restore-key: ${response.matchedKey}`); + core15.info(`Cache hit for restore-key: ${response.matchedKey}`); } else { - core17.info(`Cache hit for: ${response.matchedKey}`); + core15.info(`Cache hit for: ${response.matchedKey}`); } if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { - core17.info("Lookup only - skipping download"); + core15.info("Lookup only - skipping download"); return response.matchedKey; } archivePath = path20.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core17.debug(`Archive path: ${archivePath}`); - core17.debug(`Starting download of archive to: ${archivePath}`); + core15.debug(`Archive path: ${archivePath}`); + core15.debug(`Starting download of archive to: ${archivePath}`); yield cacheHttpClient.downloadCache(response.signedDownloadUrl, archivePath, options); const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core17.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); - if (core17.isDebug()) { + core15.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + if (core15.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } yield (0, tar_1.extractTar)(archivePath, compressionMethod); - core17.info("Cache restored successfully"); + core15.info("Cache restored successfully"); return response.matchedKey; } catch (error2) { const typedError = error2; @@ -80419,9 +80419,9 @@ var require_cache3 = __commonJS({ throw error2; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core17.error(`Failed to restore: ${error2.message}`); + core15.error(`Failed to restore: ${error2.message}`); } else { - core17.warning(`Failed to restore: ${error2.message}`); + core15.warning(`Failed to restore: ${error2.message}`); } } } finally { @@ -80430,7 +80430,7 @@ var require_cache3 = __commonJS({ yield utils.unlinkFile(archivePath); } } catch (error2) { - core17.debug(`Failed to delete archive: ${error2}`); + core15.debug(`Failed to delete archive: ${error2}`); } } return void 0; @@ -80439,7 +80439,7 @@ var require_cache3 = __commonJS({ function saveCache4(paths, key, options, enableCrossOsArchive = false) { return __awaiter4(this, void 0, void 0, function* () { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core17.debug(`Cache service version: ${cacheServiceVersion}`); + core15.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); checkKey(key); switch (cacheServiceVersion) { @@ -80458,26 +80458,26 @@ var require_cache3 = __commonJS({ const compressionMethod = yield utils.getCompressionMethod(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core17.debug("Cache Paths:"); - core17.debug(`${JSON.stringify(cachePaths)}`); + core15.debug("Cache Paths:"); + core15.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = path20.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core17.debug(`Archive Path: ${archivePath}`); + core15.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core17.isDebug()) { + if (core15.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const fileSizeLimit = 10 * 1024 * 1024 * 1024; const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core17.debug(`File Size: ${archiveFileSize}`); + core15.debug(`File Size: ${archiveFileSize}`); if 
(archiveFileSize > fileSizeLimit && !(0, config_1.isGhes)()) { throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); } - core17.debug("Reserving Cache"); + core15.debug("Reserving Cache"); const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, { compressionMethod, enableCrossOsArchive, @@ -80490,26 +80490,26 @@ var require_cache3 = __commonJS({ } else { throw new ReserveCacheError2(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? void 0 : _e.message}`); } - core17.debug(`Saving Cache (ID: ${cacheId})`); + core15.debug(`Saving Cache (ID: ${cacheId})`); yield cacheHttpClient.saveCache(cacheId, archivePath, "", options); } catch (error2) { const typedError = error2; if (typedError.name === ValidationError.name) { throw error2; } else if (typedError.name === ReserveCacheError2.name) { - core17.info(`Failed to save: ${typedError.message}`); + core15.info(`Failed to save: ${typedError.message}`); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core17.error(`Failed to save: ${typedError.message}`); + core15.error(`Failed to save: ${typedError.message}`); } else { - core17.warning(`Failed to save: ${typedError.message}`); + core15.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error2) { - core17.debug(`Failed to delete archive: ${error2}`); + core15.debug(`Failed to delete archive: ${error2}`); } } return cacheId; @@ -80522,23 +80522,23 @@ var require_cache3 = __commonJS({ const twirpClient = cacheTwirpClient.internalCacheTwirpClient(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core17.debug("Cache Paths:"); - core17.debug(`${JSON.stringify(cachePaths)}`); + core15.debug("Cache Paths:"); + core15.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = path20.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core17.debug(`Archive Path: ${archivePath}`); + core15.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core17.isDebug()) { + if (core15.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core17.debug(`File Size: ${archiveFileSize}`); + core15.debug(`File Size: ${archiveFileSize}`); options.archiveSizeBytes = archiveFileSize; - core17.debug("Reserving Cache"); + core15.debug("Reserving Cache"); const version = utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive); const request = { key, @@ -80549,16 +80549,16 @@ var require_cache3 = __commonJS({ const response = yield twirpClient.CreateCacheEntry(request); if (!response.ok) { if (response.message) { - core17.warning(`Cache reservation failed: ${response.message}`); + core15.warning(`Cache reservation failed: ${response.message}`); } throw new Error(response.message || "Response was not ok"); } signedUploadUrl = 
response.signedUploadUrl; } catch (error2) { - core17.debug(`Failed to reserve cache: ${error2}`); + core15.debug(`Failed to reserve cache: ${error2}`); throw new ReserveCacheError2(`Unable to reserve cache with key ${key}, another job may be creating this cache.`); } - core17.debug(`Attempting to upload cache located at: ${archivePath}`); + core15.debug(`Attempting to upload cache located at: ${archivePath}`); yield cacheHttpClient.saveCache(cacheId, archivePath, signedUploadUrl, options); const finalizeRequest = { key, @@ -80566,7 +80566,7 @@ var require_cache3 = __commonJS({ sizeBytes: `${archiveFileSize}` }; const finalizeResponse = yield twirpClient.FinalizeCacheEntryUpload(finalizeRequest); - core17.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); + core15.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); if (!finalizeResponse.ok) { if (finalizeResponse.message) { throw new FinalizeCacheError(finalizeResponse.message); @@ -80579,21 +80579,21 @@ var require_cache3 = __commonJS({ if (typedError.name === ValidationError.name) { throw error2; } else if (typedError.name === ReserveCacheError2.name) { - core17.info(`Failed to save: ${typedError.message}`); + core15.info(`Failed to save: ${typedError.message}`); } else if (typedError.name === FinalizeCacheError.name) { - core17.warning(typedError.message); + core15.warning(typedError.message); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core17.error(`Failed to save: ${typedError.message}`); + core15.error(`Failed to save: ${typedError.message}`); } else { - core17.warning(`Failed to save: ${typedError.message}`); + core15.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error2) { - core17.debug(`Failed to delete archive: ${error2}`); + core15.debug(`Failed to delete archive: ${error2}`); } } return cacheId; @@ -80801,7 +80801,7 @@ var require_retry_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.RetryHelper = void 0; - var core17 = __importStar4(require_core()); + var core15 = __importStar4(require_core()); var RetryHelper = class { constructor(maxAttempts, minSeconds, maxSeconds) { if (maxAttempts < 1) { @@ -80824,10 +80824,10 @@ var require_retry_helper = __commonJS({ if (isRetryable && !isRetryable(err)) { throw err; } - core17.info(err.message); + core15.info(err.message); } const seconds = this.getSleepAmount(); - core17.info(`Waiting ${seconds} seconds before trying again`); + core15.info(`Waiting ${seconds} seconds before trying again`); yield this.sleep(seconds); attempt++; } @@ -80907,7 +80907,7 @@ var require_tool_cache = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.evaluateVersions = exports2.isExplicitVersion = exports2.findFromManifest = exports2.getManifestFromRepo = exports2.findAllVersions = exports2.find = exports2.cacheFile = exports2.cacheDir = exports2.extractZip = exports2.extractXar = exports2.extractTar = exports2.extract7z = exports2.downloadTool = exports2.HTTPError = void 0; - var core17 = __importStar4(require_core()); + var core15 = __importStar4(require_core()); var io7 = __importStar4(require_io()); var crypto2 = __importStar4(require("crypto")); var fs20 = __importStar4(require("fs")); @@ -80936,8 +80936,8 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { dest = dest || 
path20.join(_getTempDirectory(), crypto2.randomUUID()); yield io7.mkdirP(path20.dirname(dest)); - core17.debug(`Downloading ${url2}`); - core17.debug(`Destination ${dest}`); + core15.debug(`Downloading ${url2}`); + core15.debug(`Destination ${dest}`); const maxAttempts = 3; const minSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS", 10); const maxSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS", 20); @@ -80964,7 +80964,7 @@ var require_tool_cache = __commonJS({ allowRetries: false }); if (auth) { - core17.debug("set auth"); + core15.debug("set auth"); if (headers === void 0) { headers = {}; } @@ -80973,7 +80973,7 @@ var require_tool_cache = __commonJS({ const response = yield http.get(url2, headers); if (response.message.statusCode !== 200) { const err = new HTTPError2(response.message.statusCode); - core17.debug(`Failed to download from "${url2}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); + core15.debug(`Failed to download from "${url2}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); throw err; } const pipeline = util.promisify(stream2.pipeline); @@ -80982,16 +80982,16 @@ var require_tool_cache = __commonJS({ let succeeded = false; try { yield pipeline(readStream, fs20.createWriteStream(dest)); - core17.debug("download complete"); + core15.debug("download complete"); succeeded = true; return dest; } finally { if (!succeeded) { - core17.debug("download failed"); + core15.debug("download failed"); try { yield io7.rmRF(dest); } catch (err) { - core17.debug(`Failed to delete '${dest}'. ${err.message}`); + core15.debug(`Failed to delete '${dest}'. ${err.message}`); } } } @@ -81006,7 +81006,7 @@ var require_tool_cache = __commonJS({ process.chdir(dest); if (_7zPath) { try { - const logLevel = core17.isDebug() ? "-bb1" : "-bb0"; + const logLevel = core15.isDebug() ? 
"-bb1" : "-bb0"; const args = [ "x", logLevel, @@ -81056,7 +81056,7 @@ var require_tool_cache = __commonJS({ throw new Error("parameter 'file' is required"); } dest = yield _createExtractFolder(dest); - core17.debug("Checking tar --version"); + core15.debug("Checking tar --version"); let versionOutput = ""; yield (0, exec_1.exec)("tar --version", [], { ignoreReturnCode: true, @@ -81066,7 +81066,7 @@ var require_tool_cache = __commonJS({ stderr: (data) => versionOutput += data.toString() } }); - core17.debug(versionOutput.trim()); + core15.debug(versionOutput.trim()); const isGnuTar = versionOutput.toUpperCase().includes("GNU TAR"); let args; if (flags instanceof Array) { @@ -81074,7 +81074,7 @@ var require_tool_cache = __commonJS({ } else { args = [flags]; } - if (core17.isDebug() && !flags.includes("v")) { + if (core15.isDebug() && !flags.includes("v")) { args.push("-v"); } let destArg = dest; @@ -81106,7 +81106,7 @@ var require_tool_cache = __commonJS({ args = [flags]; } args.push("-x", "-C", dest, "-f", file); - if (core17.isDebug()) { + if (core15.isDebug()) { args.push("-v"); } const xarPath = yield io7.which("xar", true); @@ -81151,7 +81151,7 @@ var require_tool_cache = __commonJS({ "-Command", pwshCommand ]; - core17.debug(`Using pwsh at path: ${pwshPath}`); + core15.debug(`Using pwsh at path: ${pwshPath}`); yield (0, exec_1.exec)(`"${pwshPath}"`, args); } else { const powershellCommand = [ @@ -81171,7 +81171,7 @@ var require_tool_cache = __commonJS({ powershellCommand ]; const powershellPath = yield io7.which("powershell", true); - core17.debug(`Using powershell at path: ${powershellPath}`); + core15.debug(`Using powershell at path: ${powershellPath}`); yield (0, exec_1.exec)(`"${powershellPath}"`, args); } }); @@ -81180,7 +81180,7 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { const unzipPath = yield io7.which("unzip", true); const args = [file]; - if (!core17.isDebug()) { + if (!core15.isDebug()) { args.unshift("-q"); } args.unshift("-o"); @@ -81191,8 +81191,8 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { version = semver8.clean(version) || version; arch2 = arch2 || os5.arch(); - core17.debug(`Caching tool ${tool} ${version} ${arch2}`); - core17.debug(`source dir: ${sourceDir}`); + core15.debug(`Caching tool ${tool} ${version} ${arch2}`); + core15.debug(`source dir: ${sourceDir}`); if (!fs20.statSync(sourceDir).isDirectory()) { throw new Error("sourceDir is not a directory"); } @@ -81210,14 +81210,14 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { version = semver8.clean(version) || version; arch2 = arch2 || os5.arch(); - core17.debug(`Caching tool ${tool} ${version} ${arch2}`); - core17.debug(`source file: ${sourceFile}`); + core15.debug(`Caching tool ${tool} ${version} ${arch2}`); + core15.debug(`source file: ${sourceFile}`); if (!fs20.statSync(sourceFile).isFile()) { throw new Error("sourceFile is not a file"); } const destFolder = yield _createToolPath(tool, version, arch2); const destPath = path20.join(destFolder, targetFile); - core17.debug(`destination file ${destPath}`); + core15.debug(`destination file ${destPath}`); yield io7.cp(sourceFile, destPath); _completeToolPath(tool, version, arch2); return destFolder; @@ -81241,12 +81241,12 @@ var require_tool_cache = __commonJS({ if (versionSpec) { versionSpec = semver8.clean(versionSpec) || ""; const cachePath = path20.join(_getCacheDirectory(), toolName, versionSpec, arch2); - 
core17.debug(`checking cache: ${cachePath}`); + core15.debug(`checking cache: ${cachePath}`); if (fs20.existsSync(cachePath) && fs20.existsSync(`${cachePath}.complete`)) { - core17.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); + core15.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); toolPath = cachePath; } else { - core17.debug("not found"); + core15.debug("not found"); } } return toolPath; @@ -81277,7 +81277,7 @@ var require_tool_cache = __commonJS({ const http = new httpm.HttpClient("tool-cache"); const headers = {}; if (auth) { - core17.debug("set auth"); + core15.debug("set auth"); headers.authorization = auth; } const response = yield http.getJson(treeUrl, headers); @@ -81298,7 +81298,7 @@ var require_tool_cache = __commonJS({ try { releases = JSON.parse(versionsRaw); } catch (_a) { - core17.debug("Invalid json"); + core15.debug("Invalid json"); } } return releases; @@ -81324,7 +81324,7 @@ var require_tool_cache = __commonJS({ function _createToolPath(tool, version, arch2) { return __awaiter4(this, void 0, void 0, function* () { const folderPath = path20.join(_getCacheDirectory(), tool, semver8.clean(version) || version, arch2 || ""); - core17.debug(`destination ${folderPath}`); + core15.debug(`destination ${folderPath}`); const markerPath = `${folderPath}.complete`; yield io7.rmRF(folderPath); yield io7.rmRF(markerPath); @@ -81336,19 +81336,19 @@ var require_tool_cache = __commonJS({ const folderPath = path20.join(_getCacheDirectory(), tool, semver8.clean(version) || version, arch2 || ""); const markerPath = `${folderPath}.complete`; fs20.writeFileSync(markerPath, ""); - core17.debug("finished caching tool"); + core15.debug("finished caching tool"); } function isExplicitVersion(versionSpec) { const c = semver8.clean(versionSpec) || ""; - core17.debug(`isExplicit: ${c}`); + core15.debug(`isExplicit: ${c}`); const valid3 = semver8.valid(c) != null; - core17.debug(`explicit? ${valid3}`); + core15.debug(`explicit? 
${valid3}`); return valid3; } exports2.isExplicitVersion = isExplicitVersion; function evaluateVersions(versions, versionSpec) { let version = ""; - core17.debug(`evaluating ${versions.length} versions`); + core15.debug(`evaluating ${versions.length} versions`); versions = versions.sort((a, b) => { if (semver8.gt(a, b)) { return 1; @@ -81364,9 +81364,9 @@ var require_tool_cache = __commonJS({ } } if (version) { - core17.debug(`matched: ${version}`); + core15.debug(`matched: ${version}`); } else { - core17.debug("match not found"); + core15.debug("match not found"); } return version; } @@ -81975,7 +81975,7 @@ var require_internal_glob_options_helper2 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getOptions = void 0; - var core17 = __importStar4(require_core()); + var core15 = __importStar4(require_core()); function getOptions(copy) { const result = { followSymbolicLinks: true, @@ -81987,23 +81987,23 @@ var require_internal_glob_options_helper2 = __commonJS({ if (copy) { if (typeof copy.followSymbolicLinks === "boolean") { result.followSymbolicLinks = copy.followSymbolicLinks; - core17.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); + core15.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); } if (typeof copy.implicitDescendants === "boolean") { result.implicitDescendants = copy.implicitDescendants; - core17.debug(`implicitDescendants '${result.implicitDescendants}'`); + core15.debug(`implicitDescendants '${result.implicitDescendants}'`); } if (typeof copy.matchDirectories === "boolean") { result.matchDirectories = copy.matchDirectories; - core17.debug(`matchDirectories '${result.matchDirectories}'`); + core15.debug(`matchDirectories '${result.matchDirectories}'`); } if (typeof copy.omitBrokenSymbolicLinks === "boolean") { result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; - core17.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); + core15.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); } if (typeof copy.excludeHiddenFiles === "boolean") { result.excludeHiddenFiles = copy.excludeHiddenFiles; - core17.debug(`excludeHiddenFiles '${result.excludeHiddenFiles}'`); + core15.debug(`excludeHiddenFiles '${result.excludeHiddenFiles}'`); } } return result; @@ -82687,7 +82687,7 @@ var require_internal_globber2 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; - var core17 = __importStar4(require_core()); + var core15 = __importStar4(require_core()); var fs20 = __importStar4(require("fs")); var globOptionsHelper = __importStar4(require_internal_glob_options_helper2()); var path20 = __importStar4(require("path")); @@ -82740,7 +82740,7 @@ var require_internal_globber2 = __commonJS({ } const stack = []; for (const searchPath of patternHelper.getSearchPaths(patterns)) { - core17.debug(`Search path '${searchPath}'`); + core15.debug(`Search path '${searchPath}'`); try { yield __await4(fs20.promises.lstat(searchPath)); } catch (err) { @@ -82815,7 +82815,7 @@ var require_internal_globber2 = __commonJS({ } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { - core17.debug(`Broken symlink '${item.path}'`); + core15.debug(`Broken symlink '${item.path}'`); return void 0; } throw new Error(`No information found for the path '${item.path}'. 
This may indicate a broken symbolic link.`); @@ -82831,7 +82831,7 @@ var require_internal_globber2 = __commonJS({ traversalChain.pop(); } if (traversalChain.some((x) => x === realPath)) { - core17.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); + core15.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); return void 0; } traversalChain.push(realPath); @@ -82924,7 +82924,7 @@ var require_internal_hash_files = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.hashFiles = void 0; var crypto2 = __importStar4(require("crypto")); - var core17 = __importStar4(require_core()); + var core15 = __importStar4(require_core()); var fs20 = __importStar4(require("fs")); var stream2 = __importStar4(require("stream")); var util = __importStar4(require("util")); @@ -82933,7 +82933,7 @@ var require_internal_hash_files = __commonJS({ var _a, e_1, _b, _c; var _d; return __awaiter4(this, void 0, void 0, function* () { - const writeDelegate = verbose ? core17.info : core17.debug; + const writeDelegate = verbose ? core15.info : core15.debug; let hasMatch = false; const githubWorkspace = currentWorkspace ? currentWorkspace : (_d = process.env["GITHUB_WORKSPACE"]) !== null && _d !== void 0 ? _d : process.cwd(); const result = crypto2.createHash("sha256"); @@ -85944,7 +85944,7 @@ module.exports = __toCommonJS(analyze_action_exports); var fs19 = __toESM(require("fs")); var import_path4 = __toESM(require("path")); var import_perf_hooks3 = require("perf_hooks"); -var core16 = __toESM(require_core()); +var core14 = __toESM(require_core()); // src/actions-util.ts var fs5 = __toESM(require("fs")); @@ -94350,7 +94350,7 @@ var fs18 = __toESM(require("fs")); var path18 = __toESM(require("path")); var url = __toESM(require("url")); var import_zlib = __toESM(require("zlib")); -var core15 = __toESM(require_core()); +var core13 = __toESM(require_core()); var jsonschema2 = __toESM(require_lib2()); // src/fingerprints.ts @@ -95476,28 +95476,8 @@ async function addFingerprints(sarif, sourceRoot, logger) { } // src/init.ts -var core14 = __toESM(require_core()); var toolrunner4 = __toESM(require_toolrunner()); var io6 = __toESM(require_io()); - -// src/workflow.ts -var core13 = __toESM(require_core()); -function toCodedErrors(errors) { - return Object.entries(errors).reduce( - (acc, [code, message]) => { - acc[code] = { message, code }; - return acc; - }, - {} - ); -} -var WorkflowErrors = toCodedErrors({ - MissingPushHook: `Please specify an on.push hook to analyze and see code scanning alerts from the default branch on the Security tab.`, - CheckoutWrongHead: `git checkout HEAD^2 is no longer necessary. Please remove this step as Code Scanning recommends analyzing the merge commit for best results.`, - InconsistentActionVersion: `Not all workflow steps that use \`github/codeql-action\` actions use the same version. Please ensure that all such steps use the same version to avoid compatibility issues.` -}); - -// src/init.ts async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger) { logger.startGroup("Setup CodeQL tools"); const { @@ -95627,7 +95607,7 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo logger.warning( `Uploading multiple SARIF runs with the same category is deprecated ${deprecationWarningMessage}. Please update your workflow to upload a single run per category. 
${deprecationMoreInformationMessage}` ); - core15.exportVariable("CODEQL_MERGE_SARIF_DEPRECATION_WARNING", "true"); + core13.exportVariable("CODEQL_MERGE_SARIF_DEPRECATION_WARNING", "true"); } return combineSarifFiles(sarifFiles, logger); } @@ -95726,13 +95706,13 @@ async function uploadPayload(payload, repositoryNwo, logger, analysis) { if (httpError !== void 0) { switch (httpError.status) { case 403: - core15.warning(httpError.message || GENERIC_403_MSG); + core13.warning(httpError.message || GENERIC_403_MSG); break; case 404: - core15.warning(httpError.message || GENERIC_404_MSG); + core13.warning(httpError.message || GENERIC_404_MSG); break; default: - core15.warning(httpError.message); + core13.warning(httpError.message); break; } } @@ -95862,9 +95842,9 @@ function validateSarifFileSchema(sarif, sarifFilePath, logger) { const warnings = (result.errors ?? []).filter( (err) => err.name === "format" && typeof err.argument === "string" && warningAttributes.includes(err.argument) ); - for (const warning8 of warnings) { + for (const warning7 of warnings) { logger.info( - `Warning: '${warning8.instance}' is not a valid URI in '${warning8.property}'.` + `Warning: '${warning7.instance}' is not a valid URI in '${warning7.property}'.` ); } if (errors.length > 0) { @@ -96163,7 +96143,7 @@ function validateUniqueCategory(sarif, sentinelPrefix) { `Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job per tool/category. The easiest fix is to specify a unique value for the \`category\` input. If .runs[].automationDetails.id is specified in the sarif file, that will take precedence over your configured \`category\`. Category: (${id ? id : "none"}) Tool: (${tool ? tool : "none"})` ); } - core15.exportVariable(sentinelEnvVar, sentinelEnvVar); + core13.exportVariable(sentinelEnvVar, sentinelEnvVar); } } function sanitize(str2) { @@ -96381,7 +96361,7 @@ async function run() { } const apiDetails = getApiDetails(); const outputDir = getRequiredInput("output"); - core16.exportVariable("CODEQL_ACTION_SARIF_RESULTS_OUTPUT_DIR" /* SARIF_RESULTS_OUTPUT_DIR */, outputDir); + core14.exportVariable("CODEQL_ACTION_SARIF_RESULTS_OUTPUT_DIR" /* SARIF_RESULTS_OUTPUT_DIR */, outputDir); const threads = getThreadsFlag( getOptionalInput("threads") || process.env["CODEQL_THREADS"], logger @@ -96433,8 +96413,8 @@ async function run() { for (const language of config.languages) { dbLocations[language] = getCodeQLDatabasePath(config, language); } - core16.setOutput("db-locations", dbLocations); - core16.setOutput("sarif-output", import_path4.default.resolve(outputDir)); + core14.setOutput("db-locations", dbLocations); + core14.setOutput("sarif-output", import_path4.default.resolve(outputDir)); const uploadKind = getUploadValue( getOptionalInput("upload") ); @@ -96478,13 +96458,13 @@ async function run() { logger.info("Not uploading results"); } if (uploadResults["code-scanning" /* CodeScanning */] !== void 0) { - core16.setOutput( + core14.setOutput( "sarif-id", uploadResults["code-scanning" /* CodeScanning */].sarifID ); } if (uploadResults["code-quality" /* CodeQuality */] !== void 0) { - core16.setOutput( + core14.setOutput( "quality-sarif-id", uploadResults["code-quality" /* CodeQuality */].sarifID ); @@ -96523,15 +96503,15 @@ async function run() { ); } if (getOptionalInput("expect-error") === "true") { - core16.setFailed( + core14.setFailed( `expect-error input was set to true but no error was thrown.` ); } - 
core16.exportVariable("CODEQL_ACTION_ANALYZE_DID_COMPLETE_SUCCESSFULLY" /* ANALYZE_DID_COMPLETE_SUCCESSFULLY */, "true"); + core14.exportVariable("CODEQL_ACTION_ANALYZE_DID_COMPLETE_SUCCESSFULLY" /* ANALYZE_DID_COMPLETE_SUCCESSFULLY */, "true"); } catch (unwrappedError) { const error2 = wrapError(unwrappedError); if (getOptionalInput("expect-error") !== "true" || hasBadExpectErrorInput()) { - core16.setFailed(error2.message); + core14.setFailed(error2.message); } await sendStatusReport2( startedAt, @@ -96596,7 +96576,7 @@ async function runWrapper() { try { await runPromise; } catch (error2) { - core16.setFailed(`analyze action failed: ${getErrorMessage(error2)}`); + core14.setFailed(`analyze action failed: ${getErrorMessage(error2)}`); } await checkForTimeout(); } diff --git a/lib/init-action-post.js b/lib/init-action-post.js index d24c949014..0ed7b0dd62 100644 --- a/lib/init-action-post.js +++ b/lib/init-action-post.js @@ -35312,7 +35312,7 @@ var require_internal_glob_options_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getOptions = void 0; - var core19 = __importStar4(require_core()); + var core18 = __importStar4(require_core()); function getOptions(copy) { const result = { followSymbolicLinks: true, @@ -35322,15 +35322,15 @@ var require_internal_glob_options_helper = __commonJS({ if (copy) { if (typeof copy.followSymbolicLinks === "boolean") { result.followSymbolicLinks = copy.followSymbolicLinks; - core19.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); + core18.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); } if (typeof copy.implicitDescendants === "boolean") { result.implicitDescendants = copy.implicitDescendants; - core19.debug(`implicitDescendants '${result.implicitDescendants}'`); + core18.debug(`implicitDescendants '${result.implicitDescendants}'`); } if (typeof copy.omitBrokenSymbolicLinks === "boolean") { result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; - core19.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); + core18.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); } } return result; @@ -36784,7 +36784,7 @@ var require_internal_globber = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; - var core19 = __importStar4(require_core()); + var core18 = __importStar4(require_core()); var fs20 = __importStar4(require("fs")); var globOptionsHelper = __importStar4(require_internal_glob_options_helper()); var path19 = __importStar4(require("path")); @@ -36835,7 +36835,7 @@ var require_internal_globber = __commonJS({ } const stack = []; for (const searchPath of patternHelper.getSearchPaths(patterns)) { - core19.debug(`Search path '${searchPath}'`); + core18.debug(`Search path '${searchPath}'`); try { yield __await4(fs20.promises.lstat(searchPath)); } catch (err) { @@ -36907,7 +36907,7 @@ var require_internal_globber = __commonJS({ } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { - core19.debug(`Broken symlink '${item.path}'`); + core18.debug(`Broken symlink '${item.path}'`); return void 0; } throw new Error(`No information found for the path '${item.path}'. 
This may indicate a broken symbolic link.`); @@ -36923,7 +36923,7 @@ var require_internal_globber = __commonJS({ traversalChain.pop(); } if (traversalChain.some((x) => x === realPath)) { - core19.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); + core18.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); return void 0; } traversalChain.push(realPath); @@ -38247,7 +38247,7 @@ var require_cacheUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getRuntimeToken = exports2.getCacheVersion = exports2.assertDefined = exports2.getGnuTarPathOnWindows = exports2.getCacheFileName = exports2.getCompressionMethod = exports2.unlinkFile = exports2.resolvePaths = exports2.getArchiveFileSizeInBytes = exports2.createTempDirectory = void 0; - var core19 = __importStar4(require_core()); + var core18 = __importStar4(require_core()); var exec2 = __importStar4(require_exec()); var glob2 = __importStar4(require_glob()); var io7 = __importStar4(require_io()); @@ -38300,7 +38300,7 @@ var require_cacheUtils = __commonJS({ _e = false; const file = _c; const relativeFile = path19.relative(workspace, file).replace(new RegExp(`\\${path19.sep}`, "g"), "/"); - core19.debug(`Matched: ${relativeFile}`); + core18.debug(`Matched: ${relativeFile}`); if (relativeFile === "") { paths.push("."); } else { @@ -38330,7 +38330,7 @@ var require_cacheUtils = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { let versionOutput = ""; additionalArgs.push("--version"); - core19.debug(`Checking ${app} ${additionalArgs.join(" ")}`); + core18.debug(`Checking ${app} ${additionalArgs.join(" ")}`); try { yield exec2.exec(`${app}`, additionalArgs, { ignoreReturnCode: true, @@ -38341,10 +38341,10 @@ var require_cacheUtils = __commonJS({ } }); } catch (err) { - core19.debug(err.message); + core18.debug(err.message); } versionOutput = versionOutput.trim(); - core19.debug(versionOutput); + core18.debug(versionOutput); return versionOutput; }); } @@ -38352,7 +38352,7 @@ var require_cacheUtils = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { const versionOutput = yield getVersion("zstd", ["--quiet"]); const version = semver8.clean(versionOutput); - core19.debug(`zstd version: ${version}`); + core18.debug(`zstd version: ${version}`); if (versionOutput === "") { return constants_1.CompressionMethod.Gzip; } else { @@ -73664,7 +73664,7 @@ var require_uploadUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.uploadCacheArchiveSDK = exports2.UploadProgress = void 0; - var core19 = __importStar4(require_core()); + var core18 = __importStar4(require_core()); var storage_blob_1 = require_dist7(); var errors_1 = require_errors2(); var UploadProgress = class { @@ -73706,7 +73706,7 @@ var require_uploadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const uploadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core19.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); + core18.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -73761,14 +73761,14 @@ var require_uploadUtils = __commonJS({ }; try { uploadProgress.startDisplayTimer(); - core19.debug(`BlobClient: 
${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); + core18.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); const response = yield blockBlobClient.uploadFile(archivePath, uploadOptions); if (response._response.status >= 400) { throw new errors_1.InvalidResponseError(`uploadCacheArchiveSDK: upload failed with status code ${response._response.status}`); } return response; } catch (error2) { - core19.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error2.message}`); + core18.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error2.message}`); throw error2; } finally { uploadProgress.stopDisplayTimer(); @@ -73839,7 +73839,7 @@ var require_requestUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.retryHttpClientResponse = exports2.retryTypedResponse = exports2.retry = exports2.isRetryableStatusCode = exports2.isServerErrorStatusCode = exports2.isSuccessStatusCode = void 0; - var core19 = __importStar4(require_core()); + var core18 = __importStar4(require_core()); var http_client_1 = require_lib(); var constants_1 = require_constants10(); function isSuccessStatusCode(statusCode) { @@ -73900,9 +73900,9 @@ var require_requestUtils = __commonJS({ isRetryable = isRetryableStatusCode(statusCode); errorMessage = `Cache service responded with ${statusCode}`; } - core19.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); + core18.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); if (!isRetryable) { - core19.debug(`${name} - Error is not retryable`); + core18.debug(`${name} - Error is not retryable`); break; } yield sleep(delay2); @@ -74007,7 +74007,7 @@ var require_downloadUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.downloadCacheStorageSDK = exports2.downloadCacheHttpClientConcurrent = exports2.downloadCacheHttpClient = exports2.DownloadProgress = void 0; - var core19 = __importStar4(require_core()); + var core18 = __importStar4(require_core()); var http_client_1 = require_lib(); var storage_blob_1 = require_dist7(); var buffer = __importStar4(require("buffer")); @@ -74045,7 +74045,7 @@ var require_downloadUtils = __commonJS({ this.segmentIndex = this.segmentIndex + 1; this.segmentSize = segmentSize; this.receivedBytes = 0; - core19.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); + core18.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); } /** * Sets the number of bytes received for the current segment. 
@@ -74079,7 +74079,7 @@ var require_downloadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const downloadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core19.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); + core18.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -74129,7 +74129,7 @@ var require_downloadUtils = __commonJS({ })); downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => { downloadResponse.message.destroy(); - core19.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); + core18.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); }); yield pipeResponseToStream(downloadResponse, writeStream); const contentLengthHeader = downloadResponse.message.headers["content-length"]; @@ -74140,7 +74140,7 @@ var require_downloadUtils = __commonJS({ throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`); } } else { - core19.debug("Unable to validate download, no Content-Length header"); + core18.debug("Unable to validate download, no Content-Length header"); } }); } @@ -74260,7 +74260,7 @@ var require_downloadUtils = __commonJS({ const properties = yield client.getProperties(); const contentLength = (_a = properties.contentLength) !== null && _a !== void 0 ? _a : -1; if (contentLength < 0) { - core19.debug("Unable to determine content length, downloading file with http-client..."); + core18.debug("Unable to determine content length, downloading file with http-client..."); yield downloadCacheHttpClient(archiveLocation, archivePath); } else { const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH); @@ -74340,7 +74340,7 @@ var require_options = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getDownloadOptions = exports2.getUploadOptions = void 0; - var core19 = __importStar4(require_core()); + var core18 = __importStar4(require_core()); function getUploadOptions(copy) { const result = { useAzureSdk: false, @@ -74360,9 +74360,9 @@ var require_options = __commonJS({ } result.uploadConcurrency = !isNaN(Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) ? Math.min(32, Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) : result.uploadConcurrency; result.uploadChunkSize = !isNaN(Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"])) ? 
Math.min(128 * 1024 * 1024, Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"]) * 1024 * 1024) : result.uploadChunkSize; - core19.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core19.debug(`Upload concurrency: ${result.uploadConcurrency}`); - core19.debug(`Upload chunk size: ${result.uploadChunkSize}`); + core18.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core18.debug(`Upload concurrency: ${result.uploadConcurrency}`); + core18.debug(`Upload chunk size: ${result.uploadChunkSize}`); return result; } exports2.getUploadOptions = getUploadOptions; @@ -74399,12 +74399,12 @@ var require_options = __commonJS({ if (segmentDownloadTimeoutMins && !isNaN(Number(segmentDownloadTimeoutMins)) && isFinite(Number(segmentDownloadTimeoutMins))) { result.segmentTimeoutInMs = Number(segmentDownloadTimeoutMins) * 60 * 1e3; } - core19.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core19.debug(`Download concurrency: ${result.downloadConcurrency}`); - core19.debug(`Request timeout (ms): ${result.timeoutInMs}`); - core19.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); - core19.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); - core19.debug(`Lookup only: ${result.lookupOnly}`); + core18.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core18.debug(`Download concurrency: ${result.downloadConcurrency}`); + core18.debug(`Request timeout (ms): ${result.timeoutInMs}`); + core18.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); + core18.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); + core18.debug(`Lookup only: ${result.lookupOnly}`); return result; } exports2.getDownloadOptions = getDownloadOptions; @@ -74584,7 +74584,7 @@ var require_cacheHttpClient = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.saveCache = exports2.reserveCache = exports2.downloadCache = exports2.getCacheEntry = void 0; - var core19 = __importStar4(require_core()); + var core18 = __importStar4(require_core()); var http_client_1 = require_lib(); var auth_1 = require_auth(); var fs20 = __importStar4(require("fs")); @@ -74602,7 +74602,7 @@ var require_cacheHttpClient = __commonJS({ throw new Error("Cache Service Url not found, unable to restore cache."); } const url2 = `${baseUrl}_apis/artifactcache/${resource}`; - core19.debug(`Resource Url: ${url2}`); + core18.debug(`Resource Url: ${url2}`); return url2; } function createAcceptHeader(type2, apiVersion) { @@ -74630,7 +74630,7 @@ var require_cacheHttpClient = __commonJS({ return httpClient.getJson(getCacheApiUrl(resource)); })); if (response.statusCode === 204) { - if (core19.isDebug()) { + if (core18.isDebug()) { yield printCachesListForDiagnostics(keys[0], httpClient, version); } return null; @@ -74643,9 +74643,9 @@ var require_cacheHttpClient = __commonJS({ if (!cacheDownloadUrl) { throw new Error("Cache not found."); } - core19.setSecret(cacheDownloadUrl); - core19.debug(`Cache Result:`); - core19.debug(JSON.stringify(cacheResult)); + core18.setSecret(cacheDownloadUrl); + core18.debug(`Cache Result:`); + core18.debug(JSON.stringify(cacheResult)); return cacheResult; }); } @@ -74660,10 +74660,10 @@ var require_cacheHttpClient = __commonJS({ const cacheListResult = response.result; const totalCount = cacheListResult === null || cacheListResult === void 0 ? 
void 0 : cacheListResult.totalCount; if (totalCount && totalCount > 0) { - core19.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key + core18.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key Other caches with similar key:`); for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) { - core19.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); + core18.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); } } } @@ -74708,7 +74708,7 @@ Other caches with similar key:`); } function uploadChunk(httpClient, resourceUrl, openStream, start, end) { return __awaiter4(this, void 0, void 0, function* () { - core19.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); + core18.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); const additionalHeaders = { "Content-Type": "application/octet-stream", "Content-Range": getContentRange(start, end) @@ -74730,7 +74730,7 @@ Other caches with similar key:`); const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency); const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize); const parallelUploads = [...new Array(concurrency).keys()]; - core19.debug("Awaiting all uploads"); + core18.debug("Awaiting all uploads"); let offset = 0; try { yield Promise.all(parallelUploads.map(() => __awaiter4(this, void 0, void 0, function* () { @@ -74773,16 +74773,16 @@ Other caches with similar key:`); yield (0, uploadUtils_1.uploadCacheArchiveSDK)(signedUploadURL, archivePath, options); } else { const httpClient = createHttpClient(); - core19.debug("Upload cache"); + core18.debug("Upload cache"); yield uploadFile(httpClient, cacheId, archivePath, options); - core19.debug("Commiting cache"); + core18.debug("Commiting cache"); const cacheSize = utils.getArchiveFileSizeInBytes(archivePath); - core19.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); + core18.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); const commitCacheResponse = yield commitCache(httpClient, 
cacheId, cacheSize); if (!(0, requestUtils_1.isSuccessStatusCode)(commitCacheResponse.statusCode)) { throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`); } - core19.info("Cache saved successfully"); + core18.info("Cache saved successfully"); } }); } @@ -80234,7 +80234,7 @@ var require_cache3 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.saveCache = exports2.restoreCache = exports2.isFeatureAvailable = exports2.FinalizeCacheError = exports2.ReserveCacheError = exports2.ValidationError = void 0; - var core19 = __importStar4(require_core()); + var core18 = __importStar4(require_core()); var path19 = __importStar4(require("path")); var utils = __importStar4(require_cacheUtils()); var cacheHttpClient = __importStar4(require_cacheHttpClient()); @@ -80294,7 +80294,7 @@ var require_cache3 = __commonJS({ function restoreCache4(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { return __awaiter4(this, void 0, void 0, function* () { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core19.debug(`Cache service version: ${cacheServiceVersion}`); + core18.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); switch (cacheServiceVersion) { case "v2": @@ -80310,8 +80310,8 @@ var require_cache3 = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core19.debug("Resolved Keys:"); - core19.debug(JSON.stringify(keys)); + core18.debug("Resolved Keys:"); + core18.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -80329,19 +80329,19 @@ var require_cache3 = __commonJS({ return void 0; } if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { - core19.info("Lookup only - skipping download"); + core18.info("Lookup only - skipping download"); return cacheEntry.cacheKey; } archivePath = path19.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core19.debug(`Archive Path: ${archivePath}`); + core18.debug(`Archive Path: ${archivePath}`); yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); - if (core19.isDebug()) { + if (core18.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core19.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + core18.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); yield (0, tar_1.extractTar)(archivePath, compressionMethod); - core19.info("Cache restored successfully"); + core18.info("Cache restored successfully"); return cacheEntry.cacheKey; } catch (error2) { const typedError = error2; @@ -80349,16 +80349,16 @@ var require_cache3 = __commonJS({ throw error2; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core19.error(`Failed to restore: ${error2.message}`); + core18.error(`Failed to restore: ${error2.message}`); } else { - core19.warning(`Failed to restore: ${error2.message}`); + core18.warning(`Failed to restore: ${error2.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error2) { - core19.debug(`Failed to delete archive: ${error2}`); + core18.debug(`Failed to delete archive: ${error2}`); } } return void 0; @@ -80369,8 +80369,8 @@ var require_cache3 = __commonJS({ options = Object.assign(Object.assign({}, options), { useAzureSdk: true }); restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core19.debug("Resolved Keys:"); - core19.debug(JSON.stringify(keys)); + core18.debug("Resolved Keys:"); + core18.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -80388,30 +80388,30 @@ var require_cache3 = __commonJS({ }; const response = yield twirpClient.GetCacheEntryDownloadURL(request); if (!response.ok) { - core19.debug(`Cache not found for version ${request.version} of keys: ${keys.join(", ")}`); + core18.debug(`Cache not found for version ${request.version} of keys: ${keys.join(", ")}`); return void 0; } const isRestoreKeyMatch = request.key !== response.matchedKey; if (isRestoreKeyMatch) { - core19.info(`Cache hit for restore-key: ${response.matchedKey}`); + core18.info(`Cache hit for restore-key: ${response.matchedKey}`); } else { - core19.info(`Cache hit for: ${response.matchedKey}`); + core18.info(`Cache hit for: ${response.matchedKey}`); } if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { - core19.info("Lookup only - skipping download"); + core18.info("Lookup only - skipping download"); return response.matchedKey; } archivePath = path19.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core19.debug(`Archive path: ${archivePath}`); - core19.debug(`Starting download of archive to: ${archivePath}`); + core18.debug(`Archive path: ${archivePath}`); + core18.debug(`Starting download of archive to: ${archivePath}`); yield cacheHttpClient.downloadCache(response.signedDownloadUrl, archivePath, options); const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core19.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); - if (core19.isDebug()) { + core18.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + if (core18.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } yield (0, tar_1.extractTar)(archivePath, compressionMethod); - core19.info("Cache restored successfully"); + core18.info("Cache restored successfully"); return response.matchedKey; } catch (error2) { const typedError = error2; @@ -80419,9 +80419,9 @@ var require_cache3 = __commonJS({ throw error2; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core19.error(`Failed to restore: ${error2.message}`); + core18.error(`Failed to restore: ${error2.message}`); } else { - core19.warning(`Failed to restore: ${error2.message}`); + core18.warning(`Failed to restore: ${error2.message}`); } } } finally { @@ -80430,7 +80430,7 @@ var require_cache3 = __commonJS({ yield utils.unlinkFile(archivePath); } } catch (error2) { - core19.debug(`Failed to delete archive: ${error2}`); + core18.debug(`Failed to delete archive: ${error2}`); } } return void 0; @@ -80439,7 +80439,7 @@ var require_cache3 = __commonJS({ function saveCache4(paths, key, options, enableCrossOsArchive = false) { return __awaiter4(this, void 0, void 0, function* () { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core19.debug(`Cache service version: ${cacheServiceVersion}`); + core18.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); checkKey(key); switch (cacheServiceVersion) { @@ -80458,26 +80458,26 @@ var require_cache3 = __commonJS({ const compressionMethod = yield utils.getCompressionMethod(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core19.debug("Cache Paths:"); - core19.debug(`${JSON.stringify(cachePaths)}`); + core18.debug("Cache Paths:"); + core18.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = path19.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core19.debug(`Archive Path: ${archivePath}`); + core18.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core19.isDebug()) { + if (core18.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const fileSizeLimit = 10 * 1024 * 1024 * 1024; const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core19.debug(`File Size: ${archiveFileSize}`); + core18.debug(`File Size: ${archiveFileSize}`); if 
(archiveFileSize > fileSizeLimit && !(0, config_1.isGhes)()) { throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); } - core19.debug("Reserving Cache"); + core18.debug("Reserving Cache"); const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, { compressionMethod, enableCrossOsArchive, @@ -80490,26 +80490,26 @@ var require_cache3 = __commonJS({ } else { throw new ReserveCacheError2(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? void 0 : _e.message}`); } - core19.debug(`Saving Cache (ID: ${cacheId})`); + core18.debug(`Saving Cache (ID: ${cacheId})`); yield cacheHttpClient.saveCache(cacheId, archivePath, "", options); } catch (error2) { const typedError = error2; if (typedError.name === ValidationError.name) { throw error2; } else if (typedError.name === ReserveCacheError2.name) { - core19.info(`Failed to save: ${typedError.message}`); + core18.info(`Failed to save: ${typedError.message}`); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core19.error(`Failed to save: ${typedError.message}`); + core18.error(`Failed to save: ${typedError.message}`); } else { - core19.warning(`Failed to save: ${typedError.message}`); + core18.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error2) { - core19.debug(`Failed to delete archive: ${error2}`); + core18.debug(`Failed to delete archive: ${error2}`); } } return cacheId; @@ -80522,23 +80522,23 @@ var require_cache3 = __commonJS({ const twirpClient = cacheTwirpClient.internalCacheTwirpClient(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core19.debug("Cache Paths:"); - core19.debug(`${JSON.stringify(cachePaths)}`); + core18.debug("Cache Paths:"); + core18.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = path19.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core19.debug(`Archive Path: ${archivePath}`); + core18.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core19.isDebug()) { + if (core18.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core19.debug(`File Size: ${archiveFileSize}`); + core18.debug(`File Size: ${archiveFileSize}`); options.archiveSizeBytes = archiveFileSize; - core19.debug("Reserving Cache"); + core18.debug("Reserving Cache"); const version = utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive); const request = { key, @@ -80549,16 +80549,16 @@ var require_cache3 = __commonJS({ const response = yield twirpClient.CreateCacheEntry(request); if (!response.ok) { if (response.message) { - core19.warning(`Cache reservation failed: ${response.message}`); + core18.warning(`Cache reservation failed: ${response.message}`); } throw new Error(response.message || "Response was not ok"); } signedUploadUrl = 
response.signedUploadUrl; } catch (error2) { - core19.debug(`Failed to reserve cache: ${error2}`); + core18.debug(`Failed to reserve cache: ${error2}`); throw new ReserveCacheError2(`Unable to reserve cache with key ${key}, another job may be creating this cache.`); } - core19.debug(`Attempting to upload cache located at: ${archivePath}`); + core18.debug(`Attempting to upload cache located at: ${archivePath}`); yield cacheHttpClient.saveCache(cacheId, archivePath, signedUploadUrl, options); const finalizeRequest = { key, @@ -80566,7 +80566,7 @@ var require_cache3 = __commonJS({ sizeBytes: `${archiveFileSize}` }; const finalizeResponse = yield twirpClient.FinalizeCacheEntryUpload(finalizeRequest); - core19.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); + core18.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); if (!finalizeResponse.ok) { if (finalizeResponse.message) { throw new FinalizeCacheError(finalizeResponse.message); @@ -80579,21 +80579,21 @@ var require_cache3 = __commonJS({ if (typedError.name === ValidationError.name) { throw error2; } else if (typedError.name === ReserveCacheError2.name) { - core19.info(`Failed to save: ${typedError.message}`); + core18.info(`Failed to save: ${typedError.message}`); } else if (typedError.name === FinalizeCacheError.name) { - core19.warning(typedError.message); + core18.warning(typedError.message); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core19.error(`Failed to save: ${typedError.message}`); + core18.error(`Failed to save: ${typedError.message}`); } else { - core19.warning(`Failed to save: ${typedError.message}`); + core18.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error2) { - core19.debug(`Failed to delete archive: ${error2}`); + core18.debug(`Failed to delete archive: ${error2}`); } } return cacheId; @@ -80801,7 +80801,7 @@ var require_retry_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.RetryHelper = void 0; - var core19 = __importStar4(require_core()); + var core18 = __importStar4(require_core()); var RetryHelper = class { constructor(maxAttempts, minSeconds, maxSeconds) { if (maxAttempts < 1) { @@ -80824,10 +80824,10 @@ var require_retry_helper = __commonJS({ if (isRetryable && !isRetryable(err)) { throw err; } - core19.info(err.message); + core18.info(err.message); } const seconds = this.getSleepAmount(); - core19.info(`Waiting ${seconds} seconds before trying again`); + core18.info(`Waiting ${seconds} seconds before trying again`); yield this.sleep(seconds); attempt++; } @@ -80907,7 +80907,7 @@ var require_tool_cache = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.evaluateVersions = exports2.isExplicitVersion = exports2.findFromManifest = exports2.getManifestFromRepo = exports2.findAllVersions = exports2.find = exports2.cacheFile = exports2.cacheDir = exports2.extractZip = exports2.extractXar = exports2.extractTar = exports2.extract7z = exports2.downloadTool = exports2.HTTPError = void 0; - var core19 = __importStar4(require_core()); + var core18 = __importStar4(require_core()); var io7 = __importStar4(require_io()); var crypto = __importStar4(require("crypto")); var fs20 = __importStar4(require("fs")); @@ -80936,8 +80936,8 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { dest = dest || 
path19.join(_getTempDirectory(), crypto.randomUUID()); yield io7.mkdirP(path19.dirname(dest)); - core19.debug(`Downloading ${url2}`); - core19.debug(`Destination ${dest}`); + core18.debug(`Downloading ${url2}`); + core18.debug(`Destination ${dest}`); const maxAttempts = 3; const minSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS", 10); const maxSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS", 20); @@ -80964,7 +80964,7 @@ var require_tool_cache = __commonJS({ allowRetries: false }); if (auth) { - core19.debug("set auth"); + core18.debug("set auth"); if (headers === void 0) { headers = {}; } @@ -80973,7 +80973,7 @@ var require_tool_cache = __commonJS({ const response = yield http.get(url2, headers); if (response.message.statusCode !== 200) { const err = new HTTPError2(response.message.statusCode); - core19.debug(`Failed to download from "${url2}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); + core18.debug(`Failed to download from "${url2}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); throw err; } const pipeline = util.promisify(stream2.pipeline); @@ -80982,16 +80982,16 @@ var require_tool_cache = __commonJS({ let succeeded = false; try { yield pipeline(readStream, fs20.createWriteStream(dest)); - core19.debug("download complete"); + core18.debug("download complete"); succeeded = true; return dest; } finally { if (!succeeded) { - core19.debug("download failed"); + core18.debug("download failed"); try { yield io7.rmRF(dest); } catch (err) { - core19.debug(`Failed to delete '${dest}'. ${err.message}`); + core18.debug(`Failed to delete '${dest}'. ${err.message}`); } } } @@ -81006,7 +81006,7 @@ var require_tool_cache = __commonJS({ process.chdir(dest); if (_7zPath) { try { - const logLevel = core19.isDebug() ? "-bb1" : "-bb0"; + const logLevel = core18.isDebug() ? 
"-bb1" : "-bb0"; const args = [ "x", logLevel, @@ -81056,7 +81056,7 @@ var require_tool_cache = __commonJS({ throw new Error("parameter 'file' is required"); } dest = yield _createExtractFolder(dest); - core19.debug("Checking tar --version"); + core18.debug("Checking tar --version"); let versionOutput = ""; yield (0, exec_1.exec)("tar --version", [], { ignoreReturnCode: true, @@ -81066,7 +81066,7 @@ var require_tool_cache = __commonJS({ stderr: (data) => versionOutput += data.toString() } }); - core19.debug(versionOutput.trim()); + core18.debug(versionOutput.trim()); const isGnuTar = versionOutput.toUpperCase().includes("GNU TAR"); let args; if (flags instanceof Array) { @@ -81074,7 +81074,7 @@ var require_tool_cache = __commonJS({ } else { args = [flags]; } - if (core19.isDebug() && !flags.includes("v")) { + if (core18.isDebug() && !flags.includes("v")) { args.push("-v"); } let destArg = dest; @@ -81106,7 +81106,7 @@ var require_tool_cache = __commonJS({ args = [flags]; } args.push("-x", "-C", dest, "-f", file); - if (core19.isDebug()) { + if (core18.isDebug()) { args.push("-v"); } const xarPath = yield io7.which("xar", true); @@ -81151,7 +81151,7 @@ var require_tool_cache = __commonJS({ "-Command", pwshCommand ]; - core19.debug(`Using pwsh at path: ${pwshPath}`); + core18.debug(`Using pwsh at path: ${pwshPath}`); yield (0, exec_1.exec)(`"${pwshPath}"`, args); } else { const powershellCommand = [ @@ -81171,7 +81171,7 @@ var require_tool_cache = __commonJS({ powershellCommand ]; const powershellPath = yield io7.which("powershell", true); - core19.debug(`Using powershell at path: ${powershellPath}`); + core18.debug(`Using powershell at path: ${powershellPath}`); yield (0, exec_1.exec)(`"${powershellPath}"`, args); } }); @@ -81180,7 +81180,7 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { const unzipPath = yield io7.which("unzip", true); const args = [file]; - if (!core19.isDebug()) { + if (!core18.isDebug()) { args.unshift("-q"); } args.unshift("-o"); @@ -81191,8 +81191,8 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { version = semver8.clean(version) || version; arch2 = arch2 || os3.arch(); - core19.debug(`Caching tool ${tool} ${version} ${arch2}`); - core19.debug(`source dir: ${sourceDir}`); + core18.debug(`Caching tool ${tool} ${version} ${arch2}`); + core18.debug(`source dir: ${sourceDir}`); if (!fs20.statSync(sourceDir).isDirectory()) { throw new Error("sourceDir is not a directory"); } @@ -81210,14 +81210,14 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { version = semver8.clean(version) || version; arch2 = arch2 || os3.arch(); - core19.debug(`Caching tool ${tool} ${version} ${arch2}`); - core19.debug(`source file: ${sourceFile}`); + core18.debug(`Caching tool ${tool} ${version} ${arch2}`); + core18.debug(`source file: ${sourceFile}`); if (!fs20.statSync(sourceFile).isFile()) { throw new Error("sourceFile is not a file"); } const destFolder = yield _createToolPath(tool, version, arch2); const destPath = path19.join(destFolder, targetFile); - core19.debug(`destination file ${destPath}`); + core18.debug(`destination file ${destPath}`); yield io7.cp(sourceFile, destPath); _completeToolPath(tool, version, arch2); return destFolder; @@ -81241,12 +81241,12 @@ var require_tool_cache = __commonJS({ if (versionSpec) { versionSpec = semver8.clean(versionSpec) || ""; const cachePath = path19.join(_getCacheDirectory(), toolName, versionSpec, arch2); - 
core19.debug(`checking cache: ${cachePath}`); + core18.debug(`checking cache: ${cachePath}`); if (fs20.existsSync(cachePath) && fs20.existsSync(`${cachePath}.complete`)) { - core19.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); + core18.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); toolPath = cachePath; } else { - core19.debug("not found"); + core18.debug("not found"); } } return toolPath; @@ -81277,7 +81277,7 @@ var require_tool_cache = __commonJS({ const http = new httpm.HttpClient("tool-cache"); const headers = {}; if (auth) { - core19.debug("set auth"); + core18.debug("set auth"); headers.authorization = auth; } const response = yield http.getJson(treeUrl, headers); @@ -81298,7 +81298,7 @@ var require_tool_cache = __commonJS({ try { releases = JSON.parse(versionsRaw); } catch (_a) { - core19.debug("Invalid json"); + core18.debug("Invalid json"); } } return releases; @@ -81324,7 +81324,7 @@ var require_tool_cache = __commonJS({ function _createToolPath(tool, version, arch2) { return __awaiter4(this, void 0, void 0, function* () { const folderPath = path19.join(_getCacheDirectory(), tool, semver8.clean(version) || version, arch2 || ""); - core19.debug(`destination ${folderPath}`); + core18.debug(`destination ${folderPath}`); const markerPath = `${folderPath}.complete`; yield io7.rmRF(folderPath); yield io7.rmRF(markerPath); @@ -81336,19 +81336,19 @@ var require_tool_cache = __commonJS({ const folderPath = path19.join(_getCacheDirectory(), tool, semver8.clean(version) || version, arch2 || ""); const markerPath = `${folderPath}.complete`; fs20.writeFileSync(markerPath, ""); - core19.debug("finished caching tool"); + core18.debug("finished caching tool"); } function isExplicitVersion(versionSpec) { const c = semver8.clean(versionSpec) || ""; - core19.debug(`isExplicit: ${c}`); + core18.debug(`isExplicit: ${c}`); const valid3 = semver8.valid(c) != null; - core19.debug(`explicit? ${valid3}`); + core18.debug(`explicit? ${valid3}`); return valid3; } exports2.isExplicitVersion = isExplicitVersion; function evaluateVersions(versions, versionSpec) { let version = ""; - core19.debug(`evaluating ${versions.length} versions`); + core18.debug(`evaluating ${versions.length} versions`); versions = versions.sort((a, b) => { if (semver8.gt(a, b)) { return 1; @@ -81364,9 +81364,9 @@ var require_tool_cache = __commonJS({ } } if (version) { - core19.debug(`matched: ${version}`); + core18.debug(`matched: ${version}`); } else { - core19.debug("match not found"); + core18.debug("match not found"); } return version; } @@ -84040,14 +84040,14 @@ var require_retention = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getExpiration = void 0; var generated_1 = require_generated(); - var core19 = __importStar4(require_core()); + var core18 = __importStar4(require_core()); function getExpiration(retentionDays) { if (!retentionDays) { return void 0; } const maxRetentionDays = getRetentionDays(); if (maxRetentionDays && maxRetentionDays < retentionDays) { - core19.warning(`Retention days cannot be greater than the maximum allowed retention set within the repository. Using ${maxRetentionDays} instead.`); + core18.warning(`Retention days cannot be greater than the maximum allowed retention set within the repository. 
Using ${maxRetentionDays} instead.`); retentionDays = maxRetentionDays; } const expirationDate = /* @__PURE__ */ new Date(); @@ -84639,7 +84639,7 @@ var require_util11 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getBackendIdsFromToken = void 0; - var core19 = __importStar4(require_core()); + var core18 = __importStar4(require_core()); var config_1 = require_config2(); var jwt_decode_1 = __importDefault4(require_jwt_decode_cjs()); var InvalidJwtError = new Error("Failed to get backend IDs: The provided JWT token is invalid and/or missing claims"); @@ -84665,8 +84665,8 @@ var require_util11 = __commonJS({ workflowRunBackendId: scopeParts[1], workflowJobRunBackendId: scopeParts[2] }; - core19.debug(`Workflow Run Backend ID: ${ids.workflowRunBackendId}`); - core19.debug(`Workflow Job Run Backend ID: ${ids.workflowJobRunBackendId}`); + core18.debug(`Workflow Run Backend ID: ${ids.workflowRunBackendId}`); + core18.debug(`Workflow Job Run Backend ID: ${ids.workflowJobRunBackendId}`); return ids; } throw InvalidJwtError; @@ -84737,7 +84737,7 @@ var require_blob_upload = __commonJS({ exports2.uploadZipToBlobStorage = void 0; var storage_blob_1 = require_dist7(); var config_1 = require_config2(); - var core19 = __importStar4(require_core()); + var core18 = __importStar4(require_core()); var crypto = __importStar4(require("crypto")); var stream2 = __importStar4(require("stream")); var errors_1 = require_errors3(); @@ -84763,9 +84763,9 @@ var require_blob_upload = __commonJS({ const bufferSize = (0, config_1.getUploadChunkSize)(); const blobClient = new storage_blob_1.BlobClient(authenticatedUploadURL); const blockBlobClient = blobClient.getBlockBlobClient(); - core19.debug(`Uploading artifact zip to blob storage with maxConcurrency: ${maxConcurrency}, bufferSize: ${bufferSize}`); + core18.debug(`Uploading artifact zip to blob storage with maxConcurrency: ${maxConcurrency}, bufferSize: ${bufferSize}`); const uploadCallback = (progress) => { - core19.info(`Uploaded bytes ${progress.loadedBytes}`); + core18.info(`Uploaded bytes ${progress.loadedBytes}`); uploadByteCount = progress.loadedBytes; lastProgressTime = Date.now(); }; @@ -84779,7 +84779,7 @@ var require_blob_upload = __commonJS({ const hashStream = crypto.createHash("sha256"); zipUploadStream.pipe(uploadStream); zipUploadStream.pipe(hashStream).setEncoding("hex"); - core19.info("Beginning upload of artifact content to blob storage"); + core18.info("Beginning upload of artifact content to blob storage"); try { yield Promise.race([ blockBlobClient.uploadStream(uploadStream, bufferSize, maxConcurrency, options), @@ -84793,12 +84793,12 @@ var require_blob_upload = __commonJS({ } finally { abortController.abort(); } - core19.info("Finished uploading artifact content to blob storage!"); + core18.info("Finished uploading artifact content to blob storage!"); hashStream.end(); sha256Hash = hashStream.read(); - core19.info(`SHA256 hash of uploaded artifact zip is ${sha256Hash}`); + core18.info(`SHA256 hash of uploaded artifact zip is ${sha256Hash}`); if (uploadByteCount === 0) { - core19.warning(`No data was uploaded to blob storage. Reported upload byte count is 0.`); + core18.warning(`No data was uploaded to blob storage. 
Reported upload byte count is 0.`); } return { uploadSize: uploadByteCount, @@ -109676,7 +109676,7 @@ var require_zip2 = __commonJS({ var stream2 = __importStar4(require("stream")); var promises_1 = require("fs/promises"); var archiver2 = __importStar4(require_archiver()); - var core19 = __importStar4(require_core()); + var core18 = __importStar4(require_core()); var config_1 = require_config2(); exports2.DEFAULT_COMPRESSION_LEVEL = 6; var ZipUploadStream = class extends stream2.Transform { @@ -109693,7 +109693,7 @@ var require_zip2 = __commonJS({ exports2.ZipUploadStream = ZipUploadStream; function createZipUploadStream(uploadSpecification, compressionLevel = exports2.DEFAULT_COMPRESSION_LEVEL) { return __awaiter4(this, void 0, void 0, function* () { - core19.debug(`Creating Artifact archive with compressionLevel: ${compressionLevel}`); + core18.debug(`Creating Artifact archive with compressionLevel: ${compressionLevel}`); const zip = archiver2.create("zip", { highWaterMark: (0, config_1.getUploadChunkSize)(), zlib: { level: compressionLevel } @@ -109717,8 +109717,8 @@ var require_zip2 = __commonJS({ } const bufferSize = (0, config_1.getUploadChunkSize)(); const zipUploadStream = new ZipUploadStream(bufferSize); - core19.debug(`Zip write high watermark value ${zipUploadStream.writableHighWaterMark}`); - core19.debug(`Zip read high watermark value ${zipUploadStream.readableHighWaterMark}`); + core18.debug(`Zip write high watermark value ${zipUploadStream.writableHighWaterMark}`); + core18.debug(`Zip read high watermark value ${zipUploadStream.readableHighWaterMark}`); zip.pipe(zipUploadStream); zip.finalize(); return zipUploadStream; @@ -109726,24 +109726,24 @@ var require_zip2 = __commonJS({ } exports2.createZipUploadStream = createZipUploadStream; var zipErrorCallback = (error2) => { - core19.error("An error has occurred while creating the zip file for upload"); - core19.info(error2); + core18.error("An error has occurred while creating the zip file for upload"); + core18.info(error2); throw new Error("An error has occurred during zip creation for the artifact"); }; var zipWarningCallback = (error2) => { if (error2.code === "ENOENT") { - core19.warning("ENOENT warning during artifact zip creation. No such file or directory"); - core19.info(error2); + core18.warning("ENOENT warning during artifact zip creation. 
No such file or directory"); + core18.info(error2); } else { - core19.warning(`A non-blocking warning has occurred during artifact zip creation: ${error2.code}`); - core19.info(error2); + core18.warning(`A non-blocking warning has occurred during artifact zip creation: ${error2.code}`); + core18.info(error2); } }; var zipFinishCallback = () => { - core19.debug("Zip stream for upload has finished."); + core18.debug("Zip stream for upload has finished."); }; var zipEndCallback = () => { - core19.debug("Zip stream for upload has ended."); + core18.debug("Zip stream for upload has ended."); }; } }); @@ -109808,7 +109808,7 @@ var require_upload_artifact = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.uploadArtifact = void 0; - var core19 = __importStar4(require_core()); + var core18 = __importStar4(require_core()); var retention_1 = require_retention(); var path_and_artifact_name_validation_1 = require_path_and_artifact_name_validation(); var artifact_twirp_client_1 = require_artifact_twirp_client2(); @@ -109855,13 +109855,13 @@ var require_upload_artifact = __commonJS({ value: `sha256:${uploadResult.sha256Hash}` }); } - core19.info(`Finalizing artifact upload`); + core18.info(`Finalizing artifact upload`); const finalizeArtifactResp = yield artifactClient.FinalizeArtifact(finalizeArtifactReq); if (!finalizeArtifactResp.ok) { throw new errors_1.InvalidResponseError("FinalizeArtifact: response from backend was not ok"); } const artifactId = BigInt(finalizeArtifactResp.artifactId); - core19.info(`Artifact ${name}.zip successfully finalized. Artifact ID ${artifactId}`); + core18.info(`Artifact ${name}.zip successfully finalized. Artifact ID ${artifactId}`); return { size: uploadResult.uploadSize, digest: uploadResult.sha256Hash, @@ -117009,7 +117009,7 @@ var require_download_artifact = __commonJS({ var crypto = __importStar4(require("crypto")); var stream2 = __importStar4(require("stream")); var github3 = __importStar4(require_github2()); - var core19 = __importStar4(require_core()); + var core18 = __importStar4(require_core()); var httpClient = __importStar4(require_lib()); var unzip_stream_1 = __importDefault4(require_unzip()); var user_agent_1 = require_user_agent2(); @@ -117045,7 +117045,7 @@ var require_download_artifact = __commonJS({ return yield streamExtractExternal(url2, directory); } catch (error2) { retryCount++; - core19.debug(`Failed to download artifact after ${retryCount} retries due to ${error2.message}. Retrying in 5 seconds...`); + core18.debug(`Failed to download artifact after ${retryCount} retries due to ${error2.message}. 
Retrying in 5 seconds...`); yield new Promise((resolve8) => setTimeout(resolve8, 5e3)); } } @@ -117074,7 +117074,7 @@ var require_download_artifact = __commonJS({ extractStream.on("data", () => { timer.refresh(); }).on("error", (error2) => { - core19.debug(`response.message: Artifact download failed: ${error2.message}`); + core18.debug(`response.message: Artifact download failed: ${error2.message}`); clearTimeout(timer); reject(error2); }).pipe(unzip_stream_1.default.Extract({ path: directory })).on("close", () => { @@ -117082,7 +117082,7 @@ var require_download_artifact = __commonJS({ if (hashStream) { hashStream.end(); sha256Digest = hashStream.read(); - core19.info(`SHA256 digest of downloaded artifact is ${sha256Digest}`); + core18.info(`SHA256 digest of downloaded artifact is ${sha256Digest}`); } resolve8({ sha256Digest: `sha256:${sha256Digest}` }); }).on("error", (error2) => { @@ -117097,7 +117097,7 @@ var require_download_artifact = __commonJS({ const downloadPath = yield resolveOrCreateDirectory(options === null || options === void 0 ? void 0 : options.path); const api = github3.getOctokit(token); let digestMismatch = false; - core19.info(`Downloading artifact '${artifactId}' from '${repositoryOwner}/${repositoryName}'`); + core18.info(`Downloading artifact '${artifactId}' from '${repositoryOwner}/${repositoryName}'`); const { headers, status } = yield api.rest.actions.downloadArtifact({ owner: repositoryOwner, repo: repositoryName, @@ -117114,16 +117114,16 @@ var require_download_artifact = __commonJS({ if (!location) { throw new Error(`Unable to redirect to artifact download url`); } - core19.info(`Redirecting to blob download url: ${scrubQueryParameters(location)}`); + core18.info(`Redirecting to blob download url: ${scrubQueryParameters(location)}`); try { - core19.info(`Starting download of artifact to: ${downloadPath}`); + core18.info(`Starting download of artifact to: ${downloadPath}`); const extractResponse = yield streamExtract(location, downloadPath); - core19.info(`Artifact download completed successfully.`); + core18.info(`Artifact download completed successfully.`); if (options === null || options === void 0 ? void 0 : options.expectedHash) { if ((options === null || options === void 0 ? void 0 : options.expectedHash) !== extractResponse.sha256Digest) { digestMismatch = true; - core19.debug(`Computed digest: ${extractResponse.sha256Digest}`); - core19.debug(`Expected digest: ${options.expectedHash}`); + core18.debug(`Computed digest: ${extractResponse.sha256Digest}`); + core18.debug(`Expected digest: ${options.expectedHash}`); } } } catch (error2) { @@ -117150,7 +117150,7 @@ var require_download_artifact = __commonJS({ Are you trying to download from a different run? Try specifying a github-token with \`actions:read\` scope.`); } if (artifacts.length > 1) { - core19.warning("Multiple artifacts found, defaulting to first."); + core18.warning("Multiple artifacts found, defaulting to first."); } const signedReq = { workflowRunBackendId: artifacts[0].workflowRunBackendId, @@ -117158,16 +117158,16 @@ Are you trying to download from a different run? 
Try specifying a github-token w name: artifacts[0].name }; const { signedUrl } = yield artifactClient.GetSignedArtifactURL(signedReq); - core19.info(`Redirecting to blob download url: ${scrubQueryParameters(signedUrl)}`); + core18.info(`Redirecting to blob download url: ${scrubQueryParameters(signedUrl)}`); try { - core19.info(`Starting download of artifact to: ${downloadPath}`); + core18.info(`Starting download of artifact to: ${downloadPath}`); const extractResponse = yield streamExtract(signedUrl, downloadPath); - core19.info(`Artifact download completed successfully.`); + core18.info(`Artifact download completed successfully.`); if (options === null || options === void 0 ? void 0 : options.expectedHash) { if ((options === null || options === void 0 ? void 0 : options.expectedHash) !== extractResponse.sha256Digest) { digestMismatch = true; - core19.debug(`Computed digest: ${extractResponse.sha256Digest}`); - core19.debug(`Expected digest: ${options.expectedHash}`); + core18.debug(`Computed digest: ${extractResponse.sha256Digest}`); + core18.debug(`Expected digest: ${options.expectedHash}`); } } } catch (error2) { @@ -117180,10 +117180,10 @@ Are you trying to download from a different run? Try specifying a github-token w function resolveOrCreateDirectory(downloadPath = (0, config_1.getGitHubWorkspaceDir)()) { return __awaiter4(this, void 0, void 0, function* () { if (!(yield exists(downloadPath))) { - core19.debug(`Artifact destination folder does not exist, creating: ${downloadPath}`); + core18.debug(`Artifact destination folder does not exist, creating: ${downloadPath}`); yield promises_1.default.mkdir(downloadPath, { recursive: true }); } else { - core19.debug(`Artifact destination folder already exists: ${downloadPath}`); + core18.debug(`Artifact destination folder already exists: ${downloadPath}`); } return downloadPath; }); @@ -117224,7 +117224,7 @@ var require_retry_options = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getRetryOptions = void 0; - var core19 = __importStar4(require_core()); + var core18 = __importStar4(require_core()); var defaultMaxRetryNumber = 5; var defaultExemptStatusCodes = [400, 401, 403, 404, 422]; function getRetryOptions(defaultOptions, retries = defaultMaxRetryNumber, exemptStatusCodes = defaultExemptStatusCodes) { @@ -117239,7 +117239,7 @@ var require_retry_options = __commonJS({ retryOptions.doNotRetry = exemptStatusCodes; } const requestOptions = Object.assign(Object.assign({}, defaultOptions.request), { retries }); - core19.debug(`GitHub client configured with: (retries: ${requestOptions.retries}, retry-exempt-status-code: ${(_a = retryOptions.doNotRetry) !== null && _a !== void 0 ? _a : "octokit default: [400, 401, 403, 404, 422]"})`); + core18.debug(`GitHub client configured with: (retries: ${requestOptions.retries}, retry-exempt-status-code: ${(_a = retryOptions.doNotRetry) !== null && _a !== void 0 ? 
_a : "octokit default: [400, 401, 403, 404, 422]"})`); return [retryOptions, requestOptions]; } exports2.getRetryOptions = getRetryOptions; @@ -117396,7 +117396,7 @@ var require_get_artifact = __commonJS({ exports2.getArtifactInternal = exports2.getArtifactPublic = void 0; var github_1 = require_github2(); var plugin_retry_1 = require_dist_node25(); - var core19 = __importStar4(require_core()); + var core18 = __importStar4(require_core()); var utils_1 = require_utils13(); var retry_options_1 = require_retry_options(); var plugin_request_log_1 = require_dist_node24(); @@ -117434,7 +117434,7 @@ var require_get_artifact = __commonJS({ let artifact2 = getArtifactResp.data.artifacts[0]; if (getArtifactResp.data.artifacts.length > 1) { artifact2 = getArtifactResp.data.artifacts.sort((a, b) => b.id - a.id)[0]; - core19.debug(`More than one artifact found for a single name, returning newest (id: ${artifact2.id})`); + core18.debug(`More than one artifact found for a single name, returning newest (id: ${artifact2.id})`); } return { artifact: { @@ -117467,7 +117467,7 @@ var require_get_artifact = __commonJS({ let artifact2 = res.artifacts[0]; if (res.artifacts.length > 1) { artifact2 = res.artifacts.sort((a, b) => Number(b.databaseId) - Number(a.databaseId))[0]; - core19.debug(`More than one artifact found for a single name, returning newest (id: ${artifact2.databaseId})`); + core18.debug(`More than one artifact found for a single name, returning newest (id: ${artifact2.databaseId})`); } return { artifact: { @@ -119415,7 +119415,7 @@ var require_requestUtils2 = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.retryHttpClientRequest = exports2.retry = void 0; var utils_1 = require_utils14(); - var core19 = __importStar4(require_core()); + var core18 = __importStar4(require_core()); var config_variables_1 = require_config_variables(); function retry3(name, operation, customErrorMessages, maxAttempts) { return __awaiter4(this, void 0, void 0, function* () { @@ -119442,13 +119442,13 @@ var require_requestUtils2 = __commonJS({ errorMessage = error2.message; } if (!isRetryable) { - core19.info(`${name} - Error is not retryable`); + core18.info(`${name} - Error is not retryable`); if (response) { (0, utils_1.displayHttpDiagnostics)(response); } break; } - core19.info(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); + core18.info(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); yield (0, utils_1.sleep)((0, utils_1.getExponentialRetryTimeInMilliseconds)(attempt)); attempt++; } @@ -119532,7 +119532,7 @@ var require_upload_http_client = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.UploadHttpClient = void 0; var fs20 = __importStar4(require("fs")); - var core19 = __importStar4(require_core()); + var core18 = __importStar4(require_core()); var tmp = __importStar4(require_tmp_promise()); var stream2 = __importStar4(require("stream")); var utils_1 = require_utils14(); @@ -119597,7 +119597,7 @@ var require_upload_http_client = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { const FILE_CONCURRENCY = (0, config_variables_1.getUploadFileConcurrency)(); const MAX_CHUNK_SIZE = (0, config_variables_1.getUploadChunkSize)(); - core19.debug(`File Concurrency: ${FILE_CONCURRENCY}, and Chunk Size: ${MAX_CHUNK_SIZE}`); + core18.debug(`File Concurrency: ${FILE_CONCURRENCY}, and Chunk Size: ${MAX_CHUNK_SIZE}`); const parameters = []; let continueOnError = true; 
if (options) { @@ -119634,15 +119634,15 @@ var require_upload_http_client = __commonJS({ } const startTime = perf_hooks_1.performance.now(); const uploadFileResult = yield this.uploadFileAsync(index, currentFileParameters); - if (core19.isDebug()) { - core19.debug(`File: ${++completedFiles}/${filesToUpload.length}. ${currentFileParameters.file} took ${(perf_hooks_1.performance.now() - startTime).toFixed(3)} milliseconds to finish upload`); + if (core18.isDebug()) { + core18.debug(`File: ${++completedFiles}/${filesToUpload.length}. ${currentFileParameters.file} took ${(perf_hooks_1.performance.now() - startTime).toFixed(3)} milliseconds to finish upload`); } uploadFileSize += uploadFileResult.successfulUploadSize; totalFileSize += uploadFileResult.totalSize; if (uploadFileResult.isSuccess === false) { failedItemsToReport.push(currentFileParameters.file); if (!continueOnError) { - core19.error(`aborting artifact upload`); + core18.error(`aborting artifact upload`); abortPendingFileUploads = true; } } @@ -119651,7 +119651,7 @@ var require_upload_http_client = __commonJS({ }))); this.statusReporter.stop(); this.uploadHttpManager.disposeAndReplaceAllClients(); - core19.info(`Total size of all the files uploaded is ${uploadFileSize} bytes`); + core18.info(`Total size of all the files uploaded is ${uploadFileSize} bytes`); return { uploadSize: uploadFileSize, totalSize: totalFileSize, @@ -119677,16 +119677,16 @@ var require_upload_http_client = __commonJS({ let uploadFileSize = 0; let isGzip = true; if (!isFIFO && totalFileSize < 65536) { - core19.debug(`${parameters.file} is less than 64k in size. Creating a gzip file in-memory to potentially reduce the upload size`); + core18.debug(`${parameters.file} is less than 64k in size. Creating a gzip file in-memory to potentially reduce the upload size`); const buffer = yield (0, upload_gzip_1.createGZipFileInBuffer)(parameters.file); let openUploadStream; if (totalFileSize < buffer.byteLength) { - core19.debug(`The gzip file created for ${parameters.file} did not help with reducing the size of the file. The original file will be uploaded as-is`); + core18.debug(`The gzip file created for ${parameters.file} did not help with reducing the size of the file. The original file will be uploaded as-is`); openUploadStream = () => fs20.createReadStream(parameters.file); isGzip = false; uploadFileSize = totalFileSize; } else { - core19.debug(`A gzip file created for ${parameters.file} helped with reducing the size of the original file. The file will be uploaded using gzip.`); + core18.debug(`A gzip file created for ${parameters.file} helped with reducing the size of the original file. The file will be uploaded using gzip.`); openUploadStream = () => { const passThrough = new stream2.PassThrough(); passThrough.end(buffer); @@ -119698,7 +119698,7 @@ var require_upload_http_client = __commonJS({ if (!result) { isUploadSuccessful = false; failedChunkSizes += uploadFileSize; - core19.warning(`Aborting upload for ${parameters.file} due to failure`); + core18.warning(`Aborting upload for ${parameters.file} due to failure`); } return { isSuccess: isUploadSuccessful, @@ -119707,16 +119707,16 @@ var require_upload_http_client = __commonJS({ }; } else { const tempFile = yield tmp.file(); - core19.debug(`${parameters.file} is greater than 64k in size. Creating a gzip file on-disk ${tempFile.path} to potentially reduce the upload size`); + core18.debug(`${parameters.file} is greater than 64k in size. 
Creating a gzip file on-disk ${tempFile.path} to potentially reduce the upload size`); uploadFileSize = yield (0, upload_gzip_1.createGZipFileOnDisk)(parameters.file, tempFile.path); let uploadFilePath = tempFile.path; if (!isFIFO && totalFileSize < uploadFileSize) { - core19.debug(`The gzip file created for ${parameters.file} did not help with reducing the size of the file. The original file will be uploaded as-is`); + core18.debug(`The gzip file created for ${parameters.file} did not help with reducing the size of the file. The original file will be uploaded as-is`); uploadFileSize = totalFileSize; uploadFilePath = parameters.file; isGzip = false; } else { - core19.debug(`The gzip file created for ${parameters.file} is smaller than the original file. The file will be uploaded using gzip.`); + core18.debug(`The gzip file created for ${parameters.file} is smaller than the original file. The file will be uploaded using gzip.`); } let abortFileUpload = false; while (offset < uploadFileSize) { @@ -119736,7 +119736,7 @@ var require_upload_http_client = __commonJS({ if (!result) { isUploadSuccessful = false; failedChunkSizes += chunkSize; - core19.warning(`Aborting upload for ${parameters.file} due to failure`); + core18.warning(`Aborting upload for ${parameters.file} due to failure`); abortFileUpload = true; } else { if (uploadFileSize > 8388608) { @@ -119744,7 +119744,7 @@ var require_upload_http_client = __commonJS({ } } } - core19.debug(`deleting temporary gzip file ${tempFile.path}`); + core18.debug(`deleting temporary gzip file ${tempFile.path}`); yield tempFile.cleanup(); return { isSuccess: isUploadSuccessful, @@ -119783,7 +119783,7 @@ var require_upload_http_client = __commonJS({ if (response) { (0, utils_1.displayHttpDiagnostics)(response); } - core19.info(`Retry limit has been reached for chunk at offset ${start} to ${resourceUrl}`); + core18.info(`Retry limit has been reached for chunk at offset ${start} to ${resourceUrl}`); return true; } return false; @@ -119791,14 +119791,14 @@ var require_upload_http_client = __commonJS({ const backOff = (retryAfterValue) => __awaiter4(this, void 0, void 0, function* () { this.uploadHttpManager.disposeAndReplaceClient(httpClientIndex); if (retryAfterValue) { - core19.info(`Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the upload`); + core18.info(`Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the upload`); yield (0, utils_1.sleep)(retryAfterValue); } else { const backoffTime = (0, utils_1.getExponentialRetryTimeInMilliseconds)(retryCount); - core19.info(`Exponential backoff for retry #${retryCount}. Waiting for ${backoffTime} milliseconds before continuing the upload at offset ${start}`); + core18.info(`Exponential backoff for retry #${retryCount}. 
Waiting for ${backoffTime} milliseconds before continuing the upload at offset ${start}`); yield (0, utils_1.sleep)(backoffTime); } - core19.info(`Finished backoff for retry #${retryCount}, continuing with upload`); + core18.info(`Finished backoff for retry #${retryCount}, continuing with upload`); return; }); while (retryCount <= retryLimit) { @@ -119806,7 +119806,7 @@ var require_upload_http_client = __commonJS({ try { response = yield uploadChunkRequest(); } catch (error2) { - core19.info(`An error has been caught http-client index ${httpClientIndex}, retrying the upload`); + core18.info(`An error has been caught http-client index ${httpClientIndex}, retrying the upload`); console.log(error2); if (incrementAndCheckRetryLimit()) { return false; @@ -119818,13 +119818,13 @@ var require_upload_http_client = __commonJS({ if ((0, utils_1.isSuccessStatusCode)(response.message.statusCode)) { return true; } else if ((0, utils_1.isRetryableStatusCode)(response.message.statusCode)) { - core19.info(`A ${response.message.statusCode} status code has been received, will attempt to retry the upload`); + core18.info(`A ${response.message.statusCode} status code has been received, will attempt to retry the upload`); if (incrementAndCheckRetryLimit(response)) { return false; } (0, utils_1.isThrottledStatusCode)(response.message.statusCode) ? yield backOff((0, utils_1.tryGetRetryAfterValueTimeInMilliseconds)(response.message.headers)) : yield backOff(); } else { - core19.error(`Unexpected response. Unable to upload chunk to ${resourceUrl}`); + core18.error(`Unexpected response. Unable to upload chunk to ${resourceUrl}`); (0, utils_1.displayHttpDiagnostics)(response); return false; } @@ -119842,7 +119842,7 @@ var require_upload_http_client = __commonJS({ resourceUrl.searchParams.append("artifactName", artifactName); const parameters = { Size: size }; const data = JSON.stringify(parameters, null, 2); - core19.debug(`URL is ${resourceUrl.toString()}`); + core18.debug(`URL is ${resourceUrl.toString()}`); const client = this.uploadHttpManager.getClient(0); const headers = (0, utils_1.getUploadHeaders)("application/json", false); const customErrorMessages = /* @__PURE__ */ new Map([ @@ -119855,7 +119855,7 @@ var require_upload_http_client = __commonJS({ return client.patch(resourceUrl.toString(), data, headers); }), customErrorMessages); yield response.readBody(); - core19.debug(`Artifact ${artifactName} has been successfully uploaded, total size in bytes: ${size}`); + core18.debug(`Artifact ${artifactName} has been successfully uploaded, total size in bytes: ${size}`); }); } }; @@ -119924,7 +119924,7 @@ var require_download_http_client = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DownloadHttpClient = void 0; var fs20 = __importStar4(require("fs")); - var core19 = __importStar4(require_core()); + var core18 = __importStar4(require_core()); var zlib3 = __importStar4(require("zlib")); var utils_1 = require_utils14(); var url_1 = require("url"); @@ -119978,11 +119978,11 @@ var require_download_http_client = __commonJS({ downloadSingleArtifact(downloadItems) { return __awaiter4(this, void 0, void 0, function* () { const DOWNLOAD_CONCURRENCY = (0, config_variables_1.getDownloadFileConcurrency)(); - core19.debug(`Download file concurrency is set to ${DOWNLOAD_CONCURRENCY}`); + core18.debug(`Download file concurrency is set to ${DOWNLOAD_CONCURRENCY}`); const parallelDownloads = [...new Array(DOWNLOAD_CONCURRENCY).keys()]; let currentFile = 0; let downloadedFiles = 0; - 
core19.info(`Total number of files that will be downloaded: ${downloadItems.length}`); + core18.info(`Total number of files that will be downloaded: ${downloadItems.length}`); this.statusReporter.setTotalNumberOfFilesToProcess(downloadItems.length); this.statusReporter.start(); yield Promise.all(parallelDownloads.map((index) => __awaiter4(this, void 0, void 0, function* () { @@ -119991,8 +119991,8 @@ var require_download_http_client = __commonJS({ currentFile += 1; const startTime = perf_hooks_1.performance.now(); yield this.downloadIndividualFile(index, currentFileToDownload.sourceLocation, currentFileToDownload.targetPath); - if (core19.isDebug()) { - core19.debug(`File: ${++downloadedFiles}/${downloadItems.length}. ${currentFileToDownload.targetPath} took ${(perf_hooks_1.performance.now() - startTime).toFixed(3)} milliseconds to finish downloading`); + if (core18.isDebug()) { + core18.debug(`File: ${++downloadedFiles}/${downloadItems.length}. ${currentFileToDownload.targetPath} took ${(perf_hooks_1.performance.now() - startTime).toFixed(3)} milliseconds to finish downloading`); } this.statusReporter.incrementProcessedCount(); } @@ -120030,19 +120030,19 @@ var require_download_http_client = __commonJS({ } else { this.downloadHttpManager.disposeAndReplaceClient(httpClientIndex); if (retryAfterValue) { - core19.info(`Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the download`); + core18.info(`Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the download`); yield (0, utils_1.sleep)(retryAfterValue); } else { const backoffTime = (0, utils_1.getExponentialRetryTimeInMilliseconds)(retryCount); - core19.info(`Exponential backoff for retry #${retryCount}. Waiting for ${backoffTime} milliseconds before continuing the download`); + core18.info(`Exponential backoff for retry #${retryCount}. Waiting for ${backoffTime} milliseconds before continuing the download`); yield (0, utils_1.sleep)(backoffTime); } - core19.info(`Finished backoff for retry #${retryCount}, continuing with download`); + core18.info(`Finished backoff for retry #${retryCount}, continuing with download`); } }); const isAllBytesReceived = (expected, received) => { if (!expected || !received || process.env["ACTIONS_ARTIFACT_SKIP_DOWNLOAD_VALIDATION"]) { - core19.info("Skipping download validation."); + core18.info("Skipping download validation."); return true; } return parseInt(expected) === received; @@ -120063,7 +120063,7 @@ var require_download_http_client = __commonJS({ try { response = yield makeDownloadRequest(); } catch (error2) { - core19.info("An error occurred while attempting to download a file"); + core18.info("An error occurred while attempting to download a file"); console.log(error2); yield backOff(); continue; @@ -120083,7 +120083,7 @@ var require_download_http_client = __commonJS({ } } if (forceRetry || (0, utils_1.isRetryableStatusCode)(response.message.statusCode)) { - core19.info(`A ${response.message.statusCode} response code has been received while attempting to download an artifact`); + core18.info(`A ${response.message.statusCode} response code has been received while attempting to download an artifact`); resetDestinationStream(downloadPath); (0, utils_1.isThrottledStatusCode)(response.message.statusCode) ? 
yield backOff((0, utils_1.tryGetRetryAfterValueTimeInMilliseconds)(response.message.headers)) : yield backOff(); } else { @@ -120105,29 +120105,29 @@ var require_download_http_client = __commonJS({ if (isGzip) { const gunzip = zlib3.createGunzip(); response.message.on("error", (error2) => { - core19.info(`An error occurred while attempting to read the response stream`); + core18.info(`An error occurred while attempting to read the response stream`); gunzip.close(); destinationStream.close(); reject(error2); }).pipe(gunzip).on("error", (error2) => { - core19.info(`An error occurred while attempting to decompress the response stream`); + core18.info(`An error occurred while attempting to decompress the response stream`); destinationStream.close(); reject(error2); }).pipe(destinationStream).on("close", () => { resolve8(); }).on("error", (error2) => { - core19.info(`An error occurred while writing a downloaded file to ${destinationStream.path}`); + core18.info(`An error occurred while writing a downloaded file to ${destinationStream.path}`); reject(error2); }); } else { response.message.on("error", (error2) => { - core19.info(`An error occurred while attempting to read the response stream`); + core18.info(`An error occurred while attempting to read the response stream`); destinationStream.close(); reject(error2); }).pipe(destinationStream).on("close", () => { resolve8(); }).on("error", (error2) => { - core19.info(`An error occurred while writing a downloaded file to ${destinationStream.path}`); + core18.info(`An error occurred while writing a downloaded file to ${destinationStream.path}`); reject(error2); }); } @@ -120266,7 +120266,7 @@ var require_artifact_client = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultArtifactClient = void 0; - var core19 = __importStar4(require_core()); + var core18 = __importStar4(require_core()); var upload_specification_1 = require_upload_specification(); var upload_http_client_1 = require_upload_http_client(); var utils_1 = require_utils14(); @@ -120287,7 +120287,7 @@ var require_artifact_client = __commonJS({ */ uploadArtifact(name, files, rootDirectory, options) { return __awaiter4(this, void 0, void 0, function* () { - core19.info(`Starting artifact upload + core18.info(`Starting artifact upload For more detailed logs during the artifact upload process, enable step-debugging: https://docs.github.com/actions/monitoring-and-troubleshooting-workflows/enabling-debug-logging#enabling-step-debug-logging`); (0, path_and_artifact_name_validation_1.checkArtifactName)(name); const uploadSpecification = (0, upload_specification_1.getUploadSpecification)(name, rootDirectory, files); @@ -120299,24 +120299,24 @@ For more detailed logs during the artifact upload process, enable step-debugging }; const uploadHttpClient = new upload_http_client_1.UploadHttpClient(); if (uploadSpecification.length === 0) { - core19.warning(`No files found that can be uploaded`); + core18.warning(`No files found that can be uploaded`); } else { const response = yield uploadHttpClient.createArtifactInFileContainer(name, options); if (!response.fileContainerResourceUrl) { - core19.debug(response.toString()); + core18.debug(response.toString()); throw new Error("No URL provided by the Artifact Service to upload an artifact to"); } - core19.debug(`Upload Resource URL: ${response.fileContainerResourceUrl}`); - core19.info(`Container for artifact "${name}" successfully created. 
Starting upload of file(s)`); + core18.debug(`Upload Resource URL: ${response.fileContainerResourceUrl}`); + core18.info(`Container for artifact "${name}" successfully created. Starting upload of file(s)`); const uploadResult = yield uploadHttpClient.uploadArtifactToFileContainer(response.fileContainerResourceUrl, uploadSpecification, options); - core19.info(`File upload process has finished. Finalizing the artifact upload`); + core18.info(`File upload process has finished. Finalizing the artifact upload`); yield uploadHttpClient.patchArtifactSize(uploadResult.totalSize, name); if (uploadResult.failedItems.length > 0) { - core19.info(`Upload finished. There were ${uploadResult.failedItems.length} items that failed to upload`); + core18.info(`Upload finished. There were ${uploadResult.failedItems.length} items that failed to upload`); } else { - core19.info(`Artifact has been finalized. All files have been successfully uploaded!`); + core18.info(`Artifact has been finalized. All files have been successfully uploaded!`); } - core19.info(` + core18.info(` The raw size of all the files that were specified for upload is ${uploadResult.totalSize} bytes The size of all the files that were uploaded is ${uploadResult.uploadSize} bytes. This takes into account any gzip compression used to reduce the upload size, time and storage @@ -120350,10 +120350,10 @@ Note: The size of downloaded zips can differ significantly from the reported siz path19 = (0, path_1.resolve)(path19); const downloadSpecification = (0, download_specification_1.getDownloadSpecification)(name, items.value, path19, (options === null || options === void 0 ? void 0 : options.createArtifactFolder) || false); if (downloadSpecification.filesToDownload.length === 0) { - core19.info(`No downloadable files were found for the artifact: ${artifactToDownload.name}`); + core18.info(`No downloadable files were found for the artifact: ${artifactToDownload.name}`); } else { yield (0, utils_1.createDirectoriesForArtifact)(downloadSpecification.directoryStructure); - core19.info("Directory structure has been set up for the artifact"); + core18.info("Directory structure has been set up for the artifact"); yield (0, utils_1.createEmptyFilesForArtifact)(downloadSpecification.emptyFilesToCreate); yield downloadHttpClient.downloadSingleArtifact(downloadSpecification.filesToDownload); } @@ -120369,7 +120369,7 @@ Note: The size of downloaded zips can differ significantly from the reported siz const response = []; const artifacts = yield downloadHttpClient.listArtifacts(); if (artifacts.count === 0) { - core19.info("Unable to find any artifacts for the associated workflow"); + core18.info("Unable to find any artifacts for the associated workflow"); return response; } if (!path19) { @@ -120381,11 +120381,11 @@ Note: The size of downloaded zips can differ significantly from the reported siz while (downloadedArtifacts < artifacts.count) { const currentArtifactToDownload = artifacts.value[downloadedArtifacts]; downloadedArtifacts += 1; - core19.info(`starting download of artifact ${currentArtifactToDownload.name} : ${downloadedArtifacts}/${artifacts.count}`); + core18.info(`starting download of artifact ${currentArtifactToDownload.name} : ${downloadedArtifacts}/${artifacts.count}`); const items = yield downloadHttpClient.getContainerItems(currentArtifactToDownload.name, currentArtifactToDownload.fileContainerResourceUrl); const downloadSpecification = (0, download_specification_1.getDownloadSpecification)(currentArtifactToDownload.name, items.value, path19, 
true); if (downloadSpecification.filesToDownload.length === 0) { - core19.info(`No downloadable files were found for any artifact ${currentArtifactToDownload.name}`); + core18.info(`No downloadable files were found for any artifact ${currentArtifactToDownload.name}`); } else { yield (0, utils_1.createDirectoriesForArtifact)(downloadSpecification.directoryStructure); yield (0, utils_1.createEmptyFilesForArtifact)(downloadSpecification.emptyFilesToCreate); @@ -120451,7 +120451,7 @@ var require_internal_glob_options_helper2 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getOptions = void 0; - var core19 = __importStar4(require_core()); + var core18 = __importStar4(require_core()); function getOptions(copy) { const result = { followSymbolicLinks: true, @@ -120463,23 +120463,23 @@ var require_internal_glob_options_helper2 = __commonJS({ if (copy) { if (typeof copy.followSymbolicLinks === "boolean") { result.followSymbolicLinks = copy.followSymbolicLinks; - core19.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); + core18.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); } if (typeof copy.implicitDescendants === "boolean") { result.implicitDescendants = copy.implicitDescendants; - core19.debug(`implicitDescendants '${result.implicitDescendants}'`); + core18.debug(`implicitDescendants '${result.implicitDescendants}'`); } if (typeof copy.matchDirectories === "boolean") { result.matchDirectories = copy.matchDirectories; - core19.debug(`matchDirectories '${result.matchDirectories}'`); + core18.debug(`matchDirectories '${result.matchDirectories}'`); } if (typeof copy.omitBrokenSymbolicLinks === "boolean") { result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; - core19.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); + core18.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); } if (typeof copy.excludeHiddenFiles === "boolean") { result.excludeHiddenFiles = copy.excludeHiddenFiles; - core19.debug(`excludeHiddenFiles '${result.excludeHiddenFiles}'`); + core18.debug(`excludeHiddenFiles '${result.excludeHiddenFiles}'`); } } return result; @@ -121163,7 +121163,7 @@ var require_internal_globber2 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; - var core19 = __importStar4(require_core()); + var core18 = __importStar4(require_core()); var fs20 = __importStar4(require("fs")); var globOptionsHelper = __importStar4(require_internal_glob_options_helper2()); var path19 = __importStar4(require("path")); @@ -121216,7 +121216,7 @@ var require_internal_globber2 = __commonJS({ } const stack = []; for (const searchPath of patternHelper.getSearchPaths(patterns)) { - core19.debug(`Search path '${searchPath}'`); + core18.debug(`Search path '${searchPath}'`); try { yield __await4(fs20.promises.lstat(searchPath)); } catch (err) { @@ -121291,7 +121291,7 @@ var require_internal_globber2 = __commonJS({ } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { - core19.debug(`Broken symlink '${item.path}'`); + core18.debug(`Broken symlink '${item.path}'`); return void 0; } throw new Error(`No information found for the path '${item.path}'. 
This may indicate a broken symbolic link.`); @@ -121307,7 +121307,7 @@ var require_internal_globber2 = __commonJS({ traversalChain.pop(); } if (traversalChain.some((x) => x === realPath)) { - core19.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); + core18.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); return void 0; } traversalChain.push(realPath); @@ -121400,7 +121400,7 @@ var require_internal_hash_files = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.hashFiles = void 0; var crypto = __importStar4(require("crypto")); - var core19 = __importStar4(require_core()); + var core18 = __importStar4(require_core()); var fs20 = __importStar4(require("fs")); var stream2 = __importStar4(require("stream")); var util = __importStar4(require("util")); @@ -121409,7 +121409,7 @@ var require_internal_hash_files = __commonJS({ var _a, e_1, _b, _c; var _d; return __awaiter4(this, void 0, void 0, function* () { - const writeDelegate = verbose ? core19.info : core19.debug; + const writeDelegate = verbose ? core18.info : core18.debug; let hasMatch = false; const githubWorkspace = currentWorkspace ? currentWorkspace : (_d = process.env["GITHUB_WORKSPACE"]) !== null && _d !== void 0 ? _d : process.cwd(); const result = crypto.createHash("sha256"); @@ -124412,7 +124412,7 @@ var require_sarif_schema_2_1_0 = __commonJS({ }); // src/init-action-post.ts -var core18 = __toESM(require_core()); +var core17 = __toESM(require_core()); // src/actions-util.ts var fs5 = __toESM(require("fs")); @@ -131581,7 +131581,7 @@ async function createDatabaseBundleCli(codeql, config, language) { // src/init-action-post-helper.ts var fs19 = __toESM(require("fs")); -var core17 = __toESM(require_core()); +var core16 = __toESM(require_core()); var github2 = __toESM(require_github()); // src/status-report.ts @@ -131787,11 +131787,11 @@ async function sendStatusReport(statusReport) { } // src/upload-lib.ts -var fs18 = __toESM(require("fs")); -var path18 = __toESM(require("path")); +var fs17 = __toESM(require("fs")); +var path17 = __toESM(require("path")); var url = __toESM(require("url")); -var import_zlib2 = __toESM(require("zlib")); -var core16 = __toESM(require_core()); +var import_zlib = __toESM(require("zlib")); +var core14 = __toESM(require_core()); var jsonschema2 = __toESM(require_lib2()); // src/fingerprints.ts @@ -132917,143 +132917,8 @@ async function addFingerprints(sarif, sourceRoot, logger) { } // src/init.ts -var core15 = __toESM(require_core()); var toolrunner4 = __toESM(require_toolrunner()); var io6 = __toESM(require_io()); - -// src/workflow.ts -var fs17 = __toESM(require("fs")); -var path17 = __toESM(require("path")); -var import_zlib = __toESM(require("zlib")); -var core14 = __toESM(require_core()); -function toCodedErrors(errors) { - return Object.entries(errors).reduce( - (acc, [code, message]) => { - acc[code] = { message, code }; - return acc; - }, - {} - ); -} -var WorkflowErrors = toCodedErrors({ - MissingPushHook: `Please specify an on.push hook to analyze and see code scanning alerts from the default branch on the Security tab.`, - CheckoutWrongHead: `git checkout HEAD^2 is no longer necessary. Please remove this step as Code Scanning recommends analyzing the merge commit for best results.`, - InconsistentActionVersion: `Not all workflow steps that use \`github/codeql-action\` actions use the same version. 
Please ensure that all such steps use the same version to avoid compatibility issues.` -}); -async function getWorkflow(logger) { - const maybeWorkflow = process.env["CODE_SCANNING_WORKFLOW_FILE"]; - if (maybeWorkflow) { - logger.debug( - "Using the workflow specified by the CODE_SCANNING_WORKFLOW_FILE environment variable." - ); - return load( - import_zlib.default.gunzipSync(Buffer.from(maybeWorkflow, "base64")).toString() - ); - } - const workflowPath = await getWorkflowAbsolutePath(logger); - return load(fs17.readFileSync(workflowPath, "utf-8")); -} -async function getWorkflowAbsolutePath(logger) { - const relativePath = await getWorkflowRelativePath(); - const absolutePath = path17.join( - getRequiredEnvParam("GITHUB_WORKSPACE"), - relativePath - ); - if (fs17.existsSync(absolutePath)) { - logger.debug( - `Derived the following absolute path for the currently executing workflow: ${absolutePath}.` - ); - return absolutePath; - } - throw new Error( - `Expected to find a code scanning workflow file at ${absolutePath}, but no such file existed. This can happen if the currently running workflow checks out a branch that doesn't contain the corresponding workflow file.` - ); -} -function getStepsCallingAction(job, actionName) { - if (job.uses) { - throw new Error( - `Could not get steps calling ${actionName} since the job calls a reusable workflow.` - ); - } - const steps = job.steps; - if (!Array.isArray(steps)) { - throw new Error( - `Could not get steps calling ${actionName} since job.steps was not an array.` - ); - } - return steps.filter((step) => step.uses?.includes(actionName)); -} -function getInputOrThrow(workflow, jobName, actionName, inputName, matrixVars) { - const preamble = `Could not get ${inputName} input to ${actionName} since`; - if (!workflow.jobs) { - throw new Error(`${preamble} the workflow has no jobs.`); - } - if (!workflow.jobs[jobName]) { - throw new Error(`${preamble} the workflow has no job named ${jobName}.`); - } - const stepsCallingAction = getStepsCallingAction( - workflow.jobs[jobName], - actionName - ); - if (stepsCallingAction.length === 0) { - throw new Error( - `${preamble} the ${jobName} job does not call ${actionName}.` - ); - } else if (stepsCallingAction.length > 1) { - throw new Error( - `${preamble} the ${jobName} job calls ${actionName} multiple times.` - ); - } - let input = stepsCallingAction[0].with?.[inputName]?.toString(); - if (input !== void 0 && matrixVars !== void 0) { - input = input.replace(/\${{\s+/, "${{").replace(/\s+}}/, "}}"); - for (const [key, value] of Object.entries(matrixVars)) { - input = input.replace(`\${{matrix.${key}}}`, value); - } - } - if (input?.includes("${{")) { - throw new Error( - `Could not get ${inputName} input to ${actionName} since it contained an unrecognized dynamic value.` - ); - } - return input; -} -function getAnalyzeActionName() { - if (isInTestMode() || getTestingEnvironment() === "codeql-action-pr-checks") { - return "./analyze"; - } else { - return "github/codeql-action/analyze"; - } -} -function getCategoryInputOrThrow(workflow, jobName, matrixVars) { - return getInputOrThrow( - workflow, - jobName, - getAnalyzeActionName(), - "category", - matrixVars - ); -} -function getUploadInputOrThrow(workflow, jobName, matrixVars) { - return getInputOrThrow( - workflow, - jobName, - getAnalyzeActionName(), - "upload", - matrixVars - ); -} -function getCheckoutPathInputOrThrow(workflow, jobName, matrixVars) { - return getInputOrThrow( - workflow, - jobName, - getAnalyzeActionName(), - "checkout_path", - 
matrixVars - ) || getRequiredEnvParam("GITHUB_WORKSPACE"); -} - -// src/init.ts async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger) { logger.startGroup("Setup CodeQL tools"); const { @@ -133095,7 +132960,7 @@ function combineSarifFiles(sarifFiles, logger) { for (const sarifFile of sarifFiles) { logger.debug(`Loading SARIF file: ${sarifFile}`); const sarifObject = JSON.parse( - fs18.readFileSync(sarifFile, "utf8") + fs17.readFileSync(sarifFile, "utf8") ); if (combinedSarif.version === null) { combinedSarif.version = sarifObject.version; @@ -133167,7 +133032,7 @@ async function shouldDisableCombineSarifFiles(sarifObjects, githubVersion) { async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, logger) { logger.info("Combining SARIF files using the CodeQL CLI"); const sarifObjects = sarifFiles.map((sarifFile) => { - return JSON.parse(fs18.readFileSync(sarifFile, "utf8")); + return JSON.parse(fs17.readFileSync(sarifFile, "utf8")); }); const deprecationWarningMessage = gitHubVersion.type === 1 /* GHES */ ? "and will be removed in GitHub Enterprise Server 3.18" : "and will be removed in July 2025"; const deprecationMoreInformationMessage = "For more information, see https://github.blog/changelog/2024-05-06-code-scanning-will-stop-combining-runs-from-a-single-upload"; @@ -133183,7 +133048,7 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo logger.warning( `Uploading multiple SARIF runs with the same category is deprecated ${deprecationWarningMessage}. Please update your workflow to upload a single run per category. ${deprecationMoreInformationMessage}` ); - core16.exportVariable("CODEQL_MERGE_SARIF_DEPRECATION_WARNING", "true"); + core14.exportVariable("CODEQL_MERGE_SARIF_DEPRECATION_WARNING", "true"); } return combineSarifFiles(sarifFiles, logger); } @@ -133220,14 +133085,14 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo ); codeQL = initCodeQLResult.codeql; } - const baseTempDir = path18.resolve(tempDir, "combined-sarif"); - fs18.mkdirSync(baseTempDir, { recursive: true }); - const outputDirectory = fs18.mkdtempSync(path18.resolve(baseTempDir, "output-")); - const outputFile = path18.resolve(outputDirectory, "combined-sarif.sarif"); + const baseTempDir = path17.resolve(tempDir, "combined-sarif"); + fs17.mkdirSync(baseTempDir, { recursive: true }); + const outputDirectory = fs17.mkdtempSync(path17.resolve(baseTempDir, "output-")); + const outputFile = path17.resolve(outputDirectory, "combined-sarif.sarif"); await codeQL.mergeResults(sarifFiles, outputFile, { mergeRunsFromEqualCategory: true }); - return JSON.parse(fs18.readFileSync(outputFile, "utf8")); + return JSON.parse(fs17.readFileSync(outputFile, "utf8")); } function populateRunAutomationDetails(sarif, category, analysis_key, environment) { const automationID = getAutomationID2(category, analysis_key, environment); @@ -133256,7 +133121,7 @@ function getAutomationID2(category, analysis_key, environment) { async function uploadPayload(payload, repositoryNwo, logger, analysis) { logger.info("Uploading results"); if (shouldSkipSarifUpload()) { - const payloadSaveFile = path18.join( + const payloadSaveFile = path17.join( getTemporaryDirectory(), `payload-${analysis.kind}.json` ); @@ -133264,7 +133129,7 @@ async function uploadPayload(payload, repositoryNwo, logger, analysis) { `SARIF upload disabled by an environment variable. 
Saving to ${payloadSaveFile}` ); logger.info(`Payload: ${JSON.stringify(payload, null, 2)}`); - fs18.writeFileSync(payloadSaveFile, JSON.stringify(payload, null, 2)); + fs17.writeFileSync(payloadSaveFile, JSON.stringify(payload, null, 2)); return "dummy-sarif-id"; } const client = getApiClient(); @@ -133282,13 +133147,13 @@ async function uploadPayload(payload, repositoryNwo, logger, analysis) { if (httpError !== void 0) { switch (httpError.status) { case 403: - core16.warning(httpError.message || GENERIC_403_MSG); + core14.warning(httpError.message || GENERIC_403_MSG); break; case 404: - core16.warning(httpError.message || GENERIC_404_MSG); + core14.warning(httpError.message || GENERIC_404_MSG); break; default: - core16.warning(httpError.message); + core14.warning(httpError.message); break; } } @@ -133298,12 +133163,12 @@ async function uploadPayload(payload, repositoryNwo, logger, analysis) { function findSarifFilesInDir(sarifPath, isSarif) { const sarifFiles = []; const walkSarifFiles = (dir) => { - const entries = fs18.readdirSync(dir, { withFileTypes: true }); + const entries = fs17.readdirSync(dir, { withFileTypes: true }); for (const entry of entries) { if (entry.isFile() && isSarif(entry.name)) { - sarifFiles.push(path18.resolve(dir, entry.name)); + sarifFiles.push(path17.resolve(dir, entry.name)); } else if (entry.isDirectory()) { - walkSarifFiles(path18.resolve(dir, entry.name)); + walkSarifFiles(path17.resolve(dir, entry.name)); } } }; @@ -133311,11 +133176,11 @@ function findSarifFilesInDir(sarifPath, isSarif) { return sarifFiles; } function getSarifFilePaths(sarifPath, isSarif) { - if (!fs18.existsSync(sarifPath)) { + if (!fs17.existsSync(sarifPath)) { throw new ConfigurationError(`Path does not exist: ${sarifPath}`); } let sarifFiles; - if (fs18.lstatSync(sarifPath).isDirectory()) { + if (fs17.lstatSync(sarifPath).isDirectory()) { sarifFiles = findSarifFilesInDir(sarifPath, isSarif); if (sarifFiles.length === 0) { throw new ConfigurationError( @@ -133345,7 +133210,7 @@ function countResultsInSarif(sarif) { } function readSarifFile(sarifFilePath) { try { - return JSON.parse(fs18.readFileSync(sarifFilePath, "utf8")); + return JSON.parse(fs17.readFileSync(sarifFilePath, "utf8")); } catch (e) { throw new InvalidSarifUploadError( `Invalid SARIF. JSON syntax error: ${getErrorMessage(e)}` @@ -133414,7 +133279,7 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo payloadObj.base_sha = mergeBaseCommitOid; } else if (process.env.GITHUB_EVENT_PATH) { const githubEvent = JSON.parse( - fs18.readFileSync(process.env.GITHUB_EVENT_PATH, "utf8") + fs17.readFileSync(process.env.GITHUB_EVENT_PATH, "utf8") ); payloadObj.base_ref = `refs/heads/${githubEvent.pull_request.base.ref}`; payloadObj.base_sha = githubEvent.pull_request.base.sha; @@ -133495,7 +133360,7 @@ async function uploadPostProcessedFiles(logger, checkoutPath, uploadTarget, post logger.debug(`Serializing SARIF for upload`); const sarifPayload = JSON.stringify(sarif); logger.debug(`Compressing serialized SARIF`); - const zippedSarif = import_zlib2.default.gzipSync(sarifPayload).toString("base64"); + const zippedSarif = import_zlib.default.gzipSync(sarifPayload).toString("base64"); const checkoutURI = url.pathToFileURL(checkoutPath).href; const payload = buildPayload( await getCommitOid(checkoutPath), @@ -133643,7 +133508,7 @@ function validateUniqueCategory(sarif, sentinelPrefix) { `Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job per tool/category. 
The easiest fix is to specify a unique value for the \`category\` input. If .runs[].automationDetails.id is specified in the sarif file, that will take precedence over your configured \`category\`. Category: (${id ? id : "none"}) Tool: (${tool ? tool : "none"})` ); } - core16.exportVariable(sentinelEnvVar, sentinelEnvVar); + core14.exportVariable(sentinelEnvVar, sentinelEnvVar); } } function sanitize(str2) { @@ -133673,7 +133538,7 @@ function filterAlertsByDiffRange(logger, sarif) { if (!locationUri || locationStartLine === void 0) { return false; } - const locationPath = path18.join(checkoutPath, locationUri).replaceAll(path18.sep, "/"); + const locationPath = path17.join(checkoutPath, locationUri).replaceAll(path17.sep, "/"); return diffRanges.some( (range) => range.path === locationPath && (range.startLine <= locationStartLine && range.endLine >= locationStartLine || range.startLine === 0 && range.endLine === 0) ); @@ -133684,6 +133549,138 @@ function filterAlertsByDiffRange(logger, sarif) { return sarif; } +// src/workflow.ts +var fs18 = __toESM(require("fs")); +var path18 = __toESM(require("path")); +var import_zlib2 = __toESM(require("zlib")); +var core15 = __toESM(require_core()); +function toCodedErrors(errors) { + return Object.entries(errors).reduce( + (acc, [code, message]) => { + acc[code] = { message, code }; + return acc; + }, + {} + ); +} +var WorkflowErrors = toCodedErrors({ + MissingPushHook: `Please specify an on.push hook to analyze and see code scanning alerts from the default branch on the Security tab.`, + CheckoutWrongHead: `git checkout HEAD^2 is no longer necessary. Please remove this step as Code Scanning recommends analyzing the merge commit for best results.`, + InconsistentActionVersion: `Not all workflow steps that use \`github/codeql-action\` actions use the same version. Please ensure that all such steps use the same version to avoid compatibility issues.` +}); +async function getWorkflow(logger) { + const maybeWorkflow = process.env["CODE_SCANNING_WORKFLOW_FILE"]; + if (maybeWorkflow) { + logger.debug( + "Using the workflow specified by the CODE_SCANNING_WORKFLOW_FILE environment variable." + ); + return load( + import_zlib2.default.gunzipSync(Buffer.from(maybeWorkflow, "base64")).toString() + ); + } + const workflowPath = await getWorkflowAbsolutePath(logger); + return load(fs18.readFileSync(workflowPath, "utf-8")); +} +async function getWorkflowAbsolutePath(logger) { + const relativePath = await getWorkflowRelativePath(); + const absolutePath = path18.join( + getRequiredEnvParam("GITHUB_WORKSPACE"), + relativePath + ); + if (fs18.existsSync(absolutePath)) { + logger.debug( + `Derived the following absolute path for the currently executing workflow: ${absolutePath}.` + ); + return absolutePath; + } + throw new Error( + `Expected to find a code scanning workflow file at ${absolutePath}, but no such file existed. 
This can happen if the currently running workflow checks out a branch that doesn't contain the corresponding workflow file.` + ); +} +function getStepsCallingAction(job, actionName) { + if (job.uses) { + throw new Error( + `Could not get steps calling ${actionName} since the job calls a reusable workflow.` + ); + } + const steps = job.steps; + if (!Array.isArray(steps)) { + throw new Error( + `Could not get steps calling ${actionName} since job.steps was not an array.` + ); + } + return steps.filter((step) => step.uses?.includes(actionName)); +} +function getInputOrThrow(workflow, jobName, actionName, inputName, matrixVars) { + const preamble = `Could not get ${inputName} input to ${actionName} since`; + if (!workflow.jobs) { + throw new Error(`${preamble} the workflow has no jobs.`); + } + if (!workflow.jobs[jobName]) { + throw new Error(`${preamble} the workflow has no job named ${jobName}.`); + } + const stepsCallingAction = getStepsCallingAction( + workflow.jobs[jobName], + actionName + ); + if (stepsCallingAction.length === 0) { + throw new Error( + `${preamble} the ${jobName} job does not call ${actionName}.` + ); + } else if (stepsCallingAction.length > 1) { + throw new Error( + `${preamble} the ${jobName} job calls ${actionName} multiple times.` + ); + } + let input = stepsCallingAction[0].with?.[inputName]?.toString(); + if (input !== void 0 && matrixVars !== void 0) { + input = input.replace(/\${{\s+/, "${{").replace(/\s+}}/, "}}"); + for (const [key, value] of Object.entries(matrixVars)) { + input = input.replace(`\${{matrix.${key}}}`, value); + } + } + if (input?.includes("${{")) { + throw new Error( + `Could not get ${inputName} input to ${actionName} since it contained an unrecognized dynamic value.` + ); + } + return input; +} +function getAnalyzeActionName() { + if (isInTestMode() || getTestingEnvironment() === "codeql-action-pr-checks") { + return "./analyze"; + } else { + return "github/codeql-action/analyze"; + } +} +function getCategoryInputOrThrow(workflow, jobName, matrixVars) { + return getInputOrThrow( + workflow, + jobName, + getAnalyzeActionName(), + "category", + matrixVars + ); +} +function getUploadInputOrThrow(workflow, jobName, matrixVars) { + return getInputOrThrow( + workflow, + jobName, + getAnalyzeActionName(), + "upload", + matrixVars + ); +} +function getCheckoutPathInputOrThrow(workflow, jobName, matrixVars) { + return getInputOrThrow( + workflow, + jobName, + getAnalyzeActionName(), + "checkout_path", + matrixVars + ) || getRequiredEnvParam("GITHUB_WORKSPACE"); +} + // src/init-action-post-helper.ts function createFailedUploadFailedSarifResult(error2) { const wrappedError = wrapError(error2); @@ -133734,7 +133731,7 @@ async function maybeUploadFailedSarif(config, repositoryNwo, features, logger) { } async function tryUploadSarifIfRunFailed(config, repositoryNwo, features, logger) { if (process.env["CODEQL_ACTION_ANALYZE_DID_COMPLETE_SUCCESSFULLY" /* ANALYZE_DID_COMPLETE_SUCCESSFULLY */] !== "true") { - core17.exportVariable( + core16.exportVariable( "CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */, process.env["CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */] ?? "JOB_STATUS_CONFIGURATION_ERROR" /* ConfigErrorStatus */ ); @@ -133757,7 +133754,7 @@ async function tryUploadSarifIfRunFailed(config, repositoryNwo, features, logger return createFailedUploadFailedSarifResult(e); } } else { - core17.exportVariable( + core16.exportVariable( "CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */, process.env["CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */] ?? 
"JOB_STATUS_SUCCESS" /* SuccessStatus */ ); @@ -133935,7 +133932,7 @@ async function runWrapper() { } } catch (unwrappedError) { const error2 = wrapError(unwrappedError); - core18.setFailed(error2.message); + core17.setFailed(error2.message); const statusReportBase2 = await createStatusReportBase( "init-post" /* InitPost */, getActionsStatus(error2), diff --git a/lib/init-action.js b/lib/init-action.js index 415bbdd1c1..47769e5f55 100644 --- a/lib/init-action.js +++ b/lib/init-action.js @@ -35463,7 +35463,7 @@ var require_internal_glob_options_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getOptions = void 0; - var core15 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); function getOptions(copy) { const result = { followSymbolicLinks: true, @@ -35473,15 +35473,15 @@ var require_internal_glob_options_helper = __commonJS({ if (copy) { if (typeof copy.followSymbolicLinks === "boolean") { result.followSymbolicLinks = copy.followSymbolicLinks; - core15.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); + core14.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); } if (typeof copy.implicitDescendants === "boolean") { result.implicitDescendants = copy.implicitDescendants; - core15.debug(`implicitDescendants '${result.implicitDescendants}'`); + core14.debug(`implicitDescendants '${result.implicitDescendants}'`); } if (typeof copy.omitBrokenSymbolicLinks === "boolean") { result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; - core15.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); + core14.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); } } return result; @@ -36935,7 +36935,7 @@ var require_internal_globber = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; - var core15 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var fs18 = __importStar4(require("fs")); var globOptionsHelper = __importStar4(require_internal_glob_options_helper()); var path20 = __importStar4(require("path")); @@ -36986,7 +36986,7 @@ var require_internal_globber = __commonJS({ } const stack = []; for (const searchPath of patternHelper.getSearchPaths(patterns)) { - core15.debug(`Search path '${searchPath}'`); + core14.debug(`Search path '${searchPath}'`); try { yield __await4(fs18.promises.lstat(searchPath)); } catch (err) { @@ -37058,7 +37058,7 @@ var require_internal_globber = __commonJS({ } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { - core15.debug(`Broken symlink '${item.path}'`); + core14.debug(`Broken symlink '${item.path}'`); return void 0; } throw new Error(`No information found for the path '${item.path}'. 
This may indicate a broken symbolic link.`); @@ -37074,7 +37074,7 @@ var require_internal_globber = __commonJS({ traversalChain.pop(); } if (traversalChain.some((x) => x === realPath)) { - core15.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); + core14.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); return void 0; } traversalChain.push(realPath); @@ -38398,7 +38398,7 @@ var require_cacheUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getRuntimeToken = exports2.getCacheVersion = exports2.assertDefined = exports2.getGnuTarPathOnWindows = exports2.getCacheFileName = exports2.getCompressionMethod = exports2.unlinkFile = exports2.resolvePaths = exports2.getArchiveFileSizeInBytes = exports2.createTempDirectory = void 0; - var core15 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var exec2 = __importStar4(require_exec()); var glob2 = __importStar4(require_glob()); var io7 = __importStar4(require_io()); @@ -38451,7 +38451,7 @@ var require_cacheUtils = __commonJS({ _e = false; const file = _c; const relativeFile = path20.relative(workspace, file).replace(new RegExp(`\\${path20.sep}`, "g"), "/"); - core15.debug(`Matched: ${relativeFile}`); + core14.debug(`Matched: ${relativeFile}`); if (relativeFile === "") { paths.push("."); } else { @@ -38481,7 +38481,7 @@ var require_cacheUtils = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { let versionOutput = ""; additionalArgs.push("--version"); - core15.debug(`Checking ${app} ${additionalArgs.join(" ")}`); + core14.debug(`Checking ${app} ${additionalArgs.join(" ")}`); try { yield exec2.exec(`${app}`, additionalArgs, { ignoreReturnCode: true, @@ -38492,10 +38492,10 @@ var require_cacheUtils = __commonJS({ } }); } catch (err) { - core15.debug(err.message); + core14.debug(err.message); } versionOutput = versionOutput.trim(); - core15.debug(versionOutput); + core14.debug(versionOutput); return versionOutput; }); } @@ -38503,7 +38503,7 @@ var require_cacheUtils = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { const versionOutput = yield getVersion("zstd", ["--quiet"]); const version = semver9.clean(versionOutput); - core15.debug(`zstd version: ${version}`); + core14.debug(`zstd version: ${version}`); if (versionOutput === "") { return constants_1.CompressionMethod.Gzip; } else { @@ -73815,7 +73815,7 @@ var require_uploadUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.uploadCacheArchiveSDK = exports2.UploadProgress = void 0; - var core15 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var storage_blob_1 = require_dist7(); var errors_1 = require_errors2(); var UploadProgress = class { @@ -73857,7 +73857,7 @@ var require_uploadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const uploadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core15.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); + core14.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -73912,14 +73912,14 @@ var require_uploadUtils = __commonJS({ }; try { uploadProgress.startDisplayTimer(); - core15.debug(`BlobClient: 
${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); + core14.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); const response = yield blockBlobClient.uploadFile(archivePath, uploadOptions); if (response._response.status >= 400) { throw new errors_1.InvalidResponseError(`uploadCacheArchiveSDK: upload failed with status code ${response._response.status}`); } return response; } catch (error2) { - core15.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error2.message}`); + core14.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error2.message}`); throw error2; } finally { uploadProgress.stopDisplayTimer(); @@ -73990,7 +73990,7 @@ var require_requestUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.retryHttpClientResponse = exports2.retryTypedResponse = exports2.retry = exports2.isRetryableStatusCode = exports2.isServerErrorStatusCode = exports2.isSuccessStatusCode = void 0; - var core15 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var http_client_1 = require_lib(); var constants_1 = require_constants10(); function isSuccessStatusCode(statusCode) { @@ -74051,9 +74051,9 @@ var require_requestUtils = __commonJS({ isRetryable = isRetryableStatusCode(statusCode); errorMessage = `Cache service responded with ${statusCode}`; } - core15.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); + core14.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); if (!isRetryable) { - core15.debug(`${name} - Error is not retryable`); + core14.debug(`${name} - Error is not retryable`); break; } yield sleep(delay2); @@ -74158,7 +74158,7 @@ var require_downloadUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.downloadCacheStorageSDK = exports2.downloadCacheHttpClientConcurrent = exports2.downloadCacheHttpClient = exports2.DownloadProgress = void 0; - var core15 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var http_client_1 = require_lib(); var storage_blob_1 = require_dist7(); var buffer = __importStar4(require("buffer")); @@ -74196,7 +74196,7 @@ var require_downloadUtils = __commonJS({ this.segmentIndex = this.segmentIndex + 1; this.segmentSize = segmentSize; this.receivedBytes = 0; - core15.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); + core14.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); } /** * Sets the number of bytes received for the current segment. 
@@ -74230,7 +74230,7 @@ var require_downloadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const downloadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core15.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); + core14.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -74280,7 +74280,7 @@ var require_downloadUtils = __commonJS({ })); downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => { downloadResponse.message.destroy(); - core15.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); + core14.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); }); yield pipeResponseToStream(downloadResponse, writeStream); const contentLengthHeader = downloadResponse.message.headers["content-length"]; @@ -74291,7 +74291,7 @@ var require_downloadUtils = __commonJS({ throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`); } } else { - core15.debug("Unable to validate download, no Content-Length header"); + core14.debug("Unable to validate download, no Content-Length header"); } }); } @@ -74411,7 +74411,7 @@ var require_downloadUtils = __commonJS({ const properties = yield client.getProperties(); const contentLength = (_a = properties.contentLength) !== null && _a !== void 0 ? _a : -1; if (contentLength < 0) { - core15.debug("Unable to determine content length, downloading file with http-client..."); + core14.debug("Unable to determine content length, downloading file with http-client..."); yield downloadCacheHttpClient(archiveLocation, archivePath); } else { const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH); @@ -74491,7 +74491,7 @@ var require_options = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getDownloadOptions = exports2.getUploadOptions = void 0; - var core15 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); function getUploadOptions(copy) { const result = { useAzureSdk: false, @@ -74511,9 +74511,9 @@ var require_options = __commonJS({ } result.uploadConcurrency = !isNaN(Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) ? Math.min(32, Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) : result.uploadConcurrency; result.uploadChunkSize = !isNaN(Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"])) ? 
Math.min(128 * 1024 * 1024, Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"]) * 1024 * 1024) : result.uploadChunkSize; - core15.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core15.debug(`Upload concurrency: ${result.uploadConcurrency}`); - core15.debug(`Upload chunk size: ${result.uploadChunkSize}`); + core14.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core14.debug(`Upload concurrency: ${result.uploadConcurrency}`); + core14.debug(`Upload chunk size: ${result.uploadChunkSize}`); return result; } exports2.getUploadOptions = getUploadOptions; @@ -74550,12 +74550,12 @@ var require_options = __commonJS({ if (segmentDownloadTimeoutMins && !isNaN(Number(segmentDownloadTimeoutMins)) && isFinite(Number(segmentDownloadTimeoutMins))) { result.segmentTimeoutInMs = Number(segmentDownloadTimeoutMins) * 60 * 1e3; } - core15.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core15.debug(`Download concurrency: ${result.downloadConcurrency}`); - core15.debug(`Request timeout (ms): ${result.timeoutInMs}`); - core15.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); - core15.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); - core15.debug(`Lookup only: ${result.lookupOnly}`); + core14.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core14.debug(`Download concurrency: ${result.downloadConcurrency}`); + core14.debug(`Request timeout (ms): ${result.timeoutInMs}`); + core14.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); + core14.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); + core14.debug(`Lookup only: ${result.lookupOnly}`); return result; } exports2.getDownloadOptions = getDownloadOptions; @@ -74735,7 +74735,7 @@ var require_cacheHttpClient = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.saveCache = exports2.reserveCache = exports2.downloadCache = exports2.getCacheEntry = void 0; - var core15 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var http_client_1 = require_lib(); var auth_1 = require_auth(); var fs18 = __importStar4(require("fs")); @@ -74753,7 +74753,7 @@ var require_cacheHttpClient = __commonJS({ throw new Error("Cache Service Url not found, unable to restore cache."); } const url = `${baseUrl}_apis/artifactcache/${resource}`; - core15.debug(`Resource Url: ${url}`); + core14.debug(`Resource Url: ${url}`); return url; } function createAcceptHeader(type2, apiVersion) { @@ -74781,7 +74781,7 @@ var require_cacheHttpClient = __commonJS({ return httpClient.getJson(getCacheApiUrl(resource)); })); if (response.statusCode === 204) { - if (core15.isDebug()) { + if (core14.isDebug()) { yield printCachesListForDiagnostics(keys[0], httpClient, version); } return null; @@ -74794,9 +74794,9 @@ var require_cacheHttpClient = __commonJS({ if (!cacheDownloadUrl) { throw new Error("Cache not found."); } - core15.setSecret(cacheDownloadUrl); - core15.debug(`Cache Result:`); - core15.debug(JSON.stringify(cacheResult)); + core14.setSecret(cacheDownloadUrl); + core14.debug(`Cache Result:`); + core14.debug(JSON.stringify(cacheResult)); return cacheResult; }); } @@ -74811,10 +74811,10 @@ var require_cacheHttpClient = __commonJS({ const cacheListResult = response.result; const totalCount = cacheListResult === null || cacheListResult === void 0 ? 
void 0 : cacheListResult.totalCount; if (totalCount && totalCount > 0) { - core15.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key + core14.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key Other caches with similar key:`); for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) { - core15.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); + core14.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); } } } @@ -74859,7 +74859,7 @@ Other caches with similar key:`); } function uploadChunk(httpClient, resourceUrl, openStream, start, end) { return __awaiter4(this, void 0, void 0, function* () { - core15.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); + core14.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); const additionalHeaders = { "Content-Type": "application/octet-stream", "Content-Range": getContentRange(start, end) @@ -74881,7 +74881,7 @@ Other caches with similar key:`); const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency); const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize); const parallelUploads = [...new Array(concurrency).keys()]; - core15.debug("Awaiting all uploads"); + core14.debug("Awaiting all uploads"); let offset = 0; try { yield Promise.all(parallelUploads.map(() => __awaiter4(this, void 0, void 0, function* () { @@ -74924,16 +74924,16 @@ Other caches with similar key:`); yield (0, uploadUtils_1.uploadCacheArchiveSDK)(signedUploadURL, archivePath, options); } else { const httpClient = createHttpClient(); - core15.debug("Upload cache"); + core14.debug("Upload cache"); yield uploadFile(httpClient, cacheId, archivePath, options); - core15.debug("Commiting cache"); + core14.debug("Commiting cache"); const cacheSize = utils.getArchiveFileSizeInBytes(archivePath); - core15.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); + core14.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); const commitCacheResponse = yield commitCache(httpClient, 
cacheId, cacheSize); if (!(0, requestUtils_1.isSuccessStatusCode)(commitCacheResponse.statusCode)) { throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`); } - core15.info("Cache saved successfully"); + core14.info("Cache saved successfully"); } }); } @@ -80385,7 +80385,7 @@ var require_cache3 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.saveCache = exports2.restoreCache = exports2.isFeatureAvailable = exports2.FinalizeCacheError = exports2.ReserveCacheError = exports2.ValidationError = void 0; - var core15 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var path20 = __importStar4(require("path")); var utils = __importStar4(require_cacheUtils()); var cacheHttpClient = __importStar4(require_cacheHttpClient()); @@ -80445,7 +80445,7 @@ var require_cache3 = __commonJS({ function restoreCache4(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { return __awaiter4(this, void 0, void 0, function* () { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core15.debug(`Cache service version: ${cacheServiceVersion}`); + core14.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); switch (cacheServiceVersion) { case "v2": @@ -80461,8 +80461,8 @@ var require_cache3 = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core15.debug("Resolved Keys:"); - core15.debug(JSON.stringify(keys)); + core14.debug("Resolved Keys:"); + core14.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -80480,19 +80480,19 @@ var require_cache3 = __commonJS({ return void 0; } if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { - core15.info("Lookup only - skipping download"); + core14.info("Lookup only - skipping download"); return cacheEntry.cacheKey; } archivePath = path20.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core15.debug(`Archive Path: ${archivePath}`); + core14.debug(`Archive Path: ${archivePath}`); yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); - if (core15.isDebug()) { + if (core14.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core15.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + core14.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); yield (0, tar_1.extractTar)(archivePath, compressionMethod); - core15.info("Cache restored successfully"); + core14.info("Cache restored successfully"); return cacheEntry.cacheKey; } catch (error2) { const typedError = error2; @@ -80500,16 +80500,16 @@ var require_cache3 = __commonJS({ throw error2; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core15.error(`Failed to restore: ${error2.message}`); + core14.error(`Failed to restore: ${error2.message}`); } else { - core15.warning(`Failed to restore: ${error2.message}`); + core14.warning(`Failed to restore: ${error2.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error2) { - core15.debug(`Failed to delete archive: ${error2}`); + core14.debug(`Failed to delete archive: ${error2}`); } } return void 0; @@ -80520,8 +80520,8 @@ var require_cache3 = __commonJS({ options = Object.assign(Object.assign({}, options), { useAzureSdk: true }); restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core15.debug("Resolved Keys:"); - core15.debug(JSON.stringify(keys)); + core14.debug("Resolved Keys:"); + core14.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -80539,30 +80539,30 @@ var require_cache3 = __commonJS({ }; const response = yield twirpClient.GetCacheEntryDownloadURL(request); if (!response.ok) { - core15.debug(`Cache not found for version ${request.version} of keys: ${keys.join(", ")}`); + core14.debug(`Cache not found for version ${request.version} of keys: ${keys.join(", ")}`); return void 0; } const isRestoreKeyMatch = request.key !== response.matchedKey; if (isRestoreKeyMatch) { - core15.info(`Cache hit for restore-key: ${response.matchedKey}`); + core14.info(`Cache hit for restore-key: ${response.matchedKey}`); } else { - core15.info(`Cache hit for: ${response.matchedKey}`); + core14.info(`Cache hit for: ${response.matchedKey}`); } if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { - core15.info("Lookup only - skipping download"); + core14.info("Lookup only - skipping download"); return response.matchedKey; } archivePath = path20.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core15.debug(`Archive path: ${archivePath}`); - core15.debug(`Starting download of archive to: ${archivePath}`); + core14.debug(`Archive path: ${archivePath}`); + core14.debug(`Starting download of archive to: ${archivePath}`); yield cacheHttpClient.downloadCache(response.signedDownloadUrl, archivePath, options); const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core15.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); - if (core15.isDebug()) { + core14.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + if (core14.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } yield (0, tar_1.extractTar)(archivePath, compressionMethod); - core15.info("Cache restored successfully"); + core14.info("Cache restored successfully"); return response.matchedKey; } catch (error2) { const typedError = error2; @@ -80570,9 +80570,9 @@ var require_cache3 = __commonJS({ throw error2; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core15.error(`Failed to restore: ${error2.message}`); + core14.error(`Failed to restore: ${error2.message}`); } else { - core15.warning(`Failed to restore: ${error2.message}`); + core14.warning(`Failed to restore: ${error2.message}`); } } } finally { @@ -80581,7 +80581,7 @@ var require_cache3 = __commonJS({ yield utils.unlinkFile(archivePath); } } catch (error2) { - core15.debug(`Failed to delete archive: ${error2}`); + core14.debug(`Failed to delete archive: ${error2}`); } } return void 0; @@ -80590,7 +80590,7 @@ var require_cache3 = __commonJS({ function saveCache4(paths, key, options, enableCrossOsArchive = false) { return __awaiter4(this, void 0, void 0, function* () { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core15.debug(`Cache service version: ${cacheServiceVersion}`); + core14.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); checkKey(key); switch (cacheServiceVersion) { @@ -80609,26 +80609,26 @@ var require_cache3 = __commonJS({ const compressionMethod = yield utils.getCompressionMethod(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core15.debug("Cache Paths:"); - core15.debug(`${JSON.stringify(cachePaths)}`); + core14.debug("Cache Paths:"); + core14.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = path20.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core15.debug(`Archive Path: ${archivePath}`); + core14.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core15.isDebug()) { + if (core14.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const fileSizeLimit = 10 * 1024 * 1024 * 1024; const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core15.debug(`File Size: ${archiveFileSize}`); + core14.debug(`File Size: ${archiveFileSize}`); if 
(archiveFileSize > fileSizeLimit && !(0, config_1.isGhes)()) { throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); } - core15.debug("Reserving Cache"); + core14.debug("Reserving Cache"); const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, { compressionMethod, enableCrossOsArchive, @@ -80641,26 +80641,26 @@ var require_cache3 = __commonJS({ } else { throw new ReserveCacheError2(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? void 0 : _e.message}`); } - core15.debug(`Saving Cache (ID: ${cacheId})`); + core14.debug(`Saving Cache (ID: ${cacheId})`); yield cacheHttpClient.saveCache(cacheId, archivePath, "", options); } catch (error2) { const typedError = error2; if (typedError.name === ValidationError.name) { throw error2; } else if (typedError.name === ReserveCacheError2.name) { - core15.info(`Failed to save: ${typedError.message}`); + core14.info(`Failed to save: ${typedError.message}`); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core15.error(`Failed to save: ${typedError.message}`); + core14.error(`Failed to save: ${typedError.message}`); } else { - core15.warning(`Failed to save: ${typedError.message}`); + core14.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error2) { - core15.debug(`Failed to delete archive: ${error2}`); + core14.debug(`Failed to delete archive: ${error2}`); } } return cacheId; @@ -80673,23 +80673,23 @@ var require_cache3 = __commonJS({ const twirpClient = cacheTwirpClient.internalCacheTwirpClient(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core15.debug("Cache Paths:"); - core15.debug(`${JSON.stringify(cachePaths)}`); + core14.debug("Cache Paths:"); + core14.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = path20.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core15.debug(`Archive Path: ${archivePath}`); + core14.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core15.isDebug()) { + if (core14.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core15.debug(`File Size: ${archiveFileSize}`); + core14.debug(`File Size: ${archiveFileSize}`); options.archiveSizeBytes = archiveFileSize; - core15.debug("Reserving Cache"); + core14.debug("Reserving Cache"); const version = utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive); const request = { key, @@ -80700,16 +80700,16 @@ var require_cache3 = __commonJS({ const response = yield twirpClient.CreateCacheEntry(request); if (!response.ok) { if (response.message) { - core15.warning(`Cache reservation failed: ${response.message}`); + core14.warning(`Cache reservation failed: ${response.message}`); } throw new Error(response.message || "Response was not ok"); } signedUploadUrl = 
response.signedUploadUrl; } catch (error2) { - core15.debug(`Failed to reserve cache: ${error2}`); + core14.debug(`Failed to reserve cache: ${error2}`); throw new ReserveCacheError2(`Unable to reserve cache with key ${key}, another job may be creating this cache.`); } - core15.debug(`Attempting to upload cache located at: ${archivePath}`); + core14.debug(`Attempting to upload cache located at: ${archivePath}`); yield cacheHttpClient.saveCache(cacheId, archivePath, signedUploadUrl, options); const finalizeRequest = { key, @@ -80717,7 +80717,7 @@ var require_cache3 = __commonJS({ sizeBytes: `${archiveFileSize}` }; const finalizeResponse = yield twirpClient.FinalizeCacheEntryUpload(finalizeRequest); - core15.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); + core14.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); if (!finalizeResponse.ok) { if (finalizeResponse.message) { throw new FinalizeCacheError(finalizeResponse.message); @@ -80730,21 +80730,21 @@ var require_cache3 = __commonJS({ if (typedError.name === ValidationError.name) { throw error2; } else if (typedError.name === ReserveCacheError2.name) { - core15.info(`Failed to save: ${typedError.message}`); + core14.info(`Failed to save: ${typedError.message}`); } else if (typedError.name === FinalizeCacheError.name) { - core15.warning(typedError.message); + core14.warning(typedError.message); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core15.error(`Failed to save: ${typedError.message}`); + core14.error(`Failed to save: ${typedError.message}`); } else { - core15.warning(`Failed to save: ${typedError.message}`); + core14.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error2) { - core15.debug(`Failed to delete archive: ${error2}`); + core14.debug(`Failed to delete archive: ${error2}`); } } return cacheId; @@ -80786,7 +80786,7 @@ var require_internal_glob_options_helper2 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getOptions = void 0; - var core15 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); function getOptions(copy) { const result = { followSymbolicLinks: true, @@ -80798,23 +80798,23 @@ var require_internal_glob_options_helper2 = __commonJS({ if (copy) { if (typeof copy.followSymbolicLinks === "boolean") { result.followSymbolicLinks = copy.followSymbolicLinks; - core15.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); + core14.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); } if (typeof copy.implicitDescendants === "boolean") { result.implicitDescendants = copy.implicitDescendants; - core15.debug(`implicitDescendants '${result.implicitDescendants}'`); + core14.debug(`implicitDescendants '${result.implicitDescendants}'`); } if (typeof copy.matchDirectories === "boolean") { result.matchDirectories = copy.matchDirectories; - core15.debug(`matchDirectories '${result.matchDirectories}'`); + core14.debug(`matchDirectories '${result.matchDirectories}'`); } if (typeof copy.omitBrokenSymbolicLinks === "boolean") { result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; - core15.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); + core14.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); } if (typeof copy.excludeHiddenFiles === "boolean") { result.excludeHiddenFiles = copy.excludeHiddenFiles; - 
core15.debug(`excludeHiddenFiles '${result.excludeHiddenFiles}'`); + core14.debug(`excludeHiddenFiles '${result.excludeHiddenFiles}'`); } } return result; @@ -81498,7 +81498,7 @@ var require_internal_globber2 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; - var core15 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var fs18 = __importStar4(require("fs")); var globOptionsHelper = __importStar4(require_internal_glob_options_helper2()); var path20 = __importStar4(require("path")); @@ -81551,7 +81551,7 @@ var require_internal_globber2 = __commonJS({ } const stack = []; for (const searchPath of patternHelper.getSearchPaths(patterns)) { - core15.debug(`Search path '${searchPath}'`); + core14.debug(`Search path '${searchPath}'`); try { yield __await4(fs18.promises.lstat(searchPath)); } catch (err) { @@ -81626,7 +81626,7 @@ var require_internal_globber2 = __commonJS({ } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { - core15.debug(`Broken symlink '${item.path}'`); + core14.debug(`Broken symlink '${item.path}'`); return void 0; } throw new Error(`No information found for the path '${item.path}'. This may indicate a broken symbolic link.`); @@ -81642,7 +81642,7 @@ var require_internal_globber2 = __commonJS({ traversalChain.pop(); } if (traversalChain.some((x) => x === realPath)) { - core15.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); + core14.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); return void 0; } traversalChain.push(realPath); @@ -81735,7 +81735,7 @@ var require_internal_hash_files = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.hashFiles = void 0; var crypto2 = __importStar4(require("crypto")); - var core15 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var fs18 = __importStar4(require("fs")); var stream2 = __importStar4(require("stream")); var util = __importStar4(require("util")); @@ -81744,7 +81744,7 @@ var require_internal_hash_files = __commonJS({ var _a, e_1, _b, _c; var _d; return __awaiter4(this, void 0, void 0, function* () { - const writeDelegate = verbose ? core15.info : core15.debug; + const writeDelegate = verbose ? core14.info : core14.debug; let hasMatch = false; const githubWorkspace = currentWorkspace ? currentWorkspace : (_d = process.env["GITHUB_WORKSPACE"]) !== null && _d !== void 0 ? 
_d : process.cwd(); const result = crypto2.createHash("sha256"); @@ -82049,7 +82049,7 @@ var require_retry_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.RetryHelper = void 0; - var core15 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var RetryHelper = class { constructor(maxAttempts, minSeconds, maxSeconds) { if (maxAttempts < 1) { @@ -82072,10 +82072,10 @@ var require_retry_helper = __commonJS({ if (isRetryable && !isRetryable(err)) { throw err; } - core15.info(err.message); + core14.info(err.message); } const seconds = this.getSleepAmount(); - core15.info(`Waiting ${seconds} seconds before trying again`); + core14.info(`Waiting ${seconds} seconds before trying again`); yield this.sleep(seconds); attempt++; } @@ -82155,7 +82155,7 @@ var require_tool_cache = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.evaluateVersions = exports2.isExplicitVersion = exports2.findFromManifest = exports2.getManifestFromRepo = exports2.findAllVersions = exports2.find = exports2.cacheFile = exports2.cacheDir = exports2.extractZip = exports2.extractXar = exports2.extractTar = exports2.extract7z = exports2.downloadTool = exports2.HTTPError = void 0; - var core15 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var io7 = __importStar4(require_io()); var crypto2 = __importStar4(require("crypto")); var fs18 = __importStar4(require("fs")); @@ -82184,8 +82184,8 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { dest = dest || path20.join(_getTempDirectory(), crypto2.randomUUID()); yield io7.mkdirP(path20.dirname(dest)); - core15.debug(`Downloading ${url}`); - core15.debug(`Destination ${dest}`); + core14.debug(`Downloading ${url}`); + core14.debug(`Destination ${dest}`); const maxAttempts = 3; const minSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS", 10); const maxSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS", 20); @@ -82212,7 +82212,7 @@ var require_tool_cache = __commonJS({ allowRetries: false }); if (auth) { - core15.debug("set auth"); + core14.debug("set auth"); if (headers === void 0) { headers = {}; } @@ -82221,7 +82221,7 @@ var require_tool_cache = __commonJS({ const response = yield http.get(url, headers); if (response.message.statusCode !== 200) { const err = new HTTPError2(response.message.statusCode); - core15.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); + core14.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); throw err; } const pipeline = util.promisify(stream2.pipeline); @@ -82230,16 +82230,16 @@ var require_tool_cache = __commonJS({ let succeeded = false; try { yield pipeline(readStream, fs18.createWriteStream(dest)); - core15.debug("download complete"); + core14.debug("download complete"); succeeded = true; return dest; } finally { if (!succeeded) { - core15.debug("download failed"); + core14.debug("download failed"); try { yield io7.rmRF(dest); } catch (err) { - core15.debug(`Failed to delete '${dest}'. ${err.message}`); + core14.debug(`Failed to delete '${dest}'. ${err.message}`); } } } @@ -82254,7 +82254,7 @@ var require_tool_cache = __commonJS({ process.chdir(dest); if (_7zPath) { try { - const logLevel = core15.isDebug() ? "-bb1" : "-bb0"; + const logLevel = core14.isDebug() ? 
"-bb1" : "-bb0"; const args = [ "x", logLevel, @@ -82304,7 +82304,7 @@ var require_tool_cache = __commonJS({ throw new Error("parameter 'file' is required"); } dest = yield _createExtractFolder(dest); - core15.debug("Checking tar --version"); + core14.debug("Checking tar --version"); let versionOutput = ""; yield (0, exec_1.exec)("tar --version", [], { ignoreReturnCode: true, @@ -82314,7 +82314,7 @@ var require_tool_cache = __commonJS({ stderr: (data) => versionOutput += data.toString() } }); - core15.debug(versionOutput.trim()); + core14.debug(versionOutput.trim()); const isGnuTar = versionOutput.toUpperCase().includes("GNU TAR"); let args; if (flags instanceof Array) { @@ -82322,7 +82322,7 @@ var require_tool_cache = __commonJS({ } else { args = [flags]; } - if (core15.isDebug() && !flags.includes("v")) { + if (core14.isDebug() && !flags.includes("v")) { args.push("-v"); } let destArg = dest; @@ -82354,7 +82354,7 @@ var require_tool_cache = __commonJS({ args = [flags]; } args.push("-x", "-C", dest, "-f", file); - if (core15.isDebug()) { + if (core14.isDebug()) { args.push("-v"); } const xarPath = yield io7.which("xar", true); @@ -82399,7 +82399,7 @@ var require_tool_cache = __commonJS({ "-Command", pwshCommand ]; - core15.debug(`Using pwsh at path: ${pwshPath}`); + core14.debug(`Using pwsh at path: ${pwshPath}`); yield (0, exec_1.exec)(`"${pwshPath}"`, args); } else { const powershellCommand = [ @@ -82419,7 +82419,7 @@ var require_tool_cache = __commonJS({ powershellCommand ]; const powershellPath = yield io7.which("powershell", true); - core15.debug(`Using powershell at path: ${powershellPath}`); + core14.debug(`Using powershell at path: ${powershellPath}`); yield (0, exec_1.exec)(`"${powershellPath}"`, args); } }); @@ -82428,7 +82428,7 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { const unzipPath = yield io7.which("unzip", true); const args = [file]; - if (!core15.isDebug()) { + if (!core14.isDebug()) { args.unshift("-q"); } args.unshift("-o"); @@ -82439,8 +82439,8 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { version = semver9.clean(version) || version; arch2 = arch2 || os5.arch(); - core15.debug(`Caching tool ${tool} ${version} ${arch2}`); - core15.debug(`source dir: ${sourceDir}`); + core14.debug(`Caching tool ${tool} ${version} ${arch2}`); + core14.debug(`source dir: ${sourceDir}`); if (!fs18.statSync(sourceDir).isDirectory()) { throw new Error("sourceDir is not a directory"); } @@ -82458,14 +82458,14 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { version = semver9.clean(version) || version; arch2 = arch2 || os5.arch(); - core15.debug(`Caching tool ${tool} ${version} ${arch2}`); - core15.debug(`source file: ${sourceFile}`); + core14.debug(`Caching tool ${tool} ${version} ${arch2}`); + core14.debug(`source file: ${sourceFile}`); if (!fs18.statSync(sourceFile).isFile()) { throw new Error("sourceFile is not a file"); } const destFolder = yield _createToolPath(tool, version, arch2); const destPath = path20.join(destFolder, targetFile); - core15.debug(`destination file ${destPath}`); + core14.debug(`destination file ${destPath}`); yield io7.cp(sourceFile, destPath); _completeToolPath(tool, version, arch2); return destFolder; @@ -82489,12 +82489,12 @@ var require_tool_cache = __commonJS({ if (versionSpec) { versionSpec = semver9.clean(versionSpec) || ""; const cachePath = path20.join(_getCacheDirectory(), toolName, versionSpec, arch2); - 
core15.debug(`checking cache: ${cachePath}`); + core14.debug(`checking cache: ${cachePath}`); if (fs18.existsSync(cachePath) && fs18.existsSync(`${cachePath}.complete`)) { - core15.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); + core14.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); toolPath = cachePath; } else { - core15.debug("not found"); + core14.debug("not found"); } } return toolPath; @@ -82525,7 +82525,7 @@ var require_tool_cache = __commonJS({ const http = new httpm.HttpClient("tool-cache"); const headers = {}; if (auth) { - core15.debug("set auth"); + core14.debug("set auth"); headers.authorization = auth; } const response = yield http.getJson(treeUrl, headers); @@ -82546,7 +82546,7 @@ var require_tool_cache = __commonJS({ try { releases = JSON.parse(versionsRaw); } catch (_a) { - core15.debug("Invalid json"); + core14.debug("Invalid json"); } } return releases; @@ -82572,7 +82572,7 @@ var require_tool_cache = __commonJS({ function _createToolPath(tool, version, arch2) { return __awaiter4(this, void 0, void 0, function* () { const folderPath = path20.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || ""); - core15.debug(`destination ${folderPath}`); + core14.debug(`destination ${folderPath}`); const markerPath = `${folderPath}.complete`; yield io7.rmRF(folderPath); yield io7.rmRF(markerPath); @@ -82584,19 +82584,19 @@ var require_tool_cache = __commonJS({ const folderPath = path20.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || ""); const markerPath = `${folderPath}.complete`; fs18.writeFileSync(markerPath, ""); - core15.debug("finished caching tool"); + core14.debug("finished caching tool"); } function isExplicitVersion(versionSpec) { const c = semver9.clean(versionSpec) || ""; - core15.debug(`isExplicit: ${c}`); + core14.debug(`isExplicit: ${c}`); const valid3 = semver9.valid(c) != null; - core15.debug(`explicit? ${valid3}`); + core14.debug(`explicit? ${valid3}`); return valid3; } exports2.isExplicitVersion = isExplicitVersion; function evaluateVersions(versions, versionSpec) { let version = ""; - core15.debug(`evaluating ${versions.length} versions`); + core14.debug(`evaluating ${versions.length} versions`); versions = versions.sort((a, b) => { if (semver9.gt(a, b)) { return 1; @@ -82612,9 +82612,9 @@ var require_tool_cache = __commonJS({ } } if (version) { - core15.debug(`matched: ${version}`); + core14.debug(`matched: ${version}`); } else { - core15.debug("match not found"); + core14.debug("match not found"); } return version; } @@ -83193,7 +83193,7 @@ var require_follow_redirects = __commonJS({ // src/init-action.ts var fs17 = __toESM(require("fs")); var path19 = __toESM(require("path")); -var core14 = __toESM(require_core()); +var core13 = __toESM(require_core()); var io6 = __toESM(require_io()); var semver8 = __toESM(require_semver2()); @@ -89916,9 +89916,8 @@ function flushDiagnostics(config) { } // src/init.ts -var fs16 = __toESM(require("fs")); -var path18 = __toESM(require("path")); -var core12 = __toESM(require_core()); +var fs15 = __toESM(require("fs")); +var path17 = __toESM(require("path")); var toolrunner4 = __toESM(require_toolrunner()); var io5 = __toESM(require_io()); @@ -91655,193 +91654,7 @@ async function getJobRunUuidSarifOptions(codeql) { ) ? 
[`--sarif-run-property=jobRunUuid=${jobRunUuid}`] : []; } -// src/workflow.ts -var fs15 = __toESM(require("fs")); -var path17 = __toESM(require("path")); -var import_zlib = __toESM(require("zlib")); -var core11 = __toESM(require_core()); -function toCodedErrors(errors) { - return Object.entries(errors).reduce( - (acc, [code, message]) => { - acc[code] = { message, code }; - return acc; - }, - {} - ); -} -var WorkflowErrors = toCodedErrors({ - MissingPushHook: `Please specify an on.push hook to analyze and see code scanning alerts from the default branch on the Security tab.`, - CheckoutWrongHead: `git checkout HEAD^2 is no longer necessary. Please remove this step as Code Scanning recommends analyzing the merge commit for best results.`, - InconsistentActionVersion: `Not all workflow steps that use \`github/codeql-action\` actions use the same version. Please ensure that all such steps use the same version to avoid compatibility issues.` -}); -async function groupLanguagesByExtractor(languages, codeql) { - const resolveResult = await codeql.betterResolveLanguages(); - if (!resolveResult.aliases) { - return void 0; - } - const aliases = resolveResult.aliases; - const languagesByExtractor = {}; - for (const language of languages) { - const extractorName = aliases[language] || language; - if (!languagesByExtractor[extractorName]) { - languagesByExtractor[extractorName] = []; - } - languagesByExtractor[extractorName].push(language); - } - return languagesByExtractor; -} -async function getWorkflowErrors(doc, codeql) { - const errors = []; - const jobName = process.env.GITHUB_JOB; - if (jobName) { - const job = doc?.jobs?.[jobName]; - if (job?.strategy?.matrix?.language) { - const matrixLanguages = job.strategy.matrix.language; - if (Array.isArray(matrixLanguages)) { - const matrixLanguagesByExtractor = await groupLanguagesByExtractor( - matrixLanguages, - codeql - ); - if (matrixLanguagesByExtractor !== void 0) { - for (const [extractor, languages] of Object.entries( - matrixLanguagesByExtractor - )) { - if (languages.length > 1) { - errors.push({ - message: `CodeQL language '${extractor}' is referenced by more than one entry in the 'language' matrix parameter for job '${jobName}'. This may result in duplicate alerts. 
Please edit the 'language' matrix parameter to keep only one of the following: ${languages.map((language) => `'${language}'`).join(", ")}.`, - code: "DuplicateLanguageInMatrix" - }); - } - } - } - } - } - const steps = job?.steps; - if (Array.isArray(steps)) { - for (const step of steps) { - if (step?.run === "git checkout HEAD^2") { - errors.push(WorkflowErrors.CheckoutWrongHead); - break; - } - } - } - } - const codeqlStepRefs = []; - for (const job of Object.values(doc?.jobs || {})) { - if (Array.isArray(job.steps)) { - for (const step of job.steps) { - if (step.uses?.startsWith("github/codeql-action/")) { - const parts = step.uses.split("@"); - if (parts.length >= 2) { - codeqlStepRefs.push(parts[parts.length - 1]); - } - } - } - } - } - if (codeqlStepRefs.length > 0 && !codeqlStepRefs.every((ref) => ref === codeqlStepRefs[0])) { - errors.push(WorkflowErrors.InconsistentActionVersion); - } - const hasPushTrigger = hasWorkflowTrigger("push", doc); - const hasPullRequestTrigger = hasWorkflowTrigger("pull_request", doc); - const hasWorkflowCallTrigger = hasWorkflowTrigger("workflow_call", doc); - if (hasPullRequestTrigger && !hasPushTrigger && !hasWorkflowCallTrigger) { - errors.push(WorkflowErrors.MissingPushHook); - } - return errors; -} -function hasWorkflowTrigger(triggerName, doc) { - if (!doc.on) { - return false; - } - if (typeof doc.on === "string") { - return doc.on === triggerName; - } - if (Array.isArray(doc.on)) { - return doc.on.includes(triggerName); - } - return Object.prototype.hasOwnProperty.call(doc.on, triggerName); -} -async function validateWorkflow(codeql, logger) { - let workflow; - try { - workflow = await getWorkflow(logger); - } catch (e) { - return `error: getWorkflow() failed: ${String(e)}`; - } - let workflowErrors; - try { - workflowErrors = await getWorkflowErrors(workflow, codeql); - } catch (e) { - return `error: getWorkflowErrors() failed: ${String(e)}`; - } - if (workflowErrors.length > 0) { - let message; - try { - message = formatWorkflowErrors(workflowErrors); - } catch (e) { - return `error: formatWorkflowErrors() failed: ${String(e)}`; - } - core11.warning(message); - } - return formatWorkflowCause(workflowErrors); -} -function formatWorkflowErrors(errors) { - const issuesWere = errors.length === 1 ? "issue was" : "issues were"; - const errorsList = errors.map((e) => e.message).join(" "); - return `${errors.length} ${issuesWere} detected with this workflow: ${errorsList}`; -} -function formatWorkflowCause(errors) { - if (errors.length === 0) { - return void 0; - } - return errors.map((e) => e.code).join(","); -} -async function getWorkflow(logger) { - const maybeWorkflow = process.env["CODE_SCANNING_WORKFLOW_FILE"]; - if (maybeWorkflow) { - logger.debug( - "Using the workflow specified by the CODE_SCANNING_WORKFLOW_FILE environment variable." 
- ); - return load( - import_zlib.default.gunzipSync(Buffer.from(maybeWorkflow, "base64")).toString() - ); - } - const workflowPath = await getWorkflowAbsolutePath(logger); - return load(fs15.readFileSync(workflowPath, "utf-8")); -} -async function getWorkflowAbsolutePath(logger) { - const relativePath = await getWorkflowRelativePath(); - const absolutePath = path17.join( - getRequiredEnvParam("GITHUB_WORKSPACE"), - relativePath - ); - if (fs15.existsSync(absolutePath)) { - logger.debug( - `Derived the following absolute path for the currently executing workflow: ${absolutePath}.` - ); - return absolutePath; - } - throw new Error( - `Expected to find a code scanning workflow file at ${absolutePath}, but no such file existed. This can happen if the currently running workflow checks out a branch that doesn't contain the corresponding workflow file.` - ); -} - // src/init.ts -async function checkWorkflow(logger, codeql) { - if (!isDynamicWorkflow() && process.env["CODEQL_ACTION_SKIP_WORKFLOW_VALIDATION" /* SKIP_WORKFLOW_VALIDATION */] !== "true") { - core12.startGroup("Validating workflow"); - const validateWorkflowResult = await validateWorkflow(codeql, logger); - if (validateWorkflowResult === void 0) { - logger.info("Detected no issues with the code scanning workflow."); - } else { - logger.warning( - `Unable to validate code scanning workflow: ${validateWorkflowResult}` - ); - } - core12.endGroup(); - } -} async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger) { logger.startGroup("Setup CodeQL tools"); const { @@ -91876,7 +91689,7 @@ async function initConfig2(features, inputs) { }); } async function runDatabaseInitCluster(databaseInitEnvironment, codeql, config, sourceRoot, processName, qlconfigFile, logger) { - fs16.mkdirSync(config.dbLocation, { recursive: true }); + fs15.mkdirSync(config.dbLocation, { recursive: true }); await wrapEnvironment( databaseInitEnvironment, async () => await codeql.databaseInitCluster( @@ -91911,25 +91724,25 @@ async function checkPacksForOverlayCompatibility(codeql, config, logger) { } function checkPackForOverlayCompatibility(packDir, codeQlOverlayVersion, logger) { try { - let qlpackPath = path18.join(packDir, "qlpack.yml"); - if (!fs16.existsSync(qlpackPath)) { - qlpackPath = path18.join(packDir, "codeql-pack.yml"); + let qlpackPath = path17.join(packDir, "qlpack.yml"); + if (!fs15.existsSync(qlpackPath)) { + qlpackPath = path17.join(packDir, "codeql-pack.yml"); } const qlpackContents = load( - fs16.readFileSync(qlpackPath, "utf8") + fs15.readFileSync(qlpackPath, "utf8") ); if (!qlpackContents.buildMetadata) { return true; } - const packInfoPath = path18.join(packDir, ".packinfo"); - if (!fs16.existsSync(packInfoPath)) { + const packInfoPath = path17.join(packDir, ".packinfo"); + if (!fs15.existsSync(packInfoPath)) { logger.warning( `The query pack at ${packDir} does not have a .packinfo file, so it cannot support overlay analysis. 
Recompiling the query pack with the latest CodeQL CLI should solve this problem.` ); return false; } const packInfoFileContents = JSON.parse( - fs16.readFileSync(packInfoPath, "utf8") + fs15.readFileSync(packInfoPath, "utf8") ); const packOverlayVersion = packInfoFileContents.overlayVersion; if (typeof packOverlayVersion !== "number") { @@ -91954,7 +91767,7 @@ function checkPackForOverlayCompatibility(packDir, codeQlOverlayVersion, logger) } async function checkInstallPython311(languages, codeql) { if (languages.includes("python" /* python */) && process.platform === "win32" && !(await codeql.getVersion()).features?.supportsPython312) { - const script = path18.resolve( + const script = path17.resolve( __dirname, "../python-setup", "check_python12.ps1" @@ -91964,8 +91777,8 @@ async function checkInstallPython311(languages, codeql) { ]).exec(); } } -function cleanupDatabaseClusterDirectory(config, logger, options = {}, rmSync2 = fs16.rmSync) { - if (fs16.existsSync(config.dbLocation) && (fs16.statSync(config.dbLocation).isFile() || fs16.readdirSync(config.dbLocation).length > 0)) { +function cleanupDatabaseClusterDirectory(config, logger, options = {}, rmSync2 = fs15.rmSync) { + if (fs15.existsSync(config.dbLocation) && (fs15.statSync(config.dbLocation).isFile() || fs15.readdirSync(config.dbLocation).length > 0)) { if (!options.disableExistingDirectoryWarning) { logger.warning( `The database cluster directory ${config.dbLocation} must be empty. Attempting to clean it up.` @@ -91999,7 +91812,7 @@ function cleanupDatabaseClusterDirectory(config, logger, options = {}, rmSync2 = // src/status-report.ts var os4 = __toESM(require("os")); -var core13 = __toESM(require_core()); +var core11 = __toESM(require_core()); function isFirstPartyAnalysis(actionName) { if (actionName !== "upload-sarif" /* UploadSarif */) { return true; @@ -92015,12 +91828,12 @@ function getActionsStatus(error2, otherFailureCause) { } function setJobStatusIfUnsuccessful(actionStatus) { if (actionStatus === "user-error") { - core13.exportVariable( + core11.exportVariable( "CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */, process.env["CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */] ?? "JOB_STATUS_CONFIGURATION_ERROR" /* ConfigErrorStatus */ ); } else if (actionStatus === "failure" || actionStatus === "aborted") { - core13.exportVariable( + core11.exportVariable( "CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */, process.env["CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */] ?? 
"JOB_STATUS_FAILURE" /* FailureStatus */ ); @@ -92039,14 +91852,14 @@ async function createStatusReportBase(actionName, status, actionStartedAt, confi let workflowStartedAt = process.env["CODEQL_WORKFLOW_STARTED_AT" /* WORKFLOW_STARTED_AT */]; if (workflowStartedAt === void 0) { workflowStartedAt = actionStartedAt.toISOString(); - core13.exportVariable("CODEQL_WORKFLOW_STARTED_AT" /* WORKFLOW_STARTED_AT */, workflowStartedAt); + core11.exportVariable("CODEQL_WORKFLOW_STARTED_AT" /* WORKFLOW_STARTED_AT */, workflowStartedAt); } const runnerOs = getRequiredEnvParam("RUNNER_OS"); const codeQlCliVersion = getCachedCodeQlVersion(); const actionRef = process.env["GITHUB_ACTION_REF"] || ""; const testingEnvironment = getTestingEnvironment(); if (testingEnvironment) { - core13.exportVariable("CODEQL_ACTION_TESTING_ENVIRONMENT" /* TESTING_ENVIRONMENT */, testingEnvironment); + core11.exportVariable("CODEQL_ACTION_TESTING_ENVIRONMENT" /* TESTING_ENVIRONMENT */, testingEnvironment); } const isSteadyStateDefaultSetupRun = process.env["CODE_SCANNING_IS_STEADY_STATE_DEFAULT_SETUP"] === "true"; const statusReport = { @@ -92127,9 +91940,9 @@ var INCOMPATIBLE_MSG = "CodeQL Action version is incompatible with the API endpo async function sendStatusReport(statusReport) { setJobStatusIfUnsuccessful(statusReport.status); const statusReportJSON = JSON.stringify(statusReport); - core13.debug(`Sending status report: ${statusReportJSON}`); + core11.debug(`Sending status report: ${statusReportJSON}`); if (isInTestMode()) { - core13.debug("In test mode. Status reports are not uploaded."); + core11.debug("In test mode. Status reports are not uploaded."); return; } const nwo = getRepositoryNwo(); @@ -92149,28 +91962,28 @@ async function sendStatusReport(statusReport) { switch (httpError.status) { case 403: if (getWorkflowEventName() === "push" && process.env["GITHUB_ACTOR"] === "dependabot[bot]") { - core13.warning( + core11.warning( `Workflows triggered by Dependabot on the "push" event run with read-only access. Uploading CodeQL results requires write access. To use CodeQL with Dependabot, please ensure you are using the "pull_request" event for this workflow and avoid triggering on the "push" event for Dependabot branches. See ${"https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/customizing-your-advanced-setup-for-code-scanning#scanning-on-push" /* SCANNING_ON_PUSH */} for more information on how to configure these events.` ); } else { - core13.warning( + core11.warning( `This run of the CodeQL Action does not have permission to access the CodeQL Action API endpoints. This could be because the Action is running on a pull request from a fork. If not, please ensure the workflow has at least the 'security-events: read' permission. 
Details: ${httpError.message}` ); } return; case 404: - core13.warning(httpError.message); + core11.warning(httpError.message); return; case 422: if (getRequiredEnvParam("GITHUB_SERVER_URL") !== GITHUB_DOTCOM_URL) { - core13.debug(INCOMPATIBLE_MSG); + core11.debug(INCOMPATIBLE_MSG); } else { - core13.debug(OUT_OF_DATE_MSG); + core11.debug(OUT_OF_DATE_MSG); } return; } } - core13.warning( + core11.warning( `An unexpected error occurred when sending a status report: ${getErrorMessage( e )}` @@ -92224,6 +92037,198 @@ async function createInitWithConfigStatusReport(config, initStatusReport, config }; } +// src/workflow.ts +var fs16 = __toESM(require("fs")); +var path18 = __toESM(require("path")); +var import_zlib = __toESM(require("zlib")); +var core12 = __toESM(require_core()); +function toCodedErrors(errors) { + return Object.entries(errors).reduce( + (acc, [code, message]) => { + acc[code] = { message, code }; + return acc; + }, + {} + ); +} +var WorkflowErrors = toCodedErrors({ + MissingPushHook: `Please specify an on.push hook to analyze and see code scanning alerts from the default branch on the Security tab.`, + CheckoutWrongHead: `git checkout HEAD^2 is no longer necessary. Please remove this step as Code Scanning recommends analyzing the merge commit for best results.`, + InconsistentActionVersion: `Not all workflow steps that use \`github/codeql-action\` actions use the same version. Please ensure that all such steps use the same version to avoid compatibility issues.` +}); +async function groupLanguagesByExtractor(languages, codeql) { + const resolveResult = await codeql.betterResolveLanguages(); + if (!resolveResult.aliases) { + return void 0; + } + const aliases = resolveResult.aliases; + const languagesByExtractor = {}; + for (const language of languages) { + const extractorName = aliases[language] || language; + if (!languagesByExtractor[extractorName]) { + languagesByExtractor[extractorName] = []; + } + languagesByExtractor[extractorName].push(language); + } + return languagesByExtractor; +} +async function getWorkflowErrors(doc, codeql) { + const errors = []; + const jobName = process.env.GITHUB_JOB; + if (jobName) { + const job = doc?.jobs?.[jobName]; + if (job?.strategy?.matrix?.language) { + const matrixLanguages = job.strategy.matrix.language; + if (Array.isArray(matrixLanguages)) { + const matrixLanguagesByExtractor = await groupLanguagesByExtractor( + matrixLanguages, + codeql + ); + if (matrixLanguagesByExtractor !== void 0) { + for (const [extractor, languages] of Object.entries( + matrixLanguagesByExtractor + )) { + if (languages.length > 1) { + errors.push({ + message: `CodeQL language '${extractor}' is referenced by more than one entry in the 'language' matrix parameter for job '${jobName}'. This may result in duplicate alerts. 
Please edit the 'language' matrix parameter to keep only one of the following: ${languages.map((language) => `'${language}'`).join(", ")}.`, + code: "DuplicateLanguageInMatrix" + }); + } + } + } + } + } + const steps = job?.steps; + if (Array.isArray(steps)) { + for (const step of steps) { + if (step?.run === "git checkout HEAD^2") { + errors.push(WorkflowErrors.CheckoutWrongHead); + break; + } + } + } + } + const codeqlStepRefs = []; + for (const job of Object.values(doc?.jobs || {})) { + if (Array.isArray(job.steps)) { + for (const step of job.steps) { + if (step.uses?.startsWith("github/codeql-action/")) { + const parts = step.uses.split("@"); + if (parts.length >= 2) { + codeqlStepRefs.push(parts[parts.length - 1]); + } + } + } + } + } + if (codeqlStepRefs.length > 0 && !codeqlStepRefs.every((ref) => ref === codeqlStepRefs[0])) { + errors.push(WorkflowErrors.InconsistentActionVersion); + } + const hasPushTrigger = hasWorkflowTrigger("push", doc); + const hasPullRequestTrigger = hasWorkflowTrigger("pull_request", doc); + const hasWorkflowCallTrigger = hasWorkflowTrigger("workflow_call", doc); + if (hasPullRequestTrigger && !hasPushTrigger && !hasWorkflowCallTrigger) { + errors.push(WorkflowErrors.MissingPushHook); + } + return errors; +} +function hasWorkflowTrigger(triggerName, doc) { + if (!doc.on) { + return false; + } + if (typeof doc.on === "string") { + return doc.on === triggerName; + } + if (Array.isArray(doc.on)) { + return doc.on.includes(triggerName); + } + return Object.prototype.hasOwnProperty.call(doc.on, triggerName); +} +async function validateWorkflow(codeql, logger) { + let workflow; + try { + workflow = await getWorkflow(logger); + } catch (e) { + return `error: getWorkflow() failed: ${String(e)}`; + } + let workflowErrors; + try { + workflowErrors = await getWorkflowErrors(workflow, codeql); + } catch (e) { + return `error: getWorkflowErrors() failed: ${String(e)}`; + } + if (workflowErrors.length > 0) { + let message; + try { + message = formatWorkflowErrors(workflowErrors); + } catch (e) { + return `error: formatWorkflowErrors() failed: ${String(e)}`; + } + core12.warning(message); + } + return formatWorkflowCause(workflowErrors); +} +function formatWorkflowErrors(errors) { + const issuesWere = errors.length === 1 ? "issue was" : "issues were"; + const errorsList = errors.map((e) => e.message).join(" "); + return `${errors.length} ${issuesWere} detected with this workflow: ${errorsList}`; +} +function formatWorkflowCause(errors) { + if (errors.length === 0) { + return void 0; + } + return errors.map((e) => e.code).join(","); +} +async function getWorkflow(logger) { + const maybeWorkflow = process.env["CODE_SCANNING_WORKFLOW_FILE"]; + if (maybeWorkflow) { + logger.debug( + "Using the workflow specified by the CODE_SCANNING_WORKFLOW_FILE environment variable." 
+ ); + return load( + import_zlib.default.gunzipSync(Buffer.from(maybeWorkflow, "base64")).toString() + ); + } + const workflowPath = await getWorkflowAbsolutePath(logger); + return load(fs16.readFileSync(workflowPath, "utf-8")); +} +async function getWorkflowAbsolutePath(logger) { + const relativePath = await getWorkflowRelativePath(); + const absolutePath = path18.join( + getRequiredEnvParam("GITHUB_WORKSPACE"), + relativePath + ); + if (fs16.existsSync(absolutePath)) { + logger.debug( + `Derived the following absolute path for the currently executing workflow: ${absolutePath}.` + ); + return absolutePath; + } + throw new Error( + `Expected to find a code scanning workflow file at ${absolutePath}, but no such file existed. This can happen if the currently running workflow checks out a branch that doesn't contain the corresponding workflow file.` + ); +} +async function checkWorkflow(logger, codeql) { + if (!isDynamicWorkflow() && process.env["CODEQL_ACTION_SKIP_WORKFLOW_VALIDATION" /* SKIP_WORKFLOW_VALIDATION */] !== "true") { + core12.startGroup("Validating workflow"); + const validateWorkflowResult = await internal.validateWorkflow( + codeql, + logger + ); + if (validateWorkflowResult === void 0) { + logger.info("Detected no issues with the code scanning workflow."); + } else { + logger.warning( + `Unable to validate code scanning workflow: ${validateWorkflowResult}` + ); + } + core12.endGroup(); + } +} +var internal = { + validateWorkflow +}; + // src/init-action.ts async function sendStartingStatusReport(startedAt, config, logger) { const statusReportBase = await createStatusReportBase( @@ -92320,8 +92325,8 @@ async function run() { const repositoryProperties = enableRepoProps ? await loadPropertiesFromApi(gitHubVersion, logger, repositoryNwo) : {}; const jobRunUuid = v4_default(); logger.info(`Job run UUID is ${jobRunUuid}.`); - core14.exportVariable("JOB_RUN_UUID" /* JOB_RUN_UUID */, jobRunUuid); - core14.exportVariable("CODEQL_ACTION_INIT_HAS_RUN" /* INIT_ACTION_HAS_RUN */, "true"); + core13.exportVariable("JOB_RUN_UUID" /* JOB_RUN_UUID */, jobRunUuid); + core13.exportVariable("CODEQL_ACTION_INIT_HAS_RUN" /* INIT_ACTION_HAS_RUN */, "true"); const configFile = getOptionalInput("config-file"); const sourceRoot = path19.resolve( getRequiredEnvParam("GITHUB_WORKSPACE"), @@ -92375,7 +92380,7 @@ async function run() { ); } if (semver8.lt(actualVer, publicPreview)) { - core14.exportVariable("CODEQL_ENABLE_EXPERIMENTAL_FEATURES" /* EXPERIMENTAL_FEATURES */, "true"); + core13.exportVariable("CODEQL_ENABLE_EXPERIMENTAL_FEATURES" /* EXPERIMENTAL_FEATURES */, "true"); logger.info("Experimental Rust analysis enabled"); } } @@ -92395,7 +92400,7 @@ async function run() { // - The `init` Action is passed `debug: true`. // - Actions step debugging is enabled (e.g. by [enabling debug logging for a rerun](https://docs.github.com/en/actions/managing-workflow-runs/re-running-workflows-and-jobs#re-running-all-the-jobs-in-a-workflow), // or by setting the `ACTIONS_STEP_DEBUG` secret to `true`). 
- debugMode: getOptionalInput("debug") === "true" || core14.isDebug(), + debugMode: getOptionalInput("debug") === "true" || core13.isDebug(), debugArtifactName: getOptionalInput("debug-artifact-name") || DEFAULT_DEBUG_ARTIFACT_NAME, debugDatabaseName: getOptionalInput("debug-database-name") || DEFAULT_DEBUG_DATABASE_NAME, repository: repositoryNwo, @@ -92412,7 +92417,7 @@ async function run() { await checkInstallPython311(config.languages, codeql); } catch (unwrappedError) { const error2 = wrapError(unwrappedError); - core14.setFailed(error2.message); + core13.setFailed(error2.message); const statusReportBase = await createStatusReportBase( "init" /* Init */, error2 instanceof ConfigurationError ? "user-error" : "aborted", @@ -92472,8 +92477,8 @@ async function run() { } const goFlags = process.env["GOFLAGS"]; if (goFlags) { - core14.exportVariable("GOFLAGS", goFlags); - core14.warning( + core13.exportVariable("GOFLAGS", goFlags); + core13.warning( "Passing the GOFLAGS env parameter to the init action is deprecated. Please move this to the analyze action." ); } @@ -92497,7 +92502,7 @@ async function run() { "bin" ); fs17.mkdirSync(tempBinPath, { recursive: true }); - core14.addPath(tempBinPath); + core13.addPath(tempBinPath); const goWrapperPath = path19.resolve(tempBinPath, "go"); fs17.writeFileSync( goWrapperPath, @@ -92506,14 +92511,14 @@ async function run() { exec ${goBinaryPath} "$@"` ); fs17.chmodSync(goWrapperPath, "755"); - core14.exportVariable("CODEQL_ACTION_GO_BINARY" /* GO_BINARY_LOCATION */, goWrapperPath); + core13.exportVariable("CODEQL_ACTION_GO_BINARY" /* GO_BINARY_LOCATION */, goWrapperPath); } catch (e) { logger.warning( `Analyzing Go on Linux, but failed to install wrapper script. Tracing custom builds may fail: ${e}` ); } } else { - core14.exportVariable("CODEQL_ACTION_GO_BINARY" /* GO_BINARY_LOCATION */, goBinaryPath); + core13.exportVariable("CODEQL_ACTION_GO_BINARY" /* GO_BINARY_LOCATION */, goBinaryPath); } } catch (e) { logger.warning( @@ -92540,20 +92545,20 @@ exec ${goBinaryPath} "$@"` } } } - core14.exportVariable( + core13.exportVariable( "CODEQL_RAM", process.env["CODEQL_RAM"] || getMemoryFlagValue(getOptionalInput("ram"), logger).toString() ); - core14.exportVariable( + core13.exportVariable( "CODEQL_THREADS", process.env["CODEQL_THREADS"] || getThreadsFlagValue(getOptionalInput("threads"), logger).toString() ); if (await features.getValue("disable_kotlin_analysis_enabled" /* DisableKotlinAnalysisEnabled */)) { - core14.exportVariable("CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN", "true"); + core13.exportVariable("CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN", "true"); } const kotlinLimitVar = "CODEQL_EXTRACTOR_KOTLIN_OVERRIDE_MAXIMUM_VERSION_LIMIT"; if (await codeQlVersionAtLeast(codeql, "2.20.3") && !await codeQlVersionAtLeast(codeql, "2.20.4")) { - core14.exportVariable(kotlinLimitVar, "2.1.20"); + core13.exportVariable(kotlinLimitVar, "2.1.20"); } if (config.languages.includes("cpp" /* cpp */)) { const envVar = "CODEQL_EXTRACTOR_CPP_TRAP_CACHING"; @@ -92563,10 +92568,10 @@ exec ${goBinaryPath} "$@"` ); } else if (getTrapCachingEnabled() && await codeQlVersionAtLeast(codeql, "2.17.5")) { logger.info("Enabling CodeQL C++ TRAP caching support"); - core14.exportVariable(envVar, "true"); + core13.exportVariable(envVar, "true"); } else { logger.info("Disabling CodeQL C++ TRAP caching support"); - core14.exportVariable(envVar, "false"); + core13.exportVariable(envVar, "false"); } } const minimizeJavaJars = await features.getValue( @@ -92582,7 +92587,7 @@ exec 
${goBinaryPath} "$@"` } if (await codeQlVersionAtLeast(codeql, "2.17.1")) { } else { - core14.exportVariable( + core13.exportVariable( "CODEQL_EXTRACTOR_PYTHON_DISABLE_LIBRARY_EXTRACTION", "true" ); @@ -92608,7 +92613,7 @@ exec ${goBinaryPath} "$@"` "python_default_is_to_not_extract_stdlib" /* PythonDefaultIsToNotExtractStdlib */, codeql )) { - core14.exportVariable("CODEQL_EXTRACTOR_PYTHON_EXTRACT_STDLIB", "true"); + core13.exportVariable("CODEQL_EXTRACTOR_PYTHON_EXTRACT_STDLIB", "true"); } } if (process.env["CODEQL_EXTRACTOR_JAVA_OPTION_MINIMIZE_DEPENDENCY_JARS" /* JAVA_EXTRACTOR_MINIMIZE_DEPENDENCY_JARS */]) { @@ -92616,7 +92621,7 @@ exec ${goBinaryPath} "$@"` `${"CODEQL_EXTRACTOR_JAVA_OPTION_MINIMIZE_DEPENDENCY_JARS" /* JAVA_EXTRACTOR_MINIMIZE_DEPENDENCY_JARS */} is already set to '${process.env["CODEQL_EXTRACTOR_JAVA_OPTION_MINIMIZE_DEPENDENCY_JARS" /* JAVA_EXTRACTOR_MINIMIZE_DEPENDENCY_JARS */]}', so the Action will not override it.` ); } else if (minimizeJavaJars && config.dependencyCachingEnabled && config.buildMode === "none" /* None */ && config.languages.includes("java" /* java */)) { - core14.exportVariable( + core13.exportVariable( "CODEQL_EXTRACTOR_JAVA_OPTION_MINIMIZE_DEPENDENCY_JARS" /* JAVA_EXTRACTOR_MINIMIZE_DEPENDENCY_JARS */, "true" ); @@ -92660,15 +92665,15 @@ exec ${goBinaryPath} "$@"` const tracerConfig = await getCombinedTracerConfig(codeql, config); if (tracerConfig !== void 0) { for (const [key, value] of Object.entries(tracerConfig.env)) { - core14.exportVariable(key, value); + core13.exportVariable(key, value); } } flushDiagnostics(config); - core14.setOutput("codeql-path", config.codeQLCmd); - core14.setOutput("codeql-version", (await codeql.getVersion()).version); + core13.setOutput("codeql-path", config.codeQLCmd); + core13.setOutput("codeql-version", (await codeql.getVersion()).version); } catch (unwrappedError) { const error2 = wrapError(unwrappedError); - core14.setFailed(error2.message); + core13.setFailed(error2.message); await sendCompletedStatusReport( startedAt, config, @@ -92731,7 +92736,7 @@ async function runWrapper() { try { await run(); } catch (error2) { - core14.setFailed(`init action failed: ${getErrorMessage(error2)}`); + core13.setFailed(`init action failed: ${getErrorMessage(error2)}`); } await checkForTimeout(); } diff --git a/lib/setup-codeql-action.js b/lib/setup-codeql-action.js index 7fefd35163..37e3f6121a 100644 --- a/lib/setup-codeql-action.js +++ b/lib/setup-codeql-action.js @@ -19743,10 +19743,10 @@ Support boolean input list: \`true | True | TRUE | false | False | FALSE\``); (0, command_1.issueCommand)("error", (0, utils_1.toCommandProperties)(properties), message instanceof Error ? message.toString() : message); } exports2.error = error2; - function warning7(message, properties = {}) { + function warning6(message, properties = {}) { (0, command_1.issueCommand)("warning", (0, utils_1.toCommandProperties)(properties), message instanceof Error ? message.toString() : message); } - exports2.warning = warning7; + exports2.warning = warning6; function notice(message, properties = {}) { (0, command_1.issueCommand)("notice", (0, utils_1.toCommandProperties)(properties), message instanceof Error ? 
message.toString() : message); } @@ -19755,22 +19755,22 @@ Support boolean input list: \`true | True | TRUE | false | False | FALSE\``); process.stdout.write(message + os3.EOL); } exports2.info = info4; - function startGroup4(name) { + function startGroup3(name) { (0, command_1.issue)("group", name); } - exports2.startGroup = startGroup4; - function endGroup4() { + exports2.startGroup = startGroup3; + function endGroup3() { (0, command_1.issue)("endgroup"); } - exports2.endGroup = endGroup4; + exports2.endGroup = endGroup3; function group(name, fn) { return __awaiter4(this, void 0, void 0, function* () { - startGroup4(name); + startGroup3(name); let result; try { result = yield fn(); } finally { - endGroup4(); + endGroup3(); } return result; }); @@ -34015,7 +34015,7 @@ var require_internal_glob_options_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getOptions = void 0; - var core15 = __importStar4(require_core()); + var core13 = __importStar4(require_core()); function getOptions(copy) { const result = { followSymbolicLinks: true, @@ -34025,15 +34025,15 @@ var require_internal_glob_options_helper = __commonJS({ if (copy) { if (typeof copy.followSymbolicLinks === "boolean") { result.followSymbolicLinks = copy.followSymbolicLinks; - core15.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); + core13.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); } if (typeof copy.implicitDescendants === "boolean") { result.implicitDescendants = copy.implicitDescendants; - core15.debug(`implicitDescendants '${result.implicitDescendants}'`); + core13.debug(`implicitDescendants '${result.implicitDescendants}'`); } if (typeof copy.omitBrokenSymbolicLinks === "boolean") { result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; - core15.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); + core13.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); } } return result; @@ -35487,7 +35487,7 @@ var require_internal_globber = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; - var core15 = __importStar4(require_core()); + var core13 = __importStar4(require_core()); var fs11 = __importStar4(require("fs")); var globOptionsHelper = __importStar4(require_internal_glob_options_helper()); var path12 = __importStar4(require("path")); @@ -35538,7 +35538,7 @@ var require_internal_globber = __commonJS({ } const stack = []; for (const searchPath of patternHelper.getSearchPaths(patterns)) { - core15.debug(`Search path '${searchPath}'`); + core13.debug(`Search path '${searchPath}'`); try { yield __await4(fs11.promises.lstat(searchPath)); } catch (err) { @@ -35610,7 +35610,7 @@ var require_internal_globber = __commonJS({ } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { - core15.debug(`Broken symlink '${item.path}'`); + core13.debug(`Broken symlink '${item.path}'`); return void 0; } throw new Error(`No information found for the path '${item.path}'. 
This may indicate a broken symbolic link.`); @@ -35626,7 +35626,7 @@ var require_internal_globber = __commonJS({ traversalChain.pop(); } if (traversalChain.some((x) => x === realPath)) { - core15.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); + core13.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); return void 0; } traversalChain.push(realPath); @@ -36950,7 +36950,7 @@ var require_cacheUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getRuntimeToken = exports2.getCacheVersion = exports2.assertDefined = exports2.getGnuTarPathOnWindows = exports2.getCacheFileName = exports2.getCompressionMethod = exports2.unlinkFile = exports2.resolvePaths = exports2.getArchiveFileSizeInBytes = exports2.createTempDirectory = void 0; - var core15 = __importStar4(require_core()); + var core13 = __importStar4(require_core()); var exec2 = __importStar4(require_exec()); var glob = __importStar4(require_glob()); var io6 = __importStar4(require_io()); @@ -37003,7 +37003,7 @@ var require_cacheUtils = __commonJS({ _e = false; const file = _c; const relativeFile = path12.relative(workspace, file).replace(new RegExp(`\\${path12.sep}`, "g"), "/"); - core15.debug(`Matched: ${relativeFile}`); + core13.debug(`Matched: ${relativeFile}`); if (relativeFile === "") { paths.push("."); } else { @@ -37033,7 +37033,7 @@ var require_cacheUtils = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { let versionOutput = ""; additionalArgs.push("--version"); - core15.debug(`Checking ${app} ${additionalArgs.join(" ")}`); + core13.debug(`Checking ${app} ${additionalArgs.join(" ")}`); try { yield exec2.exec(`${app}`, additionalArgs, { ignoreReturnCode: true, @@ -37044,10 +37044,10 @@ var require_cacheUtils = __commonJS({ } }); } catch (err) { - core15.debug(err.message); + core13.debug(err.message); } versionOutput = versionOutput.trim(); - core15.debug(versionOutput); + core13.debug(versionOutput); return versionOutput; }); } @@ -37055,7 +37055,7 @@ var require_cacheUtils = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { const versionOutput = yield getVersion("zstd", ["--quiet"]); const version = semver8.clean(versionOutput); - core15.debug(`zstd version: ${version}`); + core13.debug(`zstd version: ${version}`); if (versionOutput === "") { return constants_1.CompressionMethod.Gzip; } else { @@ -72367,7 +72367,7 @@ var require_uploadUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.uploadCacheArchiveSDK = exports2.UploadProgress = void 0; - var core15 = __importStar4(require_core()); + var core13 = __importStar4(require_core()); var storage_blob_1 = require_dist7(); var errors_1 = require_errors2(); var UploadProgress = class { @@ -72409,7 +72409,7 @@ var require_uploadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const uploadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core15.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); + core13.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -72464,14 +72464,14 @@ var require_uploadUtils = __commonJS({ }; try { uploadProgress.startDisplayTimer(); - core15.debug(`BlobClient: 
${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); + core13.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); const response = yield blockBlobClient.uploadFile(archivePath, uploadOptions); if (response._response.status >= 400) { throw new errors_1.InvalidResponseError(`uploadCacheArchiveSDK: upload failed with status code ${response._response.status}`); } return response; } catch (error2) { - core15.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error2.message}`); + core13.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error2.message}`); throw error2; } finally { uploadProgress.stopDisplayTimer(); @@ -72542,7 +72542,7 @@ var require_requestUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.retryHttpClientResponse = exports2.retryTypedResponse = exports2.retry = exports2.isRetryableStatusCode = exports2.isServerErrorStatusCode = exports2.isSuccessStatusCode = void 0; - var core15 = __importStar4(require_core()); + var core13 = __importStar4(require_core()); var http_client_1 = require_lib(); var constants_1 = require_constants10(); function isSuccessStatusCode(statusCode) { @@ -72603,9 +72603,9 @@ var require_requestUtils = __commonJS({ isRetryable = isRetryableStatusCode(statusCode); errorMessage = `Cache service responded with ${statusCode}`; } - core15.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); + core13.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); if (!isRetryable) { - core15.debug(`${name} - Error is not retryable`); + core13.debug(`${name} - Error is not retryable`); break; } yield sleep(delay2); @@ -72710,7 +72710,7 @@ var require_downloadUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.downloadCacheStorageSDK = exports2.downloadCacheHttpClientConcurrent = exports2.downloadCacheHttpClient = exports2.DownloadProgress = void 0; - var core15 = __importStar4(require_core()); + var core13 = __importStar4(require_core()); var http_client_1 = require_lib(); var storage_blob_1 = require_dist7(); var buffer = __importStar4(require("buffer")); @@ -72748,7 +72748,7 @@ var require_downloadUtils = __commonJS({ this.segmentIndex = this.segmentIndex + 1; this.segmentSize = segmentSize; this.receivedBytes = 0; - core15.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); + core13.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); } /** * Sets the number of bytes received for the current segment. 
@@ -72782,7 +72782,7 @@ var require_downloadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const downloadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core15.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); + core13.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -72832,7 +72832,7 @@ var require_downloadUtils = __commonJS({ })); downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => { downloadResponse.message.destroy(); - core15.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); + core13.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); }); yield pipeResponseToStream(downloadResponse, writeStream); const contentLengthHeader = downloadResponse.message.headers["content-length"]; @@ -72843,7 +72843,7 @@ var require_downloadUtils = __commonJS({ throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`); } } else { - core15.debug("Unable to validate download, no Content-Length header"); + core13.debug("Unable to validate download, no Content-Length header"); } }); } @@ -72963,7 +72963,7 @@ var require_downloadUtils = __commonJS({ const properties = yield client.getProperties(); const contentLength = (_a = properties.contentLength) !== null && _a !== void 0 ? _a : -1; if (contentLength < 0) { - core15.debug("Unable to determine content length, downloading file with http-client..."); + core13.debug("Unable to determine content length, downloading file with http-client..."); yield downloadCacheHttpClient(archiveLocation, archivePath); } else { const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH); @@ -73043,7 +73043,7 @@ var require_options = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getDownloadOptions = exports2.getUploadOptions = void 0; - var core15 = __importStar4(require_core()); + var core13 = __importStar4(require_core()); function getUploadOptions(copy) { const result = { useAzureSdk: false, @@ -73063,9 +73063,9 @@ var require_options = __commonJS({ } result.uploadConcurrency = !isNaN(Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) ? Math.min(32, Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) : result.uploadConcurrency; result.uploadChunkSize = !isNaN(Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"])) ? 
Math.min(128 * 1024 * 1024, Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"]) * 1024 * 1024) : result.uploadChunkSize; - core15.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core15.debug(`Upload concurrency: ${result.uploadConcurrency}`); - core15.debug(`Upload chunk size: ${result.uploadChunkSize}`); + core13.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core13.debug(`Upload concurrency: ${result.uploadConcurrency}`); + core13.debug(`Upload chunk size: ${result.uploadChunkSize}`); return result; } exports2.getUploadOptions = getUploadOptions; @@ -73102,12 +73102,12 @@ var require_options = __commonJS({ if (segmentDownloadTimeoutMins && !isNaN(Number(segmentDownloadTimeoutMins)) && isFinite(Number(segmentDownloadTimeoutMins))) { result.segmentTimeoutInMs = Number(segmentDownloadTimeoutMins) * 60 * 1e3; } - core15.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core15.debug(`Download concurrency: ${result.downloadConcurrency}`); - core15.debug(`Request timeout (ms): ${result.timeoutInMs}`); - core15.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); - core15.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); - core15.debug(`Lookup only: ${result.lookupOnly}`); + core13.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core13.debug(`Download concurrency: ${result.downloadConcurrency}`); + core13.debug(`Request timeout (ms): ${result.timeoutInMs}`); + core13.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); + core13.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); + core13.debug(`Lookup only: ${result.lookupOnly}`); return result; } exports2.getDownloadOptions = getDownloadOptions; @@ -73287,7 +73287,7 @@ var require_cacheHttpClient = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.saveCache = exports2.reserveCache = exports2.downloadCache = exports2.getCacheEntry = void 0; - var core15 = __importStar4(require_core()); + var core13 = __importStar4(require_core()); var http_client_1 = require_lib(); var auth_1 = require_auth(); var fs11 = __importStar4(require("fs")); @@ -73305,7 +73305,7 @@ var require_cacheHttpClient = __commonJS({ throw new Error("Cache Service Url not found, unable to restore cache."); } const url = `${baseUrl}_apis/artifactcache/${resource}`; - core15.debug(`Resource Url: ${url}`); + core13.debug(`Resource Url: ${url}`); return url; } function createAcceptHeader(type2, apiVersion) { @@ -73333,7 +73333,7 @@ var require_cacheHttpClient = __commonJS({ return httpClient.getJson(getCacheApiUrl(resource)); })); if (response.statusCode === 204) { - if (core15.isDebug()) { + if (core13.isDebug()) { yield printCachesListForDiagnostics(keys[0], httpClient, version); } return null; @@ -73346,9 +73346,9 @@ var require_cacheHttpClient = __commonJS({ if (!cacheDownloadUrl) { throw new Error("Cache not found."); } - core15.setSecret(cacheDownloadUrl); - core15.debug(`Cache Result:`); - core15.debug(JSON.stringify(cacheResult)); + core13.setSecret(cacheDownloadUrl); + core13.debug(`Cache Result:`); + core13.debug(JSON.stringify(cacheResult)); return cacheResult; }); } @@ -73363,10 +73363,10 @@ var require_cacheHttpClient = __commonJS({ const cacheListResult = response.result; const totalCount = cacheListResult === null || cacheListResult === void 0 ? 
void 0 : cacheListResult.totalCount; if (totalCount && totalCount > 0) { - core15.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key + core13.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key Other caches with similar key:`); for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) { - core15.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); + core13.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); } } } @@ -73411,7 +73411,7 @@ Other caches with similar key:`); } function uploadChunk(httpClient, resourceUrl, openStream, start, end) { return __awaiter4(this, void 0, void 0, function* () { - core15.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); + core13.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); const additionalHeaders = { "Content-Type": "application/octet-stream", "Content-Range": getContentRange(start, end) @@ -73433,7 +73433,7 @@ Other caches with similar key:`); const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency); const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize); const parallelUploads = [...new Array(concurrency).keys()]; - core15.debug("Awaiting all uploads"); + core13.debug("Awaiting all uploads"); let offset = 0; try { yield Promise.all(parallelUploads.map(() => __awaiter4(this, void 0, void 0, function* () { @@ -73476,16 +73476,16 @@ Other caches with similar key:`); yield (0, uploadUtils_1.uploadCacheArchiveSDK)(signedUploadURL, archivePath, options); } else { const httpClient = createHttpClient(); - core15.debug("Upload cache"); + core13.debug("Upload cache"); yield uploadFile(httpClient, cacheId, archivePath, options); - core15.debug("Commiting cache"); + core13.debug("Commiting cache"); const cacheSize = utils.getArchiveFileSizeInBytes(archivePath); - core15.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); + core13.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); const commitCacheResponse = yield commitCache(httpClient, 
cacheId, cacheSize); if (!(0, requestUtils_1.isSuccessStatusCode)(commitCacheResponse.statusCode)) { throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`); } - core15.info("Cache saved successfully"); + core13.info("Cache saved successfully"); } }); } @@ -78937,7 +78937,7 @@ var require_cache3 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.saveCache = exports2.restoreCache = exports2.isFeatureAvailable = exports2.FinalizeCacheError = exports2.ReserveCacheError = exports2.ValidationError = void 0; - var core15 = __importStar4(require_core()); + var core13 = __importStar4(require_core()); var path12 = __importStar4(require("path")); var utils = __importStar4(require_cacheUtils()); var cacheHttpClient = __importStar4(require_cacheHttpClient()); @@ -78997,7 +78997,7 @@ var require_cache3 = __commonJS({ function restoreCache3(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { return __awaiter4(this, void 0, void 0, function* () { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core15.debug(`Cache service version: ${cacheServiceVersion}`); + core13.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); switch (cacheServiceVersion) { case "v2": @@ -79013,8 +79013,8 @@ var require_cache3 = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core15.debug("Resolved Keys:"); - core15.debug(JSON.stringify(keys)); + core13.debug("Resolved Keys:"); + core13.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -79032,19 +79032,19 @@ var require_cache3 = __commonJS({ return void 0; } if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { - core15.info("Lookup only - skipping download"); + core13.info("Lookup only - skipping download"); return cacheEntry.cacheKey; } archivePath = path12.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core15.debug(`Archive Path: ${archivePath}`); + core13.debug(`Archive Path: ${archivePath}`); yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); - if (core15.isDebug()) { + if (core13.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core15.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + core13.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); yield (0, tar_1.extractTar)(archivePath, compressionMethod); - core15.info("Cache restored successfully"); + core13.info("Cache restored successfully"); return cacheEntry.cacheKey; } catch (error2) { const typedError = error2; @@ -79052,16 +79052,16 @@ var require_cache3 = __commonJS({ throw error2; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core15.error(`Failed to restore: ${error2.message}`); + core13.error(`Failed to restore: ${error2.message}`); } else { - core15.warning(`Failed to restore: ${error2.message}`); + core13.warning(`Failed to restore: ${error2.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error2) { - core15.debug(`Failed to delete archive: ${error2}`); + core13.debug(`Failed to delete archive: ${error2}`); } } return void 0; @@ -79072,8 +79072,8 @@ var require_cache3 = __commonJS({ options = Object.assign(Object.assign({}, options), { useAzureSdk: true }); restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core15.debug("Resolved Keys:"); - core15.debug(JSON.stringify(keys)); + core13.debug("Resolved Keys:"); + core13.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -79091,30 +79091,30 @@ var require_cache3 = __commonJS({ }; const response = yield twirpClient.GetCacheEntryDownloadURL(request); if (!response.ok) { - core15.debug(`Cache not found for version ${request.version} of keys: ${keys.join(", ")}`); + core13.debug(`Cache not found for version ${request.version} of keys: ${keys.join(", ")}`); return void 0; } const isRestoreKeyMatch = request.key !== response.matchedKey; if (isRestoreKeyMatch) { - core15.info(`Cache hit for restore-key: ${response.matchedKey}`); + core13.info(`Cache hit for restore-key: ${response.matchedKey}`); } else { - core15.info(`Cache hit for: ${response.matchedKey}`); + core13.info(`Cache hit for: ${response.matchedKey}`); } if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { - core15.info("Lookup only - skipping download"); + core13.info("Lookup only - skipping download"); return response.matchedKey; } archivePath = path12.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core15.debug(`Archive path: ${archivePath}`); - core15.debug(`Starting download of archive to: ${archivePath}`); + core13.debug(`Archive path: ${archivePath}`); + core13.debug(`Starting download of archive to: ${archivePath}`); yield cacheHttpClient.downloadCache(response.signedDownloadUrl, archivePath, options); const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core15.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); - if (core15.isDebug()) { + core13.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + if (core13.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } yield (0, tar_1.extractTar)(archivePath, compressionMethod); - core15.info("Cache restored successfully"); + core13.info("Cache restored successfully"); return response.matchedKey; } catch (error2) { const typedError = error2; @@ -79122,9 +79122,9 @@ var require_cache3 = __commonJS({ throw error2; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core15.error(`Failed to restore: ${error2.message}`); + core13.error(`Failed to restore: ${error2.message}`); } else { - core15.warning(`Failed to restore: ${error2.message}`); + core13.warning(`Failed to restore: ${error2.message}`); } } } finally { @@ -79133,7 +79133,7 @@ var require_cache3 = __commonJS({ yield utils.unlinkFile(archivePath); } } catch (error2) { - core15.debug(`Failed to delete archive: ${error2}`); + core13.debug(`Failed to delete archive: ${error2}`); } } return void 0; @@ -79142,7 +79142,7 @@ var require_cache3 = __commonJS({ function saveCache3(paths, key, options, enableCrossOsArchive = false) { return __awaiter4(this, void 0, void 0, function* () { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core15.debug(`Cache service version: ${cacheServiceVersion}`); + core13.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); checkKey(key); switch (cacheServiceVersion) { @@ -79161,26 +79161,26 @@ var require_cache3 = __commonJS({ const compressionMethod = yield utils.getCompressionMethod(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core15.debug("Cache Paths:"); - core15.debug(`${JSON.stringify(cachePaths)}`); + core13.debug("Cache Paths:"); + core13.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = path12.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core15.debug(`Archive Path: ${archivePath}`); + core13.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core15.isDebug()) { + if (core13.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const fileSizeLimit = 10 * 1024 * 1024 * 1024; const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core15.debug(`File Size: ${archiveFileSize}`); + core13.debug(`File Size: ${archiveFileSize}`); if 
(archiveFileSize > fileSizeLimit && !(0, config_1.isGhes)()) { throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); } - core15.debug("Reserving Cache"); + core13.debug("Reserving Cache"); const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, { compressionMethod, enableCrossOsArchive, @@ -79193,26 +79193,26 @@ var require_cache3 = __commonJS({ } else { throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? void 0 : _e.message}`); } - core15.debug(`Saving Cache (ID: ${cacheId})`); + core13.debug(`Saving Cache (ID: ${cacheId})`); yield cacheHttpClient.saveCache(cacheId, archivePath, "", options); } catch (error2) { const typedError = error2; if (typedError.name === ValidationError.name) { throw error2; } else if (typedError.name === ReserveCacheError.name) { - core15.info(`Failed to save: ${typedError.message}`); + core13.info(`Failed to save: ${typedError.message}`); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core15.error(`Failed to save: ${typedError.message}`); + core13.error(`Failed to save: ${typedError.message}`); } else { - core15.warning(`Failed to save: ${typedError.message}`); + core13.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error2) { - core15.debug(`Failed to delete archive: ${error2}`); + core13.debug(`Failed to delete archive: ${error2}`); } } return cacheId; @@ -79225,23 +79225,23 @@ var require_cache3 = __commonJS({ const twirpClient = cacheTwirpClient.internalCacheTwirpClient(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core15.debug("Cache Paths:"); - core15.debug(`${JSON.stringify(cachePaths)}`); + core13.debug("Cache Paths:"); + core13.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = path12.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core15.debug(`Archive Path: ${archivePath}`); + core13.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core15.isDebug()) { + if (core13.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core15.debug(`File Size: ${archiveFileSize}`); + core13.debug(`File Size: ${archiveFileSize}`); options.archiveSizeBytes = archiveFileSize; - core15.debug("Reserving Cache"); + core13.debug("Reserving Cache"); const version = utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive); const request = { key, @@ -79252,16 +79252,16 @@ var require_cache3 = __commonJS({ const response = yield twirpClient.CreateCacheEntry(request); if (!response.ok) { if (response.message) { - core15.warning(`Cache reservation failed: ${response.message}`); + core13.warning(`Cache reservation failed: ${response.message}`); } throw new Error(response.message || "Response was not ok"); } signedUploadUrl = 
response.signedUploadUrl; } catch (error2) { - core15.debug(`Failed to reserve cache: ${error2}`); + core13.debug(`Failed to reserve cache: ${error2}`); throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache.`); } - core15.debug(`Attempting to upload cache located at: ${archivePath}`); + core13.debug(`Attempting to upload cache located at: ${archivePath}`); yield cacheHttpClient.saveCache(cacheId, archivePath, signedUploadUrl, options); const finalizeRequest = { key, @@ -79269,7 +79269,7 @@ var require_cache3 = __commonJS({ sizeBytes: `${archiveFileSize}` }; const finalizeResponse = yield twirpClient.FinalizeCacheEntryUpload(finalizeRequest); - core15.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); + core13.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); if (!finalizeResponse.ok) { if (finalizeResponse.message) { throw new FinalizeCacheError(finalizeResponse.message); @@ -79282,21 +79282,21 @@ var require_cache3 = __commonJS({ if (typedError.name === ValidationError.name) { throw error2; } else if (typedError.name === ReserveCacheError.name) { - core15.info(`Failed to save: ${typedError.message}`); + core13.info(`Failed to save: ${typedError.message}`); } else if (typedError.name === FinalizeCacheError.name) { - core15.warning(typedError.message); + core13.warning(typedError.message); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core15.error(`Failed to save: ${typedError.message}`); + core13.error(`Failed to save: ${typedError.message}`); } else { - core15.warning(`Failed to save: ${typedError.message}`); + core13.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error2) { - core15.debug(`Failed to delete archive: ${error2}`); + core13.debug(`Failed to delete archive: ${error2}`); } } return cacheId; @@ -80801,7 +80801,7 @@ var require_retry_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.RetryHelper = void 0; - var core15 = __importStar4(require_core()); + var core13 = __importStar4(require_core()); var RetryHelper = class { constructor(maxAttempts, minSeconds, maxSeconds) { if (maxAttempts < 1) { @@ -80824,10 +80824,10 @@ var require_retry_helper = __commonJS({ if (isRetryable && !isRetryable(err)) { throw err; } - core15.info(err.message); + core13.info(err.message); } const seconds = this.getSleepAmount(); - core15.info(`Waiting ${seconds} seconds before trying again`); + core13.info(`Waiting ${seconds} seconds before trying again`); yield this.sleep(seconds); attempt++; } @@ -80907,7 +80907,7 @@ var require_tool_cache = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.evaluateVersions = exports2.isExplicitVersion = exports2.findFromManifest = exports2.getManifestFromRepo = exports2.findAllVersions = exports2.find = exports2.cacheFile = exports2.cacheDir = exports2.extractZip = exports2.extractXar = exports2.extractTar = exports2.extract7z = exports2.downloadTool = exports2.HTTPError = void 0; - var core15 = __importStar4(require_core()); + var core13 = __importStar4(require_core()); var io6 = __importStar4(require_io()); var crypto = __importStar4(require("crypto")); var fs11 = __importStar4(require("fs")); @@ -80936,8 +80936,8 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { dest = dest || 
path12.join(_getTempDirectory(), crypto.randomUUID()); yield io6.mkdirP(path12.dirname(dest)); - core15.debug(`Downloading ${url}`); - core15.debug(`Destination ${dest}`); + core13.debug(`Downloading ${url}`); + core13.debug(`Destination ${dest}`); const maxAttempts = 3; const minSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS", 10); const maxSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS", 20); @@ -80964,7 +80964,7 @@ var require_tool_cache = __commonJS({ allowRetries: false }); if (auth) { - core15.debug("set auth"); + core13.debug("set auth"); if (headers === void 0) { headers = {}; } @@ -80973,7 +80973,7 @@ var require_tool_cache = __commonJS({ const response = yield http.get(url, headers); if (response.message.statusCode !== 200) { const err = new HTTPError2(response.message.statusCode); - core15.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); + core13.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); throw err; } const pipeline = util.promisify(stream2.pipeline); @@ -80982,16 +80982,16 @@ var require_tool_cache = __commonJS({ let succeeded = false; try { yield pipeline(readStream, fs11.createWriteStream(dest)); - core15.debug("download complete"); + core13.debug("download complete"); succeeded = true; return dest; } finally { if (!succeeded) { - core15.debug("download failed"); + core13.debug("download failed"); try { yield io6.rmRF(dest); } catch (err) { - core15.debug(`Failed to delete '${dest}'. ${err.message}`); + core13.debug(`Failed to delete '${dest}'. ${err.message}`); } } } @@ -81006,7 +81006,7 @@ var require_tool_cache = __commonJS({ process.chdir(dest); if (_7zPath) { try { - const logLevel = core15.isDebug() ? "-bb1" : "-bb0"; + const logLevel = core13.isDebug() ? 
"-bb1" : "-bb0"; const args = [ "x", logLevel, @@ -81056,7 +81056,7 @@ var require_tool_cache = __commonJS({ throw new Error("parameter 'file' is required"); } dest = yield _createExtractFolder(dest); - core15.debug("Checking tar --version"); + core13.debug("Checking tar --version"); let versionOutput = ""; yield (0, exec_1.exec)("tar --version", [], { ignoreReturnCode: true, @@ -81066,7 +81066,7 @@ var require_tool_cache = __commonJS({ stderr: (data) => versionOutput += data.toString() } }); - core15.debug(versionOutput.trim()); + core13.debug(versionOutput.trim()); const isGnuTar = versionOutput.toUpperCase().includes("GNU TAR"); let args; if (flags instanceof Array) { @@ -81074,7 +81074,7 @@ var require_tool_cache = __commonJS({ } else { args = [flags]; } - if (core15.isDebug() && !flags.includes("v")) { + if (core13.isDebug() && !flags.includes("v")) { args.push("-v"); } let destArg = dest; @@ -81106,7 +81106,7 @@ var require_tool_cache = __commonJS({ args = [flags]; } args.push("-x", "-C", dest, "-f", file); - if (core15.isDebug()) { + if (core13.isDebug()) { args.push("-v"); } const xarPath = yield io6.which("xar", true); @@ -81151,7 +81151,7 @@ var require_tool_cache = __commonJS({ "-Command", pwshCommand ]; - core15.debug(`Using pwsh at path: ${pwshPath}`); + core13.debug(`Using pwsh at path: ${pwshPath}`); yield (0, exec_1.exec)(`"${pwshPath}"`, args); } else { const powershellCommand = [ @@ -81171,7 +81171,7 @@ var require_tool_cache = __commonJS({ powershellCommand ]; const powershellPath = yield io6.which("powershell", true); - core15.debug(`Using powershell at path: ${powershellPath}`); + core13.debug(`Using powershell at path: ${powershellPath}`); yield (0, exec_1.exec)(`"${powershellPath}"`, args); } }); @@ -81180,7 +81180,7 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { const unzipPath = yield io6.which("unzip", true); const args = [file]; - if (!core15.isDebug()) { + if (!core13.isDebug()) { args.unshift("-q"); } args.unshift("-o"); @@ -81191,8 +81191,8 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { version = semver8.clean(version) || version; arch2 = arch2 || os3.arch(); - core15.debug(`Caching tool ${tool} ${version} ${arch2}`); - core15.debug(`source dir: ${sourceDir}`); + core13.debug(`Caching tool ${tool} ${version} ${arch2}`); + core13.debug(`source dir: ${sourceDir}`); if (!fs11.statSync(sourceDir).isDirectory()) { throw new Error("sourceDir is not a directory"); } @@ -81210,14 +81210,14 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { version = semver8.clean(version) || version; arch2 = arch2 || os3.arch(); - core15.debug(`Caching tool ${tool} ${version} ${arch2}`); - core15.debug(`source file: ${sourceFile}`); + core13.debug(`Caching tool ${tool} ${version} ${arch2}`); + core13.debug(`source file: ${sourceFile}`); if (!fs11.statSync(sourceFile).isFile()) { throw new Error("sourceFile is not a file"); } const destFolder = yield _createToolPath(tool, version, arch2); const destPath = path12.join(destFolder, targetFile); - core15.debug(`destination file ${destPath}`); + core13.debug(`destination file ${destPath}`); yield io6.cp(sourceFile, destPath); _completeToolPath(tool, version, arch2); return destFolder; @@ -81241,12 +81241,12 @@ var require_tool_cache = __commonJS({ if (versionSpec) { versionSpec = semver8.clean(versionSpec) || ""; const cachePath = path12.join(_getCacheDirectory(), toolName, versionSpec, arch2); - 
core15.debug(`checking cache: ${cachePath}`); + core13.debug(`checking cache: ${cachePath}`); if (fs11.existsSync(cachePath) && fs11.existsSync(`${cachePath}.complete`)) { - core15.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); + core13.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); toolPath = cachePath; } else { - core15.debug("not found"); + core13.debug("not found"); } } return toolPath; @@ -81277,7 +81277,7 @@ var require_tool_cache = __commonJS({ const http = new httpm.HttpClient("tool-cache"); const headers = {}; if (auth) { - core15.debug("set auth"); + core13.debug("set auth"); headers.authorization = auth; } const response = yield http.getJson(treeUrl, headers); @@ -81298,7 +81298,7 @@ var require_tool_cache = __commonJS({ try { releases = JSON.parse(versionsRaw); } catch (_a) { - core15.debug("Invalid json"); + core13.debug("Invalid json"); } } return releases; @@ -81324,7 +81324,7 @@ var require_tool_cache = __commonJS({ function _createToolPath(tool, version, arch2) { return __awaiter4(this, void 0, void 0, function* () { const folderPath = path12.join(_getCacheDirectory(), tool, semver8.clean(version) || version, arch2 || ""); - core15.debug(`destination ${folderPath}`); + core13.debug(`destination ${folderPath}`); const markerPath = `${folderPath}.complete`; yield io6.rmRF(folderPath); yield io6.rmRF(markerPath); @@ -81336,19 +81336,19 @@ var require_tool_cache = __commonJS({ const folderPath = path12.join(_getCacheDirectory(), tool, semver8.clean(version) || version, arch2 || ""); const markerPath = `${folderPath}.complete`; fs11.writeFileSync(markerPath, ""); - core15.debug("finished caching tool"); + core13.debug("finished caching tool"); } function isExplicitVersion(versionSpec) { const c = semver8.clean(versionSpec) || ""; - core15.debug(`isExplicit: ${c}`); + core13.debug(`isExplicit: ${c}`); const valid3 = semver8.valid(c) != null; - core15.debug(`explicit? ${valid3}`); + core13.debug(`explicit? ${valid3}`); return valid3; } exports2.isExplicitVersion = isExplicitVersion; function evaluateVersions(versions, versionSpec) { let version = ""; - core15.debug(`evaluating ${versions.length} versions`); + core13.debug(`evaluating ${versions.length} versions`); versions = versions.sort((a, b) => { if (semver8.gt(a, b)) { return 1; @@ -81364,9 +81364,9 @@ var require_tool_cache = __commonJS({ } } if (version) { - core15.debug(`matched: ${version}`); + core13.debug(`matched: ${version}`); } else { - core15.debug("match not found"); + core13.debug("match not found"); } return version; } @@ -81943,7 +81943,7 @@ var require_follow_redirects = __commonJS({ }); // src/setup-codeql-action.ts -var core14 = __toESM(require_core()); +var core12 = __toESM(require_core()); // node_modules/uuid/dist-node/stringify.js var byteToHex = []; @@ -86807,7 +86807,6 @@ var GitHubFeatureFlags = class { }; // src/init.ts -var core12 = __toESM(require_core()); var toolrunner4 = __toESM(require_toolrunner()); var io5 = __toESM(require_io()); @@ -88590,23 +88589,6 @@ async function getJobRunUuidSarifOptions(codeql) { ) ? 
[`--sarif-run-property=jobRunUuid=${jobRunUuid}`] : []; } -// src/workflow.ts -var core11 = __toESM(require_core()); -function toCodedErrors(errors) { - return Object.entries(errors).reduce( - (acc, [code, message]) => { - acc[code] = { message, code }; - return acc; - }, - {} - ); -} -var WorkflowErrors = toCodedErrors({ - MissingPushHook: `Please specify an on.push hook to analyze and see code scanning alerts from the default branch on the Security tab.`, - CheckoutWrongHead: `git checkout HEAD^2 is no longer necessary. Please remove this step as Code Scanning recommends analyzing the merge commit for best results.`, - InconsistentActionVersion: `Not all workflow steps that use \`github/codeql-action\` actions use the same version. Please ensure that all such steps use the same version to avoid compatibility issues.` -}); - // src/init.ts async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger) { logger.startGroup("Setup CodeQL tools"); @@ -88639,7 +88621,7 @@ async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVe // src/status-report.ts var os2 = __toESM(require("os")); -var core13 = __toESM(require_core()); +var core11 = __toESM(require_core()); function isFirstPartyAnalysis(actionName) { if (actionName !== "upload-sarif" /* UploadSarif */) { return true; @@ -88655,12 +88637,12 @@ function getActionsStatus(error2, otherFailureCause) { } function setJobStatusIfUnsuccessful(actionStatus) { if (actionStatus === "user-error") { - core13.exportVariable( + core11.exportVariable( "CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */, process.env["CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */] ?? "JOB_STATUS_CONFIGURATION_ERROR" /* ConfigErrorStatus */ ); } else if (actionStatus === "failure" || actionStatus === "aborted") { - core13.exportVariable( + core11.exportVariable( "CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */, process.env["CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */] ?? "JOB_STATUS_FAILURE" /* FailureStatus */ ); @@ -88679,14 +88661,14 @@ async function createStatusReportBase(actionName, status, actionStartedAt, confi let workflowStartedAt = process.env["CODEQL_WORKFLOW_STARTED_AT" /* WORKFLOW_STARTED_AT */]; if (workflowStartedAt === void 0) { workflowStartedAt = actionStartedAt.toISOString(); - core13.exportVariable("CODEQL_WORKFLOW_STARTED_AT" /* WORKFLOW_STARTED_AT */, workflowStartedAt); + core11.exportVariable("CODEQL_WORKFLOW_STARTED_AT" /* WORKFLOW_STARTED_AT */, workflowStartedAt); } const runnerOs = getRequiredEnvParam("RUNNER_OS"); const codeQlCliVersion = getCachedCodeQlVersion(); const actionRef = process.env["GITHUB_ACTION_REF"] || ""; const testingEnvironment = getTestingEnvironment(); if (testingEnvironment) { - core13.exportVariable("CODEQL_ACTION_TESTING_ENVIRONMENT" /* TESTING_ENVIRONMENT */, testingEnvironment); + core11.exportVariable("CODEQL_ACTION_TESTING_ENVIRONMENT" /* TESTING_ENVIRONMENT */, testingEnvironment); } const isSteadyStateDefaultSetupRun = process.env["CODE_SCANNING_IS_STEADY_STATE_DEFAULT_SETUP"] === "true"; const statusReport = { @@ -88767,9 +88749,9 @@ var INCOMPATIBLE_MSG = "CodeQL Action version is incompatible with the API endpo async function sendStatusReport(statusReport) { setJobStatusIfUnsuccessful(statusReport.status); const statusReportJSON = JSON.stringify(statusReport); - core13.debug(`Sending status report: ${statusReportJSON}`); + core11.debug(`Sending status report: ${statusReportJSON}`); if (isInTestMode()) { - core13.debug("In test mode. 
Status reports are not uploaded."); + core11.debug("In test mode. Status reports are not uploaded."); return; } const nwo = getRepositoryNwo(); @@ -88789,28 +88771,28 @@ async function sendStatusReport(statusReport) { switch (httpError.status) { case 403: if (getWorkflowEventName() === "push" && process.env["GITHUB_ACTOR"] === "dependabot[bot]") { - core13.warning( + core11.warning( `Workflows triggered by Dependabot on the "push" event run with read-only access. Uploading CodeQL results requires write access. To use CodeQL with Dependabot, please ensure you are using the "pull_request" event for this workflow and avoid triggering on the "push" event for Dependabot branches. See ${"https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/customizing-your-advanced-setup-for-code-scanning#scanning-on-push" /* SCANNING_ON_PUSH */} for more information on how to configure these events.` ); } else { - core13.warning( + core11.warning( `This run of the CodeQL Action does not have permission to access the CodeQL Action API endpoints. This could be because the Action is running on a pull request from a fork. If not, please ensure the workflow has at least the 'security-events: read' permission. Details: ${httpError.message}` ); } return; case 404: - core13.warning(httpError.message); + core11.warning(httpError.message); return; case 422: if (getRequiredEnvParam("GITHUB_SERVER_URL") !== GITHUB_DOTCOM_URL) { - core13.debug(INCOMPATIBLE_MSG); + core11.debug(INCOMPATIBLE_MSG); } else { - core13.debug(OUT_OF_DATE_MSG); + core11.debug(OUT_OF_DATE_MSG); } return; } } - core13.warning( + core11.warning( `An unexpected error occurred when sending a status report: ${getErrorMessage( e )}` @@ -88876,7 +88858,7 @@ async function run() { ); const jobRunUuid = v4_default(); logger.info(`Job run UUID is ${jobRunUuid}.`); - core14.exportVariable("JOB_RUN_UUID" /* JOB_RUN_UUID */, jobRunUuid); + core12.exportVariable("JOB_RUN_UUID" /* JOB_RUN_UUID */, jobRunUuid); try { const statusReportBase = await createStatusReportBase( "setup-codeql" /* SetupCodeQL */, @@ -88906,12 +88888,12 @@ async function run() { toolsDownloadStatusReport = initCodeQLResult.toolsDownloadStatusReport; toolsVersion = initCodeQLResult.toolsVersion; toolsSource = initCodeQLResult.toolsSource; - core14.setOutput("codeql-path", codeql.getPath()); - core14.setOutput("codeql-version", (await codeql.getVersion()).version); - core14.exportVariable("CODEQL_ACTION_SETUP_CODEQL_HAS_RUN" /* SETUP_CODEQL_ACTION_HAS_RUN */, "true"); + core12.setOutput("codeql-path", codeql.getPath()); + core12.setOutput("codeql-version", (await codeql.getVersion()).version); + core12.exportVariable("CODEQL_ACTION_SETUP_CODEQL_HAS_RUN" /* SETUP_CODEQL_ACTION_HAS_RUN */, "true"); } catch (unwrappedError) { const error2 = wrapError(unwrappedError); - core14.setFailed(error2.message); + core12.setFailed(error2.message); const statusReportBase = await createStatusReportBase( "setup-codeql" /* SetupCodeQL */, error2 instanceof ConfigurationError ? 
"user-error" : "failure", @@ -88940,7 +88922,7 @@ async function runWrapper() { try { await run(); } catch (error2) { - core14.setFailed(`setup-codeql action failed: ${getErrorMessage(error2)}`); + core12.setFailed(`setup-codeql action failed: ${getErrorMessage(error2)}`); } await checkForTimeout(); } diff --git a/lib/upload-lib.js b/lib/upload-lib.js index 0bd4083269..b5f901089d 100644 --- a/lib/upload-lib.js +++ b/lib/upload-lib.js @@ -19743,10 +19743,10 @@ Support boolean input list: \`true | True | TRUE | false | False | FALSE\``); (0, command_1.issueCommand)("error", (0, utils_1.toCommandProperties)(properties), message instanceof Error ? message.toString() : message); } exports2.error = error2; - function warning7(message, properties = {}) { + function warning6(message, properties = {}) { (0, command_1.issueCommand)("warning", (0, utils_1.toCommandProperties)(properties), message instanceof Error ? message.toString() : message); } - exports2.warning = warning7; + exports2.warning = warning6; function notice(message, properties = {}) { (0, command_1.issueCommand)("notice", (0, utils_1.toCommandProperties)(properties), message instanceof Error ? message.toString() : message); } @@ -19755,22 +19755,22 @@ Support boolean input list: \`true | True | TRUE | false | False | FALSE\``); process.stdout.write(message + os2.EOL); } exports2.info = info4; - function startGroup4(name) { + function startGroup3(name) { (0, command_1.issue)("group", name); } - exports2.startGroup = startGroup4; - function endGroup4() { + exports2.startGroup = startGroup3; + function endGroup3() { (0, command_1.issue)("endgroup"); } - exports2.endGroup = endGroup4; + exports2.endGroup = endGroup3; function group(name, fn) { return __awaiter4(this, void 0, void 0, function* () { - startGroup4(name); + startGroup3(name); let result; try { result = yield fn(); } finally { - endGroup4(); + endGroup3(); } return result; }); @@ -35312,7 +35312,7 @@ var require_internal_glob_options_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getOptions = void 0; - var core14 = __importStar4(require_core()); + var core12 = __importStar4(require_core()); function getOptions(copy) { const result = { followSymbolicLinks: true, @@ -35322,15 +35322,15 @@ var require_internal_glob_options_helper = __commonJS({ if (copy) { if (typeof copy.followSymbolicLinks === "boolean") { result.followSymbolicLinks = copy.followSymbolicLinks; - core14.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); + core12.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); } if (typeof copy.implicitDescendants === "boolean") { result.implicitDescendants = copy.implicitDescendants; - core14.debug(`implicitDescendants '${result.implicitDescendants}'`); + core12.debug(`implicitDescendants '${result.implicitDescendants}'`); } if (typeof copy.omitBrokenSymbolicLinks === "boolean") { result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; - core14.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); + core12.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); } } return result; @@ -36784,7 +36784,7 @@ var require_internal_globber = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; - var core14 = __importStar4(require_core()); + var core12 = __importStar4(require_core()); var fs14 = __importStar4(require("fs")); var globOptionsHelper = __importStar4(require_internal_glob_options_helper()); var path15 = 
__importStar4(require("path")); @@ -36835,7 +36835,7 @@ var require_internal_globber = __commonJS({ } const stack = []; for (const searchPath of patternHelper.getSearchPaths(patterns)) { - core14.debug(`Search path '${searchPath}'`); + core12.debug(`Search path '${searchPath}'`); try { yield __await4(fs14.promises.lstat(searchPath)); } catch (err) { @@ -36907,7 +36907,7 @@ var require_internal_globber = __commonJS({ } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { - core14.debug(`Broken symlink '${item.path}'`); + core12.debug(`Broken symlink '${item.path}'`); return void 0; } throw new Error(`No information found for the path '${item.path}'. This may indicate a broken symbolic link.`); @@ -36923,7 +36923,7 @@ var require_internal_globber = __commonJS({ traversalChain.pop(); } if (traversalChain.some((x) => x === realPath)) { - core14.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); + core12.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); return void 0; } traversalChain.push(realPath); @@ -38247,7 +38247,7 @@ var require_cacheUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getRuntimeToken = exports2.getCacheVersion = exports2.assertDefined = exports2.getGnuTarPathOnWindows = exports2.getCacheFileName = exports2.getCompressionMethod = exports2.unlinkFile = exports2.resolvePaths = exports2.getArchiveFileSizeInBytes = exports2.createTempDirectory = void 0; - var core14 = __importStar4(require_core()); + var core12 = __importStar4(require_core()); var exec2 = __importStar4(require_exec()); var glob = __importStar4(require_glob()); var io6 = __importStar4(require_io()); @@ -38300,7 +38300,7 @@ var require_cacheUtils = __commonJS({ _e = false; const file = _c; const relativeFile = path15.relative(workspace, file).replace(new RegExp(`\\${path15.sep}`, "g"), "/"); - core14.debug(`Matched: ${relativeFile}`); + core12.debug(`Matched: ${relativeFile}`); if (relativeFile === "") { paths.push("."); } else { @@ -38330,7 +38330,7 @@ var require_cacheUtils = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { let versionOutput = ""; additionalArgs.push("--version"); - core14.debug(`Checking ${app} ${additionalArgs.join(" ")}`); + core12.debug(`Checking ${app} ${additionalArgs.join(" ")}`); try { yield exec2.exec(`${app}`, additionalArgs, { ignoreReturnCode: true, @@ -38341,10 +38341,10 @@ var require_cacheUtils = __commonJS({ } }); } catch (err) { - core14.debug(err.message); + core12.debug(err.message); } versionOutput = versionOutput.trim(); - core14.debug(versionOutput); + core12.debug(versionOutput); return versionOutput; }); } @@ -38352,7 +38352,7 @@ var require_cacheUtils = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { const versionOutput = yield getVersion("zstd", ["--quiet"]); const version = semver8.clean(versionOutput); - core14.debug(`zstd version: ${version}`); + core12.debug(`zstd version: ${version}`); if (versionOutput === "") { return constants_1.CompressionMethod.Gzip; } else { @@ -73664,7 +73664,7 @@ var require_uploadUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.uploadCacheArchiveSDK = exports2.UploadProgress = void 0; - var core14 = __importStar4(require_core()); + var core12 = __importStar4(require_core()); var storage_blob_1 = require_dist7(); var errors_1 = require_errors2(); var UploadProgress = class { @@ -73706,7 +73706,7 @@ var 
require_uploadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const uploadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core14.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); + core12.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -73761,14 +73761,14 @@ var require_uploadUtils = __commonJS({ }; try { uploadProgress.startDisplayTimer(); - core14.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); + core12.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); const response = yield blockBlobClient.uploadFile(archivePath, uploadOptions); if (response._response.status >= 400) { throw new errors_1.InvalidResponseError(`uploadCacheArchiveSDK: upload failed with status code ${response._response.status}`); } return response; } catch (error2) { - core14.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error2.message}`); + core12.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error2.message}`); throw error2; } finally { uploadProgress.stopDisplayTimer(); @@ -73839,7 +73839,7 @@ var require_requestUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.retryHttpClientResponse = exports2.retryTypedResponse = exports2.retry = exports2.isRetryableStatusCode = exports2.isServerErrorStatusCode = exports2.isSuccessStatusCode = void 0; - var core14 = __importStar4(require_core()); + var core12 = __importStar4(require_core()); var http_client_1 = require_lib(); var constants_1 = require_constants10(); function isSuccessStatusCode(statusCode) { @@ -73900,9 +73900,9 @@ var require_requestUtils = __commonJS({ isRetryable = isRetryableStatusCode(statusCode); errorMessage = `Cache service responded with ${statusCode}`; } - core14.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); + core12.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); if (!isRetryable) { - core14.debug(`${name} - Error is not retryable`); + core12.debug(`${name} - Error is not retryable`); break; } yield sleep(delay2); @@ -74007,7 +74007,7 @@ var require_downloadUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.downloadCacheStorageSDK = exports2.downloadCacheHttpClientConcurrent = exports2.downloadCacheHttpClient = exports2.DownloadProgress = void 0; - var core14 = __importStar4(require_core()); + var core12 = __importStar4(require_core()); var http_client_1 = require_lib(); var storage_blob_1 = require_dist7(); var buffer = __importStar4(require("buffer")); @@ -74045,7 +74045,7 @@ var require_downloadUtils = __commonJS({ this.segmentIndex = this.segmentIndex + 1; this.segmentSize = segmentSize; this.receivedBytes = 0; - core14.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); + core12.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); } /** * Sets the number of bytes received for the current segment. 
@@ -74079,7 +74079,7 @@ var require_downloadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const downloadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core14.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); + core12.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -74129,7 +74129,7 @@ var require_downloadUtils = __commonJS({ })); downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => { downloadResponse.message.destroy(); - core14.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); + core12.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); }); yield pipeResponseToStream(downloadResponse, writeStream); const contentLengthHeader = downloadResponse.message.headers["content-length"]; @@ -74140,7 +74140,7 @@ var require_downloadUtils = __commonJS({ throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`); } } else { - core14.debug("Unable to validate download, no Content-Length header"); + core12.debug("Unable to validate download, no Content-Length header"); } }); } @@ -74260,7 +74260,7 @@ var require_downloadUtils = __commonJS({ const properties = yield client.getProperties(); const contentLength = (_a = properties.contentLength) !== null && _a !== void 0 ? _a : -1; if (contentLength < 0) { - core14.debug("Unable to determine content length, downloading file with http-client..."); + core12.debug("Unable to determine content length, downloading file with http-client..."); yield downloadCacheHttpClient(archiveLocation, archivePath); } else { const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH); @@ -74340,7 +74340,7 @@ var require_options = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getDownloadOptions = exports2.getUploadOptions = void 0; - var core14 = __importStar4(require_core()); + var core12 = __importStar4(require_core()); function getUploadOptions(copy) { const result = { useAzureSdk: false, @@ -74360,9 +74360,9 @@ var require_options = __commonJS({ } result.uploadConcurrency = !isNaN(Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) ? Math.min(32, Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) : result.uploadConcurrency; result.uploadChunkSize = !isNaN(Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"])) ? 
Math.min(128 * 1024 * 1024, Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"]) * 1024 * 1024) : result.uploadChunkSize; - core14.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core14.debug(`Upload concurrency: ${result.uploadConcurrency}`); - core14.debug(`Upload chunk size: ${result.uploadChunkSize}`); + core12.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core12.debug(`Upload concurrency: ${result.uploadConcurrency}`); + core12.debug(`Upload chunk size: ${result.uploadChunkSize}`); return result; } exports2.getUploadOptions = getUploadOptions; @@ -74399,12 +74399,12 @@ var require_options = __commonJS({ if (segmentDownloadTimeoutMins && !isNaN(Number(segmentDownloadTimeoutMins)) && isFinite(Number(segmentDownloadTimeoutMins))) { result.segmentTimeoutInMs = Number(segmentDownloadTimeoutMins) * 60 * 1e3; } - core14.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core14.debug(`Download concurrency: ${result.downloadConcurrency}`); - core14.debug(`Request timeout (ms): ${result.timeoutInMs}`); - core14.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); - core14.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); - core14.debug(`Lookup only: ${result.lookupOnly}`); + core12.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core12.debug(`Download concurrency: ${result.downloadConcurrency}`); + core12.debug(`Request timeout (ms): ${result.timeoutInMs}`); + core12.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); + core12.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); + core12.debug(`Lookup only: ${result.lookupOnly}`); return result; } exports2.getDownloadOptions = getDownloadOptions; @@ -74584,7 +74584,7 @@ var require_cacheHttpClient = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.saveCache = exports2.reserveCache = exports2.downloadCache = exports2.getCacheEntry = void 0; - var core14 = __importStar4(require_core()); + var core12 = __importStar4(require_core()); var http_client_1 = require_lib(); var auth_1 = require_auth(); var fs14 = __importStar4(require("fs")); @@ -74602,7 +74602,7 @@ var require_cacheHttpClient = __commonJS({ throw new Error("Cache Service Url not found, unable to restore cache."); } const url2 = `${baseUrl}_apis/artifactcache/${resource}`; - core14.debug(`Resource Url: ${url2}`); + core12.debug(`Resource Url: ${url2}`); return url2; } function createAcceptHeader(type2, apiVersion) { @@ -74630,7 +74630,7 @@ var require_cacheHttpClient = __commonJS({ return httpClient.getJson(getCacheApiUrl(resource)); })); if (response.statusCode === 204) { - if (core14.isDebug()) { + if (core12.isDebug()) { yield printCachesListForDiagnostics(keys[0], httpClient, version); } return null; @@ -74643,9 +74643,9 @@ var require_cacheHttpClient = __commonJS({ if (!cacheDownloadUrl) { throw new Error("Cache not found."); } - core14.setSecret(cacheDownloadUrl); - core14.debug(`Cache Result:`); - core14.debug(JSON.stringify(cacheResult)); + core12.setSecret(cacheDownloadUrl); + core12.debug(`Cache Result:`); + core12.debug(JSON.stringify(cacheResult)); return cacheResult; }); } @@ -74660,10 +74660,10 @@ var require_cacheHttpClient = __commonJS({ const cacheListResult = response.result; const totalCount = cacheListResult === null || cacheListResult === void 0 ? 
void 0 : cacheListResult.totalCount; if (totalCount && totalCount > 0) { - core14.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key + core12.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key Other caches with similar key:`); for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) { - core14.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); + core12.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); } } } @@ -74708,7 +74708,7 @@ Other caches with similar key:`); } function uploadChunk(httpClient, resourceUrl, openStream, start, end) { return __awaiter4(this, void 0, void 0, function* () { - core14.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); + core12.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); const additionalHeaders = { "Content-Type": "application/octet-stream", "Content-Range": getContentRange(start, end) @@ -74730,7 +74730,7 @@ Other caches with similar key:`); const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency); const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize); const parallelUploads = [...new Array(concurrency).keys()]; - core14.debug("Awaiting all uploads"); + core12.debug("Awaiting all uploads"); let offset = 0; try { yield Promise.all(parallelUploads.map(() => __awaiter4(this, void 0, void 0, function* () { @@ -74773,16 +74773,16 @@ Other caches with similar key:`); yield (0, uploadUtils_1.uploadCacheArchiveSDK)(signedUploadURL, archivePath, options); } else { const httpClient = createHttpClient(); - core14.debug("Upload cache"); + core12.debug("Upload cache"); yield uploadFile(httpClient, cacheId, archivePath, options); - core14.debug("Commiting cache"); + core12.debug("Commiting cache"); const cacheSize = utils.getArchiveFileSizeInBytes(archivePath); - core14.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); + core12.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); const commitCacheResponse = yield commitCache(httpClient, 
cacheId, cacheSize); if (!(0, requestUtils_1.isSuccessStatusCode)(commitCacheResponse.statusCode)) { throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`); } - core14.info("Cache saved successfully"); + core12.info("Cache saved successfully"); } }); } @@ -80234,7 +80234,7 @@ var require_cache3 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.saveCache = exports2.restoreCache = exports2.isFeatureAvailable = exports2.FinalizeCacheError = exports2.ReserveCacheError = exports2.ValidationError = void 0; - var core14 = __importStar4(require_core()); + var core12 = __importStar4(require_core()); var path15 = __importStar4(require("path")); var utils = __importStar4(require_cacheUtils()); var cacheHttpClient = __importStar4(require_cacheHttpClient()); @@ -80294,7 +80294,7 @@ var require_cache3 = __commonJS({ function restoreCache3(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { return __awaiter4(this, void 0, void 0, function* () { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core14.debug(`Cache service version: ${cacheServiceVersion}`); + core12.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); switch (cacheServiceVersion) { case "v2": @@ -80310,8 +80310,8 @@ var require_cache3 = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core14.debug("Resolved Keys:"); - core14.debug(JSON.stringify(keys)); + core12.debug("Resolved Keys:"); + core12.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -80329,19 +80329,19 @@ var require_cache3 = __commonJS({ return void 0; } if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { - core14.info("Lookup only - skipping download"); + core12.info("Lookup only - skipping download"); return cacheEntry.cacheKey; } archivePath = path15.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core14.debug(`Archive Path: ${archivePath}`); + core12.debug(`Archive Path: ${archivePath}`); yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); - if (core14.isDebug()) { + if (core12.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core14.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + core12.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); yield (0, tar_1.extractTar)(archivePath, compressionMethod); - core14.info("Cache restored successfully"); + core12.info("Cache restored successfully"); return cacheEntry.cacheKey; } catch (error2) { const typedError = error2; @@ -80349,16 +80349,16 @@ var require_cache3 = __commonJS({ throw error2; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core14.error(`Failed to restore: ${error2.message}`); + core12.error(`Failed to restore: ${error2.message}`); } else { - core14.warning(`Failed to restore: ${error2.message}`); + core12.warning(`Failed to restore: ${error2.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error2) { - core14.debug(`Failed to delete archive: ${error2}`); + core12.debug(`Failed to delete archive: ${error2}`); } } return void 0; @@ -80369,8 +80369,8 @@ var require_cache3 = __commonJS({ options = Object.assign(Object.assign({}, options), { useAzureSdk: true }); restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core14.debug("Resolved Keys:"); - core14.debug(JSON.stringify(keys)); + core12.debug("Resolved Keys:"); + core12.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -80388,30 +80388,30 @@ var require_cache3 = __commonJS({ }; const response = yield twirpClient.GetCacheEntryDownloadURL(request); if (!response.ok) { - core14.debug(`Cache not found for version ${request.version} of keys: ${keys.join(", ")}`); + core12.debug(`Cache not found for version ${request.version} of keys: ${keys.join(", ")}`); return void 0; } const isRestoreKeyMatch = request.key !== response.matchedKey; if (isRestoreKeyMatch) { - core14.info(`Cache hit for restore-key: ${response.matchedKey}`); + core12.info(`Cache hit for restore-key: ${response.matchedKey}`); } else { - core14.info(`Cache hit for: ${response.matchedKey}`); + core12.info(`Cache hit for: ${response.matchedKey}`); } if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { - core14.info("Lookup only - skipping download"); + core12.info("Lookup only - skipping download"); return response.matchedKey; } archivePath = path15.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core14.debug(`Archive path: ${archivePath}`); - core14.debug(`Starting download of archive to: ${archivePath}`); + core12.debug(`Archive path: ${archivePath}`); + core12.debug(`Starting download of archive to: ${archivePath}`); yield cacheHttpClient.downloadCache(response.signedDownloadUrl, archivePath, options); const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core14.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); - if (core14.isDebug()) { + core12.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + if (core12.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } yield (0, tar_1.extractTar)(archivePath, compressionMethod); - core14.info("Cache restored successfully"); + core12.info("Cache restored successfully"); return response.matchedKey; } catch (error2) { const typedError = error2; @@ -80419,9 +80419,9 @@ var require_cache3 = __commonJS({ throw error2; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core14.error(`Failed to restore: ${error2.message}`); + core12.error(`Failed to restore: ${error2.message}`); } else { - core14.warning(`Failed to restore: ${error2.message}`); + core12.warning(`Failed to restore: ${error2.message}`); } } } finally { @@ -80430,7 +80430,7 @@ var require_cache3 = __commonJS({ yield utils.unlinkFile(archivePath); } } catch (error2) { - core14.debug(`Failed to delete archive: ${error2}`); + core12.debug(`Failed to delete archive: ${error2}`); } } return void 0; @@ -80439,7 +80439,7 @@ var require_cache3 = __commonJS({ function saveCache3(paths, key, options, enableCrossOsArchive = false) { return __awaiter4(this, void 0, void 0, function* () { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core14.debug(`Cache service version: ${cacheServiceVersion}`); + core12.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); checkKey(key); switch (cacheServiceVersion) { @@ -80458,26 +80458,26 @@ var require_cache3 = __commonJS({ const compressionMethod = yield utils.getCompressionMethod(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core14.debug("Cache Paths:"); - core14.debug(`${JSON.stringify(cachePaths)}`); + core12.debug("Cache Paths:"); + core12.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = path15.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core14.debug(`Archive Path: ${archivePath}`); + core12.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core14.isDebug()) { + if (core12.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const fileSizeLimit = 10 * 1024 * 1024 * 1024; const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core14.debug(`File Size: ${archiveFileSize}`); + core12.debug(`File Size: ${archiveFileSize}`); if 
(archiveFileSize > fileSizeLimit && !(0, config_1.isGhes)()) { throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); } - core14.debug("Reserving Cache"); + core12.debug("Reserving Cache"); const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, { compressionMethod, enableCrossOsArchive, @@ -80490,26 +80490,26 @@ var require_cache3 = __commonJS({ } else { throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? void 0 : _e.message}`); } - core14.debug(`Saving Cache (ID: ${cacheId})`); + core12.debug(`Saving Cache (ID: ${cacheId})`); yield cacheHttpClient.saveCache(cacheId, archivePath, "", options); } catch (error2) { const typedError = error2; if (typedError.name === ValidationError.name) { throw error2; } else if (typedError.name === ReserveCacheError.name) { - core14.info(`Failed to save: ${typedError.message}`); + core12.info(`Failed to save: ${typedError.message}`); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core14.error(`Failed to save: ${typedError.message}`); + core12.error(`Failed to save: ${typedError.message}`); } else { - core14.warning(`Failed to save: ${typedError.message}`); + core12.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error2) { - core14.debug(`Failed to delete archive: ${error2}`); + core12.debug(`Failed to delete archive: ${error2}`); } } return cacheId; @@ -80522,23 +80522,23 @@ var require_cache3 = __commonJS({ const twirpClient = cacheTwirpClient.internalCacheTwirpClient(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core14.debug("Cache Paths:"); - core14.debug(`${JSON.stringify(cachePaths)}`); + core12.debug("Cache Paths:"); + core12.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = path15.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core14.debug(`Archive Path: ${archivePath}`); + core12.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core14.isDebug()) { + if (core12.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core14.debug(`File Size: ${archiveFileSize}`); + core12.debug(`File Size: ${archiveFileSize}`); options.archiveSizeBytes = archiveFileSize; - core14.debug("Reserving Cache"); + core12.debug("Reserving Cache"); const version = utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive); const request = { key, @@ -80549,16 +80549,16 @@ var require_cache3 = __commonJS({ const response = yield twirpClient.CreateCacheEntry(request); if (!response.ok) { if (response.message) { - core14.warning(`Cache reservation failed: ${response.message}`); + core12.warning(`Cache reservation failed: ${response.message}`); } throw new Error(response.message || "Response was not ok"); } signedUploadUrl = 
response.signedUploadUrl; } catch (error2) { - core14.debug(`Failed to reserve cache: ${error2}`); + core12.debug(`Failed to reserve cache: ${error2}`); throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache.`); } - core14.debug(`Attempting to upload cache located at: ${archivePath}`); + core12.debug(`Attempting to upload cache located at: ${archivePath}`); yield cacheHttpClient.saveCache(cacheId, archivePath, signedUploadUrl, options); const finalizeRequest = { key, @@ -80566,7 +80566,7 @@ var require_cache3 = __commonJS({ sizeBytes: `${archiveFileSize}` }; const finalizeResponse = yield twirpClient.FinalizeCacheEntryUpload(finalizeRequest); - core14.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); + core12.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); if (!finalizeResponse.ok) { if (finalizeResponse.message) { throw new FinalizeCacheError(finalizeResponse.message); @@ -80579,21 +80579,21 @@ var require_cache3 = __commonJS({ if (typedError.name === ValidationError.name) { throw error2; } else if (typedError.name === ReserveCacheError.name) { - core14.info(`Failed to save: ${typedError.message}`); + core12.info(`Failed to save: ${typedError.message}`); } else if (typedError.name === FinalizeCacheError.name) { - core14.warning(typedError.message); + core12.warning(typedError.message); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core14.error(`Failed to save: ${typedError.message}`); + core12.error(`Failed to save: ${typedError.message}`); } else { - core14.warning(`Failed to save: ${typedError.message}`); + core12.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error2) { - core14.debug(`Failed to delete archive: ${error2}`); + core12.debug(`Failed to delete archive: ${error2}`); } } return cacheId; @@ -80801,7 +80801,7 @@ var require_retry_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.RetryHelper = void 0; - var core14 = __importStar4(require_core()); + var core12 = __importStar4(require_core()); var RetryHelper = class { constructor(maxAttempts, minSeconds, maxSeconds) { if (maxAttempts < 1) { @@ -80824,10 +80824,10 @@ var require_retry_helper = __commonJS({ if (isRetryable && !isRetryable(err)) { throw err; } - core14.info(err.message); + core12.info(err.message); } const seconds = this.getSleepAmount(); - core14.info(`Waiting ${seconds} seconds before trying again`); + core12.info(`Waiting ${seconds} seconds before trying again`); yield this.sleep(seconds); attempt++; } @@ -80907,7 +80907,7 @@ var require_tool_cache = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.evaluateVersions = exports2.isExplicitVersion = exports2.findFromManifest = exports2.getManifestFromRepo = exports2.findAllVersions = exports2.find = exports2.cacheFile = exports2.cacheDir = exports2.extractZip = exports2.extractXar = exports2.extractTar = exports2.extract7z = exports2.downloadTool = exports2.HTTPError = void 0; - var core14 = __importStar4(require_core()); + var core12 = __importStar4(require_core()); var io6 = __importStar4(require_io()); var crypto = __importStar4(require("crypto")); var fs14 = __importStar4(require("fs")); @@ -80936,8 +80936,8 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { dest = dest || 
path15.join(_getTempDirectory(), crypto.randomUUID()); yield io6.mkdirP(path15.dirname(dest)); - core14.debug(`Downloading ${url2}`); - core14.debug(`Destination ${dest}`); + core12.debug(`Downloading ${url2}`); + core12.debug(`Destination ${dest}`); const maxAttempts = 3; const minSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS", 10); const maxSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS", 20); @@ -80964,7 +80964,7 @@ var require_tool_cache = __commonJS({ allowRetries: false }); if (auth) { - core14.debug("set auth"); + core12.debug("set auth"); if (headers === void 0) { headers = {}; } @@ -80973,7 +80973,7 @@ var require_tool_cache = __commonJS({ const response = yield http.get(url2, headers); if (response.message.statusCode !== 200) { const err = new HTTPError2(response.message.statusCode); - core14.debug(`Failed to download from "${url2}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); + core12.debug(`Failed to download from "${url2}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); throw err; } const pipeline = util.promisify(stream2.pipeline); @@ -80982,16 +80982,16 @@ var require_tool_cache = __commonJS({ let succeeded = false; try { yield pipeline(readStream, fs14.createWriteStream(dest)); - core14.debug("download complete"); + core12.debug("download complete"); succeeded = true; return dest; } finally { if (!succeeded) { - core14.debug("download failed"); + core12.debug("download failed"); try { yield io6.rmRF(dest); } catch (err) { - core14.debug(`Failed to delete '${dest}'. ${err.message}`); + core12.debug(`Failed to delete '${dest}'. ${err.message}`); } } } @@ -81006,7 +81006,7 @@ var require_tool_cache = __commonJS({ process.chdir(dest); if (_7zPath) { try { - const logLevel = core14.isDebug() ? "-bb1" : "-bb0"; + const logLevel = core12.isDebug() ? 
"-bb1" : "-bb0"; const args = [ "x", logLevel, @@ -81056,7 +81056,7 @@ var require_tool_cache = __commonJS({ throw new Error("parameter 'file' is required"); } dest = yield _createExtractFolder(dest); - core14.debug("Checking tar --version"); + core12.debug("Checking tar --version"); let versionOutput = ""; yield (0, exec_1.exec)("tar --version", [], { ignoreReturnCode: true, @@ -81066,7 +81066,7 @@ var require_tool_cache = __commonJS({ stderr: (data) => versionOutput += data.toString() } }); - core14.debug(versionOutput.trim()); + core12.debug(versionOutput.trim()); const isGnuTar = versionOutput.toUpperCase().includes("GNU TAR"); let args; if (flags instanceof Array) { @@ -81074,7 +81074,7 @@ var require_tool_cache = __commonJS({ } else { args = [flags]; } - if (core14.isDebug() && !flags.includes("v")) { + if (core12.isDebug() && !flags.includes("v")) { args.push("-v"); } let destArg = dest; @@ -81106,7 +81106,7 @@ var require_tool_cache = __commonJS({ args = [flags]; } args.push("-x", "-C", dest, "-f", file); - if (core14.isDebug()) { + if (core12.isDebug()) { args.push("-v"); } const xarPath = yield io6.which("xar", true); @@ -81151,7 +81151,7 @@ var require_tool_cache = __commonJS({ "-Command", pwshCommand ]; - core14.debug(`Using pwsh at path: ${pwshPath}`); + core12.debug(`Using pwsh at path: ${pwshPath}`); yield (0, exec_1.exec)(`"${pwshPath}"`, args); } else { const powershellCommand = [ @@ -81171,7 +81171,7 @@ var require_tool_cache = __commonJS({ powershellCommand ]; const powershellPath = yield io6.which("powershell", true); - core14.debug(`Using powershell at path: ${powershellPath}`); + core12.debug(`Using powershell at path: ${powershellPath}`); yield (0, exec_1.exec)(`"${powershellPath}"`, args); } }); @@ -81180,7 +81180,7 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { const unzipPath = yield io6.which("unzip", true); const args = [file]; - if (!core14.isDebug()) { + if (!core12.isDebug()) { args.unshift("-q"); } args.unshift("-o"); @@ -81191,8 +81191,8 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { version = semver8.clean(version) || version; arch2 = arch2 || os2.arch(); - core14.debug(`Caching tool ${tool} ${version} ${arch2}`); - core14.debug(`source dir: ${sourceDir}`); + core12.debug(`Caching tool ${tool} ${version} ${arch2}`); + core12.debug(`source dir: ${sourceDir}`); if (!fs14.statSync(sourceDir).isDirectory()) { throw new Error("sourceDir is not a directory"); } @@ -81210,14 +81210,14 @@ var require_tool_cache = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { version = semver8.clean(version) || version; arch2 = arch2 || os2.arch(); - core14.debug(`Caching tool ${tool} ${version} ${arch2}`); - core14.debug(`source file: ${sourceFile}`); + core12.debug(`Caching tool ${tool} ${version} ${arch2}`); + core12.debug(`source file: ${sourceFile}`); if (!fs14.statSync(sourceFile).isFile()) { throw new Error("sourceFile is not a file"); } const destFolder = yield _createToolPath(tool, version, arch2); const destPath = path15.join(destFolder, targetFile); - core14.debug(`destination file ${destPath}`); + core12.debug(`destination file ${destPath}`); yield io6.cp(sourceFile, destPath); _completeToolPath(tool, version, arch2); return destFolder; @@ -81241,12 +81241,12 @@ var require_tool_cache = __commonJS({ if (versionSpec) { versionSpec = semver8.clean(versionSpec) || ""; const cachePath = path15.join(_getCacheDirectory(), toolName, versionSpec, arch2); - 
core14.debug(`checking cache: ${cachePath}`); + core12.debug(`checking cache: ${cachePath}`); if (fs14.existsSync(cachePath) && fs14.existsSync(`${cachePath}.complete`)) { - core14.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); + core12.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); toolPath = cachePath; } else { - core14.debug("not found"); + core12.debug("not found"); } } return toolPath; @@ -81277,7 +81277,7 @@ var require_tool_cache = __commonJS({ const http = new httpm.HttpClient("tool-cache"); const headers = {}; if (auth) { - core14.debug("set auth"); + core12.debug("set auth"); headers.authorization = auth; } const response = yield http.getJson(treeUrl, headers); @@ -81298,7 +81298,7 @@ var require_tool_cache = __commonJS({ try { releases = JSON.parse(versionsRaw); } catch (_a) { - core14.debug("Invalid json"); + core12.debug("Invalid json"); } } return releases; @@ -81324,7 +81324,7 @@ var require_tool_cache = __commonJS({ function _createToolPath(tool, version, arch2) { return __awaiter4(this, void 0, void 0, function* () { const folderPath = path15.join(_getCacheDirectory(), tool, semver8.clean(version) || version, arch2 || ""); - core14.debug(`destination ${folderPath}`); + core12.debug(`destination ${folderPath}`); const markerPath = `${folderPath}.complete`; yield io6.rmRF(folderPath); yield io6.rmRF(markerPath); @@ -81336,19 +81336,19 @@ var require_tool_cache = __commonJS({ const folderPath = path15.join(_getCacheDirectory(), tool, semver8.clean(version) || version, arch2 || ""); const markerPath = `${folderPath}.complete`; fs14.writeFileSync(markerPath, ""); - core14.debug("finished caching tool"); + core12.debug("finished caching tool"); } function isExplicitVersion(versionSpec) { const c = semver8.clean(versionSpec) || ""; - core14.debug(`isExplicit: ${c}`); + core12.debug(`isExplicit: ${c}`); const valid3 = semver8.valid(c) != null; - core14.debug(`explicit? ${valid3}`); + core12.debug(`explicit? 
${valid3}`); return valid3; } exports2.isExplicitVersion = isExplicitVersion; function evaluateVersions(versions, versionSpec) { let version = ""; - core14.debug(`evaluating ${versions.length} versions`); + core12.debug(`evaluating ${versions.length} versions`); versions = versions.sort((a, b) => { if (semver8.gt(a, b)) { return 1; @@ -81364,9 +81364,9 @@ var require_tool_cache = __commonJS({ } } if (version) { - core14.debug(`matched: ${version}`); + core12.debug(`matched: ${version}`); } else { - core14.debug("match not found"); + core12.debug("match not found"); } return version; } @@ -84866,7 +84866,7 @@ var fs13 = __toESM(require("fs")); var path14 = __toESM(require("path")); var url = __toESM(require("url")); var import_zlib = __toESM(require("zlib")); -var core13 = __toESM(require_core()); +var core11 = __toESM(require_core()); var jsonschema2 = __toESM(require_lib2()); // src/actions-util.ts @@ -92279,28 +92279,8 @@ async function addFingerprints(sarif, sourceRoot, logger) { } // src/init.ts -var core12 = __toESM(require_core()); var toolrunner4 = __toESM(require_toolrunner()); var io5 = __toESM(require_io()); - -// src/workflow.ts -var core11 = __toESM(require_core()); -function toCodedErrors(errors) { - return Object.entries(errors).reduce( - (acc, [code, message]) => { - acc[code] = { message, code }; - return acc; - }, - {} - ); -} -var WorkflowErrors = toCodedErrors({ - MissingPushHook: `Please specify an on.push hook to analyze and see code scanning alerts from the default branch on the Security tab.`, - CheckoutWrongHead: `git checkout HEAD^2 is no longer necessary. Please remove this step as Code Scanning recommends analyzing the merge commit for best results.`, - InconsistentActionVersion: `Not all workflow steps that use \`github/codeql-action\` actions use the same version. Please ensure that all such steps use the same version to avoid compatibility issues.` -}); - -// src/init.ts async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger) { logger.startGroup("Setup CodeQL tools"); const { @@ -92430,7 +92410,7 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo logger.warning( `Uploading multiple SARIF runs with the same category is deprecated ${deprecationWarningMessage}. Please update your workflow to upload a single run per category. ${deprecationMoreInformationMessage}` ); - core13.exportVariable("CODEQL_MERGE_SARIF_DEPRECATION_WARNING", "true"); + core11.exportVariable("CODEQL_MERGE_SARIF_DEPRECATION_WARNING", "true"); } return combineSarifFiles(sarifFiles, logger); } @@ -92529,13 +92509,13 @@ async function uploadPayload(payload, repositoryNwo, logger, analysis) { if (httpError !== void 0) { switch (httpError.status) { case 403: - core13.warning(httpError.message || GENERIC_403_MSG); + core11.warning(httpError.message || GENERIC_403_MSG); break; case 404: - core13.warning(httpError.message || GENERIC_404_MSG); + core11.warning(httpError.message || GENERIC_404_MSG); break; default: - core13.warning(httpError.message); + core11.warning(httpError.message); break; } } @@ -92665,9 +92645,9 @@ function validateSarifFileSchema(sarif, sarifFilePath, logger) { const warnings = (result.errors ?? 
[]).filter( (err) => err.name === "format" && typeof err.argument === "string" && warningAttributes.includes(err.argument) ); - for (const warning7 of warnings) { + for (const warning6 of warnings) { logger.info( - `Warning: '${warning7.instance}' is not a valid URI in '${warning7.property}'.` + `Warning: '${warning6.instance}' is not a valid URI in '${warning6.property}'.` ); } if (errors.length > 0) { @@ -92966,7 +92946,7 @@ function validateUniqueCategory(sarif, sentinelPrefix) { `Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job per tool/category. The easiest fix is to specify a unique value for the \`category\` input. If .runs[].automationDetails.id is specified in the sarif file, that will take precedence over your configured \`category\`. Category: (${id ? id : "none"}) Tool: (${tool ? tool : "none"})` ); } - core13.exportVariable(sentinelEnvVar, sentinelEnvVar); + core11.exportVariable(sentinelEnvVar, sentinelEnvVar); } } function sanitize(str2) { diff --git a/lib/upload-sarif-action.js b/lib/upload-sarif-action.js index 093f39d6d1..d49ad89b29 100644 --- a/lib/upload-sarif-action.js +++ b/lib/upload-sarif-action.js @@ -19743,10 +19743,10 @@ Support boolean input list: \`true | True | TRUE | false | False | FALSE\``); (0, command_1.issueCommand)("error", (0, utils_1.toCommandProperties)(properties), message instanceof Error ? message.toString() : message); } exports2.error = error2; - function warning8(message, properties = {}) { + function warning7(message, properties = {}) { (0, command_1.issueCommand)("warning", (0, utils_1.toCommandProperties)(properties), message instanceof Error ? message.toString() : message); } - exports2.warning = warning8; + exports2.warning = warning7; function notice(message, properties = {}) { (0, command_1.issueCommand)("notice", (0, utils_1.toCommandProperties)(properties), message instanceof Error ? 
message.toString() : message); } @@ -19755,22 +19755,22 @@ Support boolean input list: \`true | True | TRUE | false | False | FALSE\``); process.stdout.write(message + os3.EOL); } exports2.info = info4; - function startGroup4(name) { + function startGroup3(name) { (0, command_1.issue)("group", name); } - exports2.startGroup = startGroup4; - function endGroup4() { + exports2.startGroup = startGroup3; + function endGroup3() { (0, command_1.issue)("endgroup"); } - exports2.endGroup = endGroup4; + exports2.endGroup = endGroup3; function group(name, fn) { return __awaiter4(this, void 0, void 0, function* () { - startGroup4(name); + startGroup3(name); let result; try { result = yield fn(); } finally { - endGroup4(); + endGroup3(); } return result; }); @@ -34015,7 +34015,7 @@ var require_internal_glob_options_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getOptions = void 0; - var core16 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); function getOptions(copy) { const result = { followSymbolicLinks: true, @@ -34025,15 +34025,15 @@ var require_internal_glob_options_helper = __commonJS({ if (copy) { if (typeof copy.followSymbolicLinks === "boolean") { result.followSymbolicLinks = copy.followSymbolicLinks; - core16.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); + core14.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); } if (typeof copy.implicitDescendants === "boolean") { result.implicitDescendants = copy.implicitDescendants; - core16.debug(`implicitDescendants '${result.implicitDescendants}'`); + core14.debug(`implicitDescendants '${result.implicitDescendants}'`); } if (typeof copy.omitBrokenSymbolicLinks === "boolean") { result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; - core16.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); + core14.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); } } return result; @@ -35487,7 +35487,7 @@ var require_internal_globber = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; - var core16 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var fs15 = __importStar4(require("fs")); var globOptionsHelper = __importStar4(require_internal_glob_options_helper()); var path16 = __importStar4(require("path")); @@ -35538,7 +35538,7 @@ var require_internal_globber = __commonJS({ } const stack = []; for (const searchPath of patternHelper.getSearchPaths(patterns)) { - core16.debug(`Search path '${searchPath}'`); + core14.debug(`Search path '${searchPath}'`); try { yield __await4(fs15.promises.lstat(searchPath)); } catch (err) { @@ -35610,7 +35610,7 @@ var require_internal_globber = __commonJS({ } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { - core16.debug(`Broken symlink '${item.path}'`); + core14.debug(`Broken symlink '${item.path}'`); return void 0; } throw new Error(`No information found for the path '${item.path}'. 
This may indicate a broken symbolic link.`); @@ -35626,7 +35626,7 @@ var require_internal_globber = __commonJS({ traversalChain.pop(); } if (traversalChain.some((x) => x === realPath)) { - core16.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); + core14.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); return void 0; } traversalChain.push(realPath); @@ -36950,7 +36950,7 @@ var require_cacheUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getRuntimeToken = exports2.getCacheVersion = exports2.assertDefined = exports2.getGnuTarPathOnWindows = exports2.getCacheFileName = exports2.getCompressionMethod = exports2.unlinkFile = exports2.resolvePaths = exports2.getArchiveFileSizeInBytes = exports2.createTempDirectory = void 0; - var core16 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var exec2 = __importStar4(require_exec()); var glob = __importStar4(require_glob()); var io6 = __importStar4(require_io()); @@ -37003,7 +37003,7 @@ var require_cacheUtils = __commonJS({ _e = false; const file = _c; const relativeFile = path16.relative(workspace, file).replace(new RegExp(`\\${path16.sep}`, "g"), "/"); - core16.debug(`Matched: ${relativeFile}`); + core14.debug(`Matched: ${relativeFile}`); if (relativeFile === "") { paths.push("."); } else { @@ -37033,7 +37033,7 @@ var require_cacheUtils = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { let versionOutput = ""; additionalArgs.push("--version"); - core16.debug(`Checking ${app} ${additionalArgs.join(" ")}`); + core14.debug(`Checking ${app} ${additionalArgs.join(" ")}`); try { yield exec2.exec(`${app}`, additionalArgs, { ignoreReturnCode: true, @@ -37044,10 +37044,10 @@ var require_cacheUtils = __commonJS({ } }); } catch (err) { - core16.debug(err.message); + core14.debug(err.message); } versionOutput = versionOutput.trim(); - core16.debug(versionOutput); + core14.debug(versionOutput); return versionOutput; }); } @@ -37055,7 +37055,7 @@ var require_cacheUtils = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { const versionOutput = yield getVersion("zstd", ["--quiet"]); const version = semver8.clean(versionOutput); - core16.debug(`zstd version: ${version}`); + core14.debug(`zstd version: ${version}`); if (versionOutput === "") { return constants_1.CompressionMethod.Gzip; } else { @@ -72367,7 +72367,7 @@ var require_uploadUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.uploadCacheArchiveSDK = exports2.UploadProgress = void 0; - var core16 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var storage_blob_1 = require_dist7(); var errors_1 = require_errors2(); var UploadProgress = class { @@ -72409,7 +72409,7 @@ var require_uploadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const uploadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core16.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); + core14.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -72464,14 +72464,14 @@ var require_uploadUtils = __commonJS({ }; try { uploadProgress.startDisplayTimer(); - core16.debug(`BlobClient: 
${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); + core14.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); const response = yield blockBlobClient.uploadFile(archivePath, uploadOptions); if (response._response.status >= 400) { throw new errors_1.InvalidResponseError(`uploadCacheArchiveSDK: upload failed with status code ${response._response.status}`); } return response; } catch (error2) { - core16.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error2.message}`); + core14.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error2.message}`); throw error2; } finally { uploadProgress.stopDisplayTimer(); @@ -72542,7 +72542,7 @@ var require_requestUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.retryHttpClientResponse = exports2.retryTypedResponse = exports2.retry = exports2.isRetryableStatusCode = exports2.isServerErrorStatusCode = exports2.isSuccessStatusCode = void 0; - var core16 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var http_client_1 = require_lib(); var constants_1 = require_constants10(); function isSuccessStatusCode(statusCode) { @@ -72603,9 +72603,9 @@ var require_requestUtils = __commonJS({ isRetryable = isRetryableStatusCode(statusCode); errorMessage = `Cache service responded with ${statusCode}`; } - core16.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); + core14.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); if (!isRetryable) { - core16.debug(`${name} - Error is not retryable`); + core14.debug(`${name} - Error is not retryable`); break; } yield sleep(delay2); @@ -72710,7 +72710,7 @@ var require_downloadUtils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.downloadCacheStorageSDK = exports2.downloadCacheHttpClientConcurrent = exports2.downloadCacheHttpClient = exports2.DownloadProgress = void 0; - var core16 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var http_client_1 = require_lib(); var storage_blob_1 = require_dist7(); var buffer = __importStar4(require("buffer")); @@ -72748,7 +72748,7 @@ var require_downloadUtils = __commonJS({ this.segmentIndex = this.segmentIndex + 1; this.segmentSize = segmentSize; this.receivedBytes = 0; - core16.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); + core14.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); } /** * Sets the number of bytes received for the current segment. 
@@ -72782,7 +72782,7 @@ var require_downloadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const downloadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core16.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); + core14.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -72832,7 +72832,7 @@ var require_downloadUtils = __commonJS({ })); downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => { downloadResponse.message.destroy(); - core16.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); + core14.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); }); yield pipeResponseToStream(downloadResponse, writeStream); const contentLengthHeader = downloadResponse.message.headers["content-length"]; @@ -72843,7 +72843,7 @@ var require_downloadUtils = __commonJS({ throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`); } } else { - core16.debug("Unable to validate download, no Content-Length header"); + core14.debug("Unable to validate download, no Content-Length header"); } }); } @@ -72963,7 +72963,7 @@ var require_downloadUtils = __commonJS({ const properties = yield client.getProperties(); const contentLength = (_a = properties.contentLength) !== null && _a !== void 0 ? _a : -1; if (contentLength < 0) { - core16.debug("Unable to determine content length, downloading file with http-client..."); + core14.debug("Unable to determine content length, downloading file with http-client..."); yield downloadCacheHttpClient(archiveLocation, archivePath); } else { const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH); @@ -73043,7 +73043,7 @@ var require_options = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getDownloadOptions = exports2.getUploadOptions = void 0; - var core16 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); function getUploadOptions(copy) { const result = { useAzureSdk: false, @@ -73063,9 +73063,9 @@ var require_options = __commonJS({ } result.uploadConcurrency = !isNaN(Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) ? Math.min(32, Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) : result.uploadConcurrency; result.uploadChunkSize = !isNaN(Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"])) ? 
Math.min(128 * 1024 * 1024, Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"]) * 1024 * 1024) : result.uploadChunkSize; - core16.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core16.debug(`Upload concurrency: ${result.uploadConcurrency}`); - core16.debug(`Upload chunk size: ${result.uploadChunkSize}`); + core14.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core14.debug(`Upload concurrency: ${result.uploadConcurrency}`); + core14.debug(`Upload chunk size: ${result.uploadChunkSize}`); return result; } exports2.getUploadOptions = getUploadOptions; @@ -73102,12 +73102,12 @@ var require_options = __commonJS({ if (segmentDownloadTimeoutMins && !isNaN(Number(segmentDownloadTimeoutMins)) && isFinite(Number(segmentDownloadTimeoutMins))) { result.segmentTimeoutInMs = Number(segmentDownloadTimeoutMins) * 60 * 1e3; } - core16.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core16.debug(`Download concurrency: ${result.downloadConcurrency}`); - core16.debug(`Request timeout (ms): ${result.timeoutInMs}`); - core16.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); - core16.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); - core16.debug(`Lookup only: ${result.lookupOnly}`); + core14.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core14.debug(`Download concurrency: ${result.downloadConcurrency}`); + core14.debug(`Request timeout (ms): ${result.timeoutInMs}`); + core14.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); + core14.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); + core14.debug(`Lookup only: ${result.lookupOnly}`); return result; } exports2.getDownloadOptions = getDownloadOptions; @@ -73287,7 +73287,7 @@ var require_cacheHttpClient = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.saveCache = exports2.reserveCache = exports2.downloadCache = exports2.getCacheEntry = void 0; - var core16 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var http_client_1 = require_lib(); var auth_1 = require_auth(); var fs15 = __importStar4(require("fs")); @@ -73305,7 +73305,7 @@ var require_cacheHttpClient = __commonJS({ throw new Error("Cache Service Url not found, unable to restore cache."); } const url2 = `${baseUrl}_apis/artifactcache/${resource}`; - core16.debug(`Resource Url: ${url2}`); + core14.debug(`Resource Url: ${url2}`); return url2; } function createAcceptHeader(type2, apiVersion) { @@ -73333,7 +73333,7 @@ var require_cacheHttpClient = __commonJS({ return httpClient.getJson(getCacheApiUrl(resource)); })); if (response.statusCode === 204) { - if (core16.isDebug()) { + if (core14.isDebug()) { yield printCachesListForDiagnostics(keys[0], httpClient, version); } return null; @@ -73346,9 +73346,9 @@ var require_cacheHttpClient = __commonJS({ if (!cacheDownloadUrl) { throw new Error("Cache not found."); } - core16.setSecret(cacheDownloadUrl); - core16.debug(`Cache Result:`); - core16.debug(JSON.stringify(cacheResult)); + core14.setSecret(cacheDownloadUrl); + core14.debug(`Cache Result:`); + core14.debug(JSON.stringify(cacheResult)); return cacheResult; }); } @@ -73363,10 +73363,10 @@ var require_cacheHttpClient = __commonJS({ const cacheListResult = response.result; const totalCount = cacheListResult === null || cacheListResult === void 0 ? 
void 0 : cacheListResult.totalCount; if (totalCount && totalCount > 0) { - core16.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key + core14.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key Other caches with similar key:`); for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) { - core16.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); + core14.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); } } } @@ -73411,7 +73411,7 @@ Other caches with similar key:`); } function uploadChunk(httpClient, resourceUrl, openStream, start, end) { return __awaiter4(this, void 0, void 0, function* () { - core16.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); + core14.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); const additionalHeaders = { "Content-Type": "application/octet-stream", "Content-Range": getContentRange(start, end) @@ -73433,7 +73433,7 @@ Other caches with similar key:`); const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency); const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize); const parallelUploads = [...new Array(concurrency).keys()]; - core16.debug("Awaiting all uploads"); + core14.debug("Awaiting all uploads"); let offset = 0; try { yield Promise.all(parallelUploads.map(() => __awaiter4(this, void 0, void 0, function* () { @@ -73476,16 +73476,16 @@ Other caches with similar key:`); yield (0, uploadUtils_1.uploadCacheArchiveSDK)(signedUploadURL, archivePath, options); } else { const httpClient = createHttpClient(); - core16.debug("Upload cache"); + core14.debug("Upload cache"); yield uploadFile(httpClient, cacheId, archivePath, options); - core16.debug("Commiting cache"); + core14.debug("Commiting cache"); const cacheSize = utils.getArchiveFileSizeInBytes(archivePath); - core16.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); + core14.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); const commitCacheResponse = yield commitCache(httpClient, 
cacheId, cacheSize); if (!(0, requestUtils_1.isSuccessStatusCode)(commitCacheResponse.statusCode)) { throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`); } - core16.info("Cache saved successfully"); + core14.info("Cache saved successfully"); } }); } @@ -78937,7 +78937,7 @@ var require_cache3 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.saveCache = exports2.restoreCache = exports2.isFeatureAvailable = exports2.FinalizeCacheError = exports2.ReserveCacheError = exports2.ValidationError = void 0; - var core16 = __importStar4(require_core()); + var core14 = __importStar4(require_core()); var path16 = __importStar4(require("path")); var utils = __importStar4(require_cacheUtils()); var cacheHttpClient = __importStar4(require_cacheHttpClient()); @@ -78997,7 +78997,7 @@ var require_cache3 = __commonJS({ function restoreCache3(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { return __awaiter4(this, void 0, void 0, function* () { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core16.debug(`Cache service version: ${cacheServiceVersion}`); + core14.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); switch (cacheServiceVersion) { case "v2": @@ -79013,8 +79013,8 @@ var require_cache3 = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core16.debug("Resolved Keys:"); - core16.debug(JSON.stringify(keys)); + core14.debug("Resolved Keys:"); + core14.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -79032,19 +79032,19 @@ var require_cache3 = __commonJS({ return void 0; } if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { - core16.info("Lookup only - skipping download"); + core14.info("Lookup only - skipping download"); return cacheEntry.cacheKey; } archivePath = path16.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core16.debug(`Archive Path: ${archivePath}`); + core14.debug(`Archive Path: ${archivePath}`); yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); - if (core16.isDebug()) { + if (core14.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core16.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + core14.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); yield (0, tar_1.extractTar)(archivePath, compressionMethod); - core16.info("Cache restored successfully"); + core14.info("Cache restored successfully"); return cacheEntry.cacheKey; } catch (error2) { const typedError = error2; @@ -79052,16 +79052,16 @@ var require_cache3 = __commonJS({ throw error2; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core16.error(`Failed to restore: ${error2.message}`); + core14.error(`Failed to restore: ${error2.message}`); } else { - core16.warning(`Failed to restore: ${error2.message}`); + core14.warning(`Failed to restore: ${error2.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error2) { - core16.debug(`Failed to delete archive: ${error2}`); + core14.debug(`Failed to delete archive: ${error2}`); } } return void 0; @@ -79072,8 +79072,8 @@ var require_cache3 = __commonJS({ options = Object.assign(Object.assign({}, options), { useAzureSdk: true }); restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core16.debug("Resolved Keys:"); - core16.debug(JSON.stringify(keys)); + core14.debug("Resolved Keys:"); + core14.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -79091,30 +79091,30 @@ var require_cache3 = __commonJS({ }; const response = yield twirpClient.GetCacheEntryDownloadURL(request); if (!response.ok) { - core16.debug(`Cache not found for version ${request.version} of keys: ${keys.join(", ")}`); + core14.debug(`Cache not found for version ${request.version} of keys: ${keys.join(", ")}`); return void 0; } const isRestoreKeyMatch = request.key !== response.matchedKey; if (isRestoreKeyMatch) { - core16.info(`Cache hit for restore-key: ${response.matchedKey}`); + core14.info(`Cache hit for restore-key: ${response.matchedKey}`); } else { - core16.info(`Cache hit for: ${response.matchedKey}`); + core14.info(`Cache hit for: ${response.matchedKey}`); } if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) {
-          core16.info("Lookup only - skipping download");
+          core14.info("Lookup only - skipping download");
           return response.matchedKey;
         }
         archivePath = path16.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod));
-        core16.debug(`Archive path: ${archivePath}`);
-        core16.debug(`Starting download of archive to: ${archivePath}`);
+        core14.debug(`Archive path: ${archivePath}`);
+        core14.debug(`Starting download of archive to: ${archivePath}`);
         yield cacheHttpClient.downloadCache(response.signedDownloadUrl, archivePath, options);
         const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
-        core16.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`);
-        if (core16.isDebug()) {
+        core14.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`);
+        if (core14.isDebug()) {
           yield (0, tar_1.listTar)(archivePath, compressionMethod);
         }
         yield (0, tar_1.extractTar)(archivePath, compressionMethod);
-        core16.info("Cache restored successfully");
+        core14.info("Cache restored successfully");
         return response.matchedKey;
       } catch (error2) {
         const typedError = error2;
@@ -79122,9 +79122,9 @@ var require_cache3 = __commonJS({
           throw error2;
         } else {
           if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) {
-            core16.error(`Failed to restore: ${error2.message}`);
+            core14.error(`Failed to restore: ${error2.message}`);
           } else {
-            core16.warning(`Failed to restore: ${error2.message}`);
+            core14.warning(`Failed to restore: ${error2.message}`);
           }
         }
       } finally {
@@ -79133,7 +79133,7 @@ var require_cache3 = __commonJS({
             yield utils.unlinkFile(archivePath);
           }
         } catch (error2) {
-          core16.debug(`Failed to delete archive: ${error2}`);
+          core14.debug(`Failed to delete archive: ${error2}`);
         }
       }
       return void 0;
@@ -79142,7 +79142,7 @@ var require_cache3 = __commonJS({
   function saveCache3(paths, key, options, enableCrossOsArchive = false) {
     return __awaiter4(this, void 0, void 0, function* () {
       const cacheServiceVersion = (0, config_1.getCacheServiceVersion)();
-      core16.debug(`Cache service version: ${cacheServiceVersion}`);
+      core14.debug(`Cache service version: ${cacheServiceVersion}`);
       checkPaths(paths);
       checkKey(key);
       switch (cacheServiceVersion) {
@@ -79161,26 +79161,26 @@ var require_cache3 = __commonJS({
       const compressionMethod = yield utils.getCompressionMethod();
       let cacheId = -1;
       const cachePaths = yield utils.resolvePaths(paths);
-      core16.debug("Cache Paths:");
-      core16.debug(`${JSON.stringify(cachePaths)}`);
+      core14.debug("Cache Paths:");
+      core14.debug(`${JSON.stringify(cachePaths)}`);
       if (cachePaths.length === 0) {
         throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`);
       }
       const archiveFolder = yield utils.createTempDirectory();
       const archivePath = path16.join(archiveFolder, utils.getCacheFileName(compressionMethod));
-      core16.debug(`Archive Path: ${archivePath}`);
+      core14.debug(`Archive Path: ${archivePath}`);
       try {
         yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod);
-        if (core16.isDebug()) {
+        if (core14.isDebug()) {
           yield (0, tar_1.listTar)(archivePath, compressionMethod);
         }
         const fileSizeLimit = 10 * 1024 * 1024 * 1024;
         const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
-        core16.debug(`File Size: ${archiveFileSize}`);
+        core14.debug(`File Size: ${archiveFileSize}`);
         if (archiveFileSize > fileSizeLimit && !(0, config_1.isGhes)()) {
           throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`);
         }
-        core16.debug("Reserving Cache");
+        core14.debug("Reserving Cache");
         const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, {
           compressionMethod,
           enableCrossOsArchive,
@@ -79193,26 +79193,26 @@ var require_cache3 = __commonJS({
         } else {
           throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? void 0 : _e.message}`);
         }
-        core16.debug(`Saving Cache (ID: ${cacheId})`);
+        core14.debug(`Saving Cache (ID: ${cacheId})`);
         yield cacheHttpClient.saveCache(cacheId, archivePath, "", options);
       } catch (error2) {
         const typedError = error2;
         if (typedError.name === ValidationError.name) {
           throw error2;
         } else if (typedError.name === ReserveCacheError.name) {
-          core16.info(`Failed to save: ${typedError.message}`);
+          core14.info(`Failed to save: ${typedError.message}`);
         } else {
           if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) {
-            core16.error(`Failed to save: ${typedError.message}`);
+            core14.error(`Failed to save: ${typedError.message}`);
           } else {
-            core16.warning(`Failed to save: ${typedError.message}`);
+            core14.warning(`Failed to save: ${typedError.message}`);
           }
         }
       } finally {
         try {
           yield utils.unlinkFile(archivePath);
         } catch (error2) {
-          core16.debug(`Failed to delete archive: ${error2}`);
+          core14.debug(`Failed to delete archive: ${error2}`);
         }
       }
       return cacheId;
@@ -79225,23 +79225,23 @@ var require_cache3 = __commonJS({
       const twirpClient = cacheTwirpClient.internalCacheTwirpClient();
       let cacheId = -1;
       const cachePaths = yield utils.resolvePaths(paths);
-      core16.debug("Cache Paths:");
-      core16.debug(`${JSON.stringify(cachePaths)}`);
+      core14.debug("Cache Paths:");
+      core14.debug(`${JSON.stringify(cachePaths)}`);
       if (cachePaths.length === 0) {
         throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`);
       }
       const archiveFolder = yield utils.createTempDirectory();
       const archivePath = path16.join(archiveFolder, utils.getCacheFileName(compressionMethod));
-      core16.debug(`Archive Path: ${archivePath}`);
+      core14.debug(`Archive Path: ${archivePath}`);
       try {
         yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod);
-        if (core16.isDebug()) {
+        if (core14.isDebug()) {
           yield (0, tar_1.listTar)(archivePath, compressionMethod);
         }
         const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
-        core16.debug(`File Size: ${archiveFileSize}`);
+        core14.debug(`File Size: ${archiveFileSize}`);
         options.archiveSizeBytes = archiveFileSize;
-        core16.debug("Reserving Cache");
+        core14.debug("Reserving Cache");
         const version = utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive);
         const request = {
           key,
@@ -79252,16 +79252,16 @@ var require_cache3 = __commonJS({
         const response = yield twirpClient.CreateCacheEntry(request);
         if (!response.ok) {
           if (response.message) {
-            core16.warning(`Cache reservation failed: ${response.message}`);
+            core14.warning(`Cache reservation failed: ${response.message}`);
           }
           throw new Error(response.message || "Response was not ok");
         }
         signedUploadUrl = response.signedUploadUrl;
       } catch (error2) {
-        core16.debug(`Failed to reserve cache: ${error2}`);
+        core14.debug(`Failed to reserve cache: ${error2}`);
         throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache.`);
       }
-      core16.debug(`Attempting to upload cache located at: ${archivePath}`);
+      core14.debug(`Attempting to upload cache located at: ${archivePath}`);
       yield cacheHttpClient.saveCache(cacheId, archivePath, signedUploadUrl, options);
       const finalizeRequest = {
         key,
@@ -79269,7 +79269,7 @@ var require_cache3 = __commonJS({
         sizeBytes: `${archiveFileSize}`
       };
       const finalizeResponse = yield twirpClient.FinalizeCacheEntryUpload(finalizeRequest);
-      core16.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`);
+      core14.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`);
       if (!finalizeResponse.ok) {
         if (finalizeResponse.message) {
           throw new FinalizeCacheError(finalizeResponse.message);
@@ -79282,21 +79282,21 @@ var require_cache3 = __commonJS({
       if (typedError.name === ValidationError.name) {
         throw error2;
       } else if (typedError.name === ReserveCacheError.name) {
-        core16.info(`Failed to save: ${typedError.message}`);
+        core14.info(`Failed to save: ${typedError.message}`);
       } else if (typedError.name === FinalizeCacheError.name) {
-        core16.warning(typedError.message);
+        core14.warning(typedError.message);
       } else {
         if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) {
-          core16.error(`Failed to save: ${typedError.message}`);
+          core14.error(`Failed to save: ${typedError.message}`);
         } else {
-          core16.warning(`Failed to save: ${typedError.message}`);
+          core14.warning(`Failed to save: ${typedError.message}`);
         }
       }
     } finally {
       try {
         yield utils.unlinkFile(archivePath);
       } catch (error2) {
-        core16.debug(`Failed to delete archive: ${error2}`);
+        core14.debug(`Failed to delete archive: ${error2}`);
       }
     }
     return cacheId;
@@ -80801,7 +80801,7 @@ var require_retry_helper = __commonJS({
    };
    Object.defineProperty(exports2, "__esModule", { value: true });
    exports2.RetryHelper = void 0;
-    var core16 = __importStar4(require_core());
+    var core14 = __importStar4(require_core());
    var RetryHelper = class {
      constructor(maxAttempts, minSeconds, maxSeconds) {
        if (maxAttempts < 1) {
@@ -80824,10 +80824,10 @@ var require_retry_helper = __commonJS({
          if (isRetryable && !isRetryable(err)) {
            throw err;
          }
-          core16.info(err.message);
+          core14.info(err.message);
        }
        const seconds = this.getSleepAmount();
-        core16.info(`Waiting ${seconds} seconds before trying again`);
+        core14.info(`Waiting ${seconds} seconds before trying again`);
        yield this.sleep(seconds);
        attempt++;
      }
@@ -80907,7 +80907,7 @@ var require_tool_cache = __commonJS({
    };
    Object.defineProperty(exports2, "__esModule", { value: true });
    exports2.evaluateVersions = exports2.isExplicitVersion = exports2.findFromManifest = exports2.getManifestFromRepo = exports2.findAllVersions = exports2.find = exports2.cacheFile = exports2.cacheDir = exports2.extractZip = exports2.extractXar = exports2.extractTar = exports2.extract7z = exports2.downloadTool = exports2.HTTPError = void 0;
-    var core16 = __importStar4(require_core());
+    var core14 = __importStar4(require_core());
    var io6 = __importStar4(require_io());
    var crypto = __importStar4(require("crypto"));
    var fs15 = __importStar4(require("fs"));
@@ -80936,8 +80936,8 @@ var require_tool_cache = __commonJS({
      return __awaiter4(this, void 0, void 0, function* () {
        dest = dest || path16.join(_getTempDirectory(), crypto.randomUUID());
        yield io6.mkdirP(path16.dirname(dest));
-        core16.debug(`Downloading ${url2}`);
-        core16.debug(`Destination ${dest}`);
+        core14.debug(`Downloading ${url2}`);
+        core14.debug(`Destination ${dest}`);
        const maxAttempts = 3;
        const minSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS", 10);
        const maxSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS", 20);
@@ -80964,7 +80964,7 @@ var require_tool_cache = __commonJS({
          allowRetries: false
        });
        if (auth) {
-          core16.debug("set auth");
+          core14.debug("set auth");
          if (headers === void 0) {
            headers = {};
          }
@@ -80973,7 +80973,7 @@ var require_tool_cache = __commonJS({
        const response = yield http.get(url2, headers);
        if (response.message.statusCode !== 200) {
          const err = new HTTPError2(response.message.statusCode);
-          core16.debug(`Failed to download from "${url2}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`);
+          core14.debug(`Failed to download from "${url2}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`);
          throw err;
        }
        const pipeline = util.promisify(stream2.pipeline);
@@ -80982,16 +80982,16 @@ var require_tool_cache = __commonJS({
        let succeeded = false;
        try {
          yield pipeline(readStream, fs15.createWriteStream(dest));
-          core16.debug("download complete");
+          core14.debug("download complete");
          succeeded = true;
          return dest;
        } finally {
          if (!succeeded) {
-            core16.debug("download failed");
+            core14.debug("download failed");
            try {
              yield io6.rmRF(dest);
            } catch (err) {
-              core16.debug(`Failed to delete '${dest}'. ${err.message}`);
+              core14.debug(`Failed to delete '${dest}'. ${err.message}`);
            }
          }
        }
@@ -81006,7 +81006,7 @@ var require_tool_cache = __commonJS({
      process.chdir(dest);
      if (_7zPath) {
        try {
-          const logLevel = core16.isDebug() ? "-bb1" : "-bb0";
+          const logLevel = core14.isDebug() ? "-bb1" : "-bb0";
          const args = [
            "x",
            logLevel,
@@ -81056,7 +81056,7 @@ var require_tool_cache = __commonJS({
        throw new Error("parameter 'file' is required");
      }
      dest = yield _createExtractFolder(dest);
-      core16.debug("Checking tar --version");
+      core14.debug("Checking tar --version");
      let versionOutput = "";
      yield (0, exec_1.exec)("tar --version", [], {
        ignoreReturnCode: true,
@@ -81066,7 +81066,7 @@ var require_tool_cache = __commonJS({
          stderr: (data) => versionOutput += data.toString()
        }
      });
-      core16.debug(versionOutput.trim());
+      core14.debug(versionOutput.trim());
      const isGnuTar = versionOutput.toUpperCase().includes("GNU TAR");
      let args;
      if (flags instanceof Array) {
@@ -81074,7 +81074,7 @@ var require_tool_cache = __commonJS({
      } else {
        args = [flags];
      }
-      if (core16.isDebug() && !flags.includes("v")) {
+      if (core14.isDebug() && !flags.includes("v")) {
        args.push("-v");
      }
      let destArg = dest;
@@ -81106,7 +81106,7 @@ var require_tool_cache = __commonJS({
        args = [flags];
      }
      args.push("-x", "-C", dest, "-f", file);
-      if (core16.isDebug()) {
+      if (core14.isDebug()) {
        args.push("-v");
      }
      const xarPath = yield io6.which("xar", true);
@@ -81151,7 +81151,7 @@ var require_tool_cache = __commonJS({
          "-Command",
          pwshCommand
        ];
-        core16.debug(`Using pwsh at path: ${pwshPath}`);
+        core14.debug(`Using pwsh at path: ${pwshPath}`);
        yield (0, exec_1.exec)(`"${pwshPath}"`, args);
      } else {
        const powershellCommand = [
@@ -81171,7 +81171,7 @@ var require_tool_cache = __commonJS({
          powershellCommand
        ];
        const powershellPath = yield io6.which("powershell", true);
-        core16.debug(`Using powershell at path: ${powershellPath}`);
+        core14.debug(`Using powershell at path: ${powershellPath}`);
        yield (0, exec_1.exec)(`"${powershellPath}"`, args);
      }
    });
@@ -81180,7 +81180,7 @@ var require_tool_cache = __commonJS({
    return __awaiter4(this, void 0, void 0, function* () {
      const unzipPath = yield io6.which("unzip", true);
      const args = [file];
-      if (!core16.isDebug()) {
+      if (!core14.isDebug()) {
        args.unshift("-q");
      }
      args.unshift("-o");
@@ -81191,8 +81191,8 @@ var require_tool_cache = __commonJS({
    return __awaiter4(this, void 0, void 0, function* () {
      version = semver8.clean(version) || version;
      arch2 = arch2 || os3.arch();
-      core16.debug(`Caching tool ${tool} ${version} ${arch2}`);
-      core16.debug(`source dir: ${sourceDir}`);
+      core14.debug(`Caching tool ${tool} ${version} ${arch2}`);
+      core14.debug(`source dir: ${sourceDir}`);
      if (!fs15.statSync(sourceDir).isDirectory()) {
        throw new Error("sourceDir is not a directory");
      }
@@ -81210,14 +81210,14 @@ var require_tool_cache = __commonJS({
    return __awaiter4(this, void 0, void 0, function* () {
      version = semver8.clean(version) || version;
      arch2 = arch2 || os3.arch();
-      core16.debug(`Caching tool ${tool} ${version} ${arch2}`);
-      core16.debug(`source file: ${sourceFile}`);
+      core14.debug(`Caching tool ${tool} ${version} ${arch2}`);
+      core14.debug(`source file: ${sourceFile}`);
      if (!fs15.statSync(sourceFile).isFile()) {
        throw new Error("sourceFile is not a file");
      }
      const destFolder = yield _createToolPath(tool, version, arch2);
      const destPath = path16.join(destFolder, targetFile);
-      core16.debug(`destination file ${destPath}`);
+      core14.debug(`destination file ${destPath}`);
      yield io6.cp(sourceFile, destPath);
      _completeToolPath(tool, version, arch2);
      return destFolder;
@@ -81241,12 +81241,12 @@ var require_tool_cache = __commonJS({
    if (versionSpec) {
      versionSpec = semver8.clean(versionSpec) || "";
      const cachePath = path16.join(_getCacheDirectory(), toolName, versionSpec, arch2);
-      core16.debug(`checking cache: ${cachePath}`);
+      core14.debug(`checking cache: ${cachePath}`);
      if (fs15.existsSync(cachePath) && fs15.existsSync(`${cachePath}.complete`)) {
-        core16.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`);
+        core14.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`);
        toolPath = cachePath;
      } else {
-        core16.debug("not found");
+        core14.debug("not found");
      }
    }
    return toolPath;
@@ -81277,7 +81277,7 @@ var require_tool_cache = __commonJS({
    const http = new httpm.HttpClient("tool-cache");
    const headers = {};
    if (auth) {
-      core16.debug("set auth");
+      core14.debug("set auth");
      headers.authorization = auth;
    }
    const response = yield http.getJson(treeUrl, headers);
@@ -81298,7 +81298,7 @@ var require_tool_cache = __commonJS({
      try {
        releases = JSON.parse(versionsRaw);
      } catch (_a) {
-        core16.debug("Invalid json");
+        core14.debug("Invalid json");
      }
    }
    return releases;
@@ -81324,7 +81324,7 @@ var require_tool_cache = __commonJS({
    function _createToolPath(tool, version, arch2) {
      return __awaiter4(this, void 0, void 0, function* () {
        const folderPath = path16.join(_getCacheDirectory(), tool, semver8.clean(version) || version, arch2 || "");
-        core16.debug(`destination ${folderPath}`);
+        core14.debug(`destination ${folderPath}`);
        const markerPath = `${folderPath}.complete`;
        yield io6.rmRF(folderPath);
        yield io6.rmRF(markerPath);
@@ -81336,19 +81336,19 @@ var require_tool_cache = __commonJS({
      const folderPath = path16.join(_getCacheDirectory(), tool, semver8.clean(version) || version, arch2 || "");
      const markerPath = `${folderPath}.complete`;
      fs15.writeFileSync(markerPath, "");
-      core16.debug("finished caching tool");
+      core14.debug("finished caching tool");
    }
    function isExplicitVersion(versionSpec) {
      const c = semver8.clean(versionSpec) || "";
-      core16.debug(`isExplicit: ${c}`);
+      core14.debug(`isExplicit: ${c}`);
      const valid3 = semver8.valid(c) != null;
-      core16.debug(`explicit? ${valid3}`);
+      core14.debug(`explicit? ${valid3}`);
      return valid3;
    }
    exports2.isExplicitVersion = isExplicitVersion;
    function evaluateVersions(versions, versionSpec) {
      let version = "";
-      core16.debug(`evaluating ${versions.length} versions`);
+      core14.debug(`evaluating ${versions.length} versions`);
      versions = versions.sort((a, b) => {
        if (semver8.gt(a, b)) {
          return 1;
@@ -81364,9 +81364,9 @@ var require_tool_cache = __commonJS({
        }
      }
      if (version) {
-        core16.debug(`matched: ${version}`);
+        core14.debug(`matched: ${version}`);
      } else {
-        core16.debug("match not found");
+        core14.debug("match not found");
      }
      return version;
    }
@@ -84839,7 +84839,7 @@ var require_sarif_schema_2_1_0 = __commonJS({
 });
 
 // src/upload-sarif-action.ts
-var core15 = __toESM(require_core());
+var core13 = __toESM(require_core());
 
 // src/actions-util.ts
 var fs4 = __toESM(require("fs"));
@@ -90055,7 +90055,7 @@ var fs14 = __toESM(require("fs"));
 var path15 = __toESM(require("path"));
 var url = __toESM(require("url"));
 var import_zlib = __toESM(require("zlib"));
-var core14 = __toESM(require_core());
+var core12 = __toESM(require_core());
 var jsonschema2 = __toESM(require_lib2());
 
 // src/codeql.ts
@@ -92952,28 +92952,8 @@ async function addFingerprints(sarif, sourceRoot, logger) {
 }
 
 // src/init.ts
-var core13 = __toESM(require_core());
 var toolrunner4 = __toESM(require_toolrunner());
 var io5 = __toESM(require_io());
-
-// src/workflow.ts
-var core12 = __toESM(require_core());
-function toCodedErrors(errors) {
-  return Object.entries(errors).reduce(
-    (acc, [code, message]) => {
-      acc[code] = { message, code };
-      return acc;
-    },
-    {}
-  );
-}
-var WorkflowErrors = toCodedErrors({
-  MissingPushHook: `Please specify an on.push hook to analyze and see code scanning alerts from the default branch on the Security tab.`,
-  CheckoutWrongHead: `git checkout HEAD^2 is no longer necessary. Please remove this step as Code Scanning recommends analyzing the merge commit for best results.`,
-  InconsistentActionVersion: `Not all workflow steps that use \`github/codeql-action\` actions use the same version. Please ensure that all such steps use the same version to avoid compatibility issues.`
-});
-
-// src/init.ts
 async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger) {
   logger.startGroup("Setup CodeQL tools");
   const {
@@ -93103,7 +93083,7 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo
     logger.warning(
       `Uploading multiple SARIF runs with the same category is deprecated ${deprecationWarningMessage}. Please update your workflow to upload a single run per category. ${deprecationMoreInformationMessage}`
     );
-    core14.exportVariable("CODEQL_MERGE_SARIF_DEPRECATION_WARNING", "true");
+    core12.exportVariable("CODEQL_MERGE_SARIF_DEPRECATION_WARNING", "true");
   }
   return combineSarifFiles(sarifFiles, logger);
 }
@@ -93202,13 +93182,13 @@ async function uploadPayload(payload, repositoryNwo, logger, analysis) {
   if (httpError !== void 0) {
     switch (httpError.status) {
       case 403:
-        core14.warning(httpError.message || GENERIC_403_MSG);
+        core12.warning(httpError.message || GENERIC_403_MSG);
         break;
       case 404:
-        core14.warning(httpError.message || GENERIC_404_MSG);
+        core12.warning(httpError.message || GENERIC_404_MSG);
         break;
       default:
-        core14.warning(httpError.message);
+        core12.warning(httpError.message);
         break;
     }
   }
@@ -93321,9 +93301,9 @@ function validateSarifFileSchema(sarif, sarifFilePath, logger) {
   const warnings = (result.errors ?? []).filter(
     (err) => err.name === "format" && typeof err.argument === "string" && warningAttributes.includes(err.argument)
   );
-  for (const warning8 of warnings) {
+  for (const warning7 of warnings) {
     logger.info(
-      `Warning: '${warning8.instance}' is not a valid URI in '${warning8.property}'.`
+      `Warning: '${warning7.instance}' is not a valid URI in '${warning7.property}'.`
     );
   }
   if (errors.length > 0) {
@@ -93592,7 +93572,7 @@ function validateUniqueCategory(sarif, sentinelPrefix) {
       `Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job per tool/category. The easiest fix is to specify a unique value for the \`category\` input. If .runs[].automationDetails.id is specified in the sarif file, that will take precedence over your configured \`category\`. Category: (${id ? id : "none"}) Tool: (${tool ? tool : "none"})`
     );
   }
-  core14.exportVariable(sentinelEnvVar, sentinelEnvVar);
+  core12.exportVariable(sentinelEnvVar, sentinelEnvVar);
 }
 }
 function sanitize(str2) {
@@ -93732,11 +93712,11 @@ async function run() {
   }
   const codeScanningResult = uploadResults["code-scanning" /* CodeScanning */];
   if (codeScanningResult !== void 0) {
-    core15.setOutput("sarif-id", codeScanningResult.sarifID);
+    core13.setOutput("sarif-id", codeScanningResult.sarifID);
   }
-  core15.setOutput("sarif-ids", JSON.stringify(uploadResults));
+  core13.setOutput("sarif-ids", JSON.stringify(uploadResults));
   if (shouldSkipSarifUpload()) {
-    core15.debug(
+    core13.debug(
       "SARIF upload disabled by an environment variable. Waiting for processing is disabled."
     );
   } else if (getRequiredInput("wait-for-processing") === "true") {
@@ -93756,7 +93736,7 @@ async function run() {
 } catch (unwrappedError) {
   const error2 = isThirdPartyAnalysis("upload-sarif" /* UploadSarif */) && unwrappedError instanceof InvalidSarifUploadError ? new ConfigurationError(unwrappedError.message) : wrapError(unwrappedError);
   const message = error2.message;
-  core15.setFailed(message);
+  core13.setFailed(message);
   const errorStatusReportBase = await createStatusReportBase(
     "upload-sarif" /* UploadSarif */,
     getActionsStatus(error2),
@@ -93777,7 +93757,7 @@ async function runWrapper() {
   try {
     await run();
   } catch (error2) {
-    core15.setFailed(
+    core13.setFailed(
       `codeql/upload-sarif action failed: ${getErrorMessage(error2)}`
     );
   }
diff --git a/src/init-action.ts b/src/init-action.ts
index 6984f9a379..97cb6b784e 100644
--- a/src/init-action.ts
+++ b/src/init-action.ts
@@ -40,7 +40,6 @@ import { loadPropertiesFromApi } from "./feature-flags/properties";
 import {
   checkInstallPython311,
   checkPacksForOverlayCompatibility,
-  checkWorkflow,
   cleanupDatabaseClusterDirectory,
   initCodeQL,
   initConfig,
@@ -87,6 +86,7 @@ import {
   getErrorMessage,
   BuildMode,
 } from "./util";
+import { checkWorkflow } from "./workflow";
 
 /**
  * Sends a status report indicating that the `init` Action is starting.
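Note: with `checkWorkflow` now exported from `./workflow` rather than `./init`, the validation block in the init Action's `run()` reduces to a single call. A minimal sketch of the resulting call site, assuming the surrounding setup code is otherwise unchanged (the hunks above only show the import changes):

    // Sketch of the simplified call site in src/init-action.ts.
    // checkWorkflow itself decides whether validation should run,
    // so run() no longer needs the env-var guard inline.
    await checkWorkflow(logger, codeql);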
diff --git a/src/init.test.ts b/src/init.test.ts
index bd267fb26b..6640e081f3 100644
--- a/src/init.test.ts
+++ b/src/init.test.ts
@@ -2,104 +2,23 @@ import * as fs from "fs";
 import path from "path";
 
 import test, { ExecutionContext } from "ava";
-import * as sinon from "sinon";
 
-import * as actionsUtil from "./actions-util";
 import { createStubCodeQL } from "./codeql";
-import { EnvVar } from "./environment";
 import {
   checkPacksForOverlayCompatibility,
-  checkWorkflow,
   cleanupDatabaseClusterDirectory,
 } from "./init";
 import { KnownLanguage } from "./languages";
 import {
   LoggedMessage,
-  checkExpectedLogMessages,
   createTestConfig,
   getRecordingLogger,
   setupTests,
 } from "./testing-utils";
 import { ConfigurationError, withTmpDir } from "./util";
-import * as workflow from "./workflow";
 
 setupTests(test);
 
-test("checkWorkflow - validates workflow if `SKIP_WORKFLOW_VALIDATION` is not set", async (t) => {
-  const messages: LoggedMessage[] = [];
-  const codeql = createStubCodeQL({});
-
-  sinon.stub(actionsUtil, "isDynamicWorkflow").returns(false);
-  const validateWorkflow = sinon.stub(workflow, "validateWorkflow");
-  validateWorkflow.resolves(undefined);
-
-  await checkWorkflow(getRecordingLogger(messages), codeql);
-
-  t.assert(
-    validateWorkflow.calledOnce,
-    "`checkWorkflow` unexpectedly did not call `validateWorkflow`",
-  );
-  checkExpectedLogMessages(t, messages, [
-    "Detected no issues with the code scanning workflow.",
-  ]);
-});
-
-test("checkWorkflow - logs problems with workflow validation", async (t) => {
-  const messages: LoggedMessage[] = [];
-  const codeql = createStubCodeQL({});
-
-  sinon.stub(actionsUtil, "isDynamicWorkflow").returns(false);
-  const validateWorkflow = sinon.stub(workflow, "validateWorkflow");
-  validateWorkflow.resolves("problem");
-
-  await checkWorkflow(getRecordingLogger(messages), codeql);
-
-  t.assert(
-    validateWorkflow.calledOnce,
-    "`checkWorkflow` unexpectedly did not call `validateWorkflow`",
-  );
-  checkExpectedLogMessages(t, messages, [
-    "Unable to validate code scanning workflow: problem",
-  ]);
-});
-
-test("checkWorkflow - skips validation if `SKIP_WORKFLOW_VALIDATION` is `true`", async (t) => {
-  process.env[EnvVar.SKIP_WORKFLOW_VALIDATION] = "true";
-
-  const messages: LoggedMessage[] = [];
-  const codeql = createStubCodeQL({});
-
-  sinon.stub(actionsUtil, "isDynamicWorkflow").returns(false);
-  const validateWorkflow = sinon.stub(workflow, "validateWorkflow");
-
-  await checkWorkflow(getRecordingLogger(messages), codeql);
-
-  t.assert(
-    validateWorkflow.notCalled,
-    "`checkWorkflow` called `validateWorkflow` unexpectedly",
-  );
-  t.is(messages.length, 0);
-});
-
-test("checkWorkflow - skips validation for `dynamic` workflows", async (t) => {
-  const messages: LoggedMessage[] = [];
-  const codeql = createStubCodeQL({});
-
-  const isDynamicWorkflow = sinon
-    .stub(actionsUtil, "isDynamicWorkflow")
-    .returns(true);
-  const validateWorkflow = sinon.stub(workflow, "validateWorkflow");
-
-  await checkWorkflow(getRecordingLogger(messages), codeql);
-
-  t.assert(isDynamicWorkflow.calledOnce);
-  t.assert(
-    validateWorkflow.notCalled,
-    "`checkWorkflow` called `validateWorkflow` unexpectedly",
-  );
-  t.is(messages.length, 0);
-});
-
 test("cleanupDatabaseClusterDirectory cleans up where possible", async (t) => {
   await withTmpDir(async (tmpDir: string) => {
     const dbLocation = path.resolve(tmpDir, "dbs");
diff --git a/src/init.ts b/src/init.ts
index 7d8df61f68..b399ef8d9d 100644
--- a/src/init.ts
+++ b/src/init.ts
@@ -1,20 +1,14 @@
 import * as fs from "fs";
 import * as path from "path";
 
-import * as core from "@actions/core";
 import * as toolrunner from "@actions/exec/lib/toolrunner";
 import * as io from "@actions/io";
 import * as yaml from "js-yaml";
 
-import {
-  getOptionalInput,
-  isDynamicWorkflow,
-  isSelfHostedRunner,
-} from "./actions-util";
+import { getOptionalInput, isSelfHostedRunner } from "./actions-util";
 import { GitHubApiDetails } from "./api-client";
 import { CodeQL, setupCodeQL } from "./codeql";
 import * as configUtils from "./config-utils";
-import { EnvVar } from "./environment";
 import { CodeQLDefaultVersionInfo, FeatureEnablement } from "./feature-flags";
 import { KnownLanguage, Language } from "./languages";
 import { Logger, withGroupAsync } from "./logging";
@@ -22,33 +16,6 @@ import { ToolsSource } from "./setup-codeql";
 import { ZstdAvailability } from "./tar";
 import { ToolsDownloadStatusReport } from "./tools-download";
 import * as util from "./util";
-import { validateWorkflow } from "./workflow";
-
-/**
- * A wrapper around `validateWorkflow` which reports the outcome.
- *
- * @param logger The logger to use.
- * @param codeql The CodeQL instance.
- */
-export async function checkWorkflow(logger: Logger, codeql: CodeQL) {
-  // Check the workflow for problems, unless `SKIP_WORKFLOW_VALIDATION` is `true`
-  // or the workflow trigger is `dynamic`.
-  if (
-    !isDynamicWorkflow() &&
-    process.env[EnvVar.SKIP_WORKFLOW_VALIDATION] !== "true"
-  ) {
-    core.startGroup("Validating workflow");
-    const validateWorkflowResult = await validateWorkflow(codeql, logger);
-    if (validateWorkflowResult === undefined) {
-      logger.info("Detected no issues with the code scanning workflow.");
-    } else {
-      logger.warning(
-        `Unable to validate code scanning workflow: ${validateWorkflowResult}`,
-      );
-    }
-    core.endGroup();
-  }
-}
 
 export async function initCodeQL(
   toolsInput: string | undefined,
diff --git a/src/workflow.test.ts b/src/workflow.test.ts
index e922d8079c..f05ad54851 100644
--- a/src/workflow.test.ts
+++ b/src/workflow.test.ts
@@ -2,9 +2,17 @@ import test, { ExecutionContext } from "ava";
 import * as yaml from "js-yaml";
 import * as sinon from "sinon";
 
-import { getCodeQLForTesting } from "./codeql";
-import { setupTests } from "./testing-utils";
+import * as actionsUtil from "./actions-util";
+import { createStubCodeQL, getCodeQLForTesting } from "./codeql";
+import { EnvVar } from "./environment";
 import {
+  checkExpectedLogMessages,
+  getRecordingLogger,
+  LoggedMessage,
+  setupTests,
+} from "./testing-utils";
+import {
+  checkWorkflow,
   CodedError,
   formatWorkflowCause,
   formatWorkflowErrors,
@@ -13,6 +21,7 @@ import {
   Workflow,
   WorkflowErrors,
 } from "./workflow";
+import * as workflow from "./workflow";
 
 function errorCodes(
   actual: CodedError[],
@@ -870,3 +879,78 @@ test("getCategoryInputOrThrow throws error for workflow with multiple calls to a
     },
   );
 });
+
+test("checkWorkflow - validates workflow if `SKIP_WORKFLOW_VALIDATION` is not set", async (t) => {
+  const messages: LoggedMessage[] = [];
+  const codeql = createStubCodeQL({});
+
+  sinon.stub(actionsUtil, "isDynamicWorkflow").returns(false);
+  const validateWorkflow = sinon.stub(workflow.internal, "validateWorkflow");
+  validateWorkflow.resolves(undefined);
+
+  await checkWorkflow(getRecordingLogger(messages), codeql);
+
+  t.assert(
+    validateWorkflow.calledOnce,
+    "`checkWorkflow` unexpectedly did not call `validateWorkflow`",
+  );
+  checkExpectedLogMessages(t, messages, [
+    "Detected no issues with the code scanning workflow.",
+  ]);
+});
+
+test("checkWorkflow - logs problems with workflow validation", async (t) => {
+  const messages: LoggedMessage[] = [];
+  const codeql = createStubCodeQL({});
+
+  sinon.stub(actionsUtil, "isDynamicWorkflow").returns(false);
+  const validateWorkflow = sinon.stub(workflow.internal, "validateWorkflow");
+  validateWorkflow.resolves("problem");
+
+  await checkWorkflow(getRecordingLogger(messages), codeql);
+
+  t.assert(
+    validateWorkflow.calledOnce,
+    "`checkWorkflow` unexpectedly did not call `validateWorkflow`",
+  );
+  checkExpectedLogMessages(t, messages, [
+    "Unable to validate code scanning workflow: problem",
+  ]);
+});
+
+test("checkWorkflow - skips validation if `SKIP_WORKFLOW_VALIDATION` is `true`", async (t) => {
+  process.env[EnvVar.SKIP_WORKFLOW_VALIDATION] = "true";
+
+  const messages: LoggedMessage[] = [];
+  const codeql = createStubCodeQL({});
+
+  sinon.stub(actionsUtil, "isDynamicWorkflow").returns(false);
+  const validateWorkflow = sinon.stub(workflow.internal, "validateWorkflow");
+
+  await checkWorkflow(getRecordingLogger(messages), codeql);
+
+  t.assert(
+    validateWorkflow.notCalled,
+    "`checkWorkflow` called `validateWorkflow` unexpectedly",
+  );
+  t.is(messages.length, 0);
+});
+
+test("checkWorkflow - skips validation for `dynamic` workflows", async (t) => {
+  const messages: LoggedMessage[] = [];
+  const codeql = createStubCodeQL({});
+
+  const isDynamicWorkflow = sinon
+    .stub(actionsUtil, "isDynamicWorkflow")
+    .returns(true);
+  const validateWorkflow = sinon.stub(workflow.internal, "validateWorkflow");
+
+  await checkWorkflow(getRecordingLogger(messages), codeql);
+
+  t.assert(isDynamicWorkflow.calledOnce);
+  t.assert(
+    validateWorkflow.notCalled,
+    "`checkWorkflow` called `validateWorkflow` unexpectedly",
+  );
+  t.is(messages.length, 0);
+});
diff --git a/src/workflow.ts b/src/workflow.ts
index adcb22baec..9852259063 100644
--- a/src/workflow.ts
+++ b/src/workflow.ts
@@ -5,8 +5,10 @@ import zlib from "zlib";
 import * as core from "@actions/core";
 import * as yaml from "js-yaml";
 
+import { isDynamicWorkflow } from "./actions-util";
 import * as api from "./api-client";
 import { CodeQL } from "./codeql";
+import { EnvVar } from "./environment";
 import { Logger } from "./logging";
 import {
   getRequiredEnvParam,
@@ -216,7 +218,7 @@ function hasWorkflowTrigger(triggerName: string, doc: Workflow): boolean {
   return Object.prototype.hasOwnProperty.call(doc.on, triggerName);
 }
 
-export async function validateWorkflow(
+async function validateWorkflow(
   codeql: CodeQL,
   logger: Logger,
 ): Promise<undefined | string> {
@@ -462,3 +464,36 @@ export function getCheckoutPathInputOrThrow(
     ) || getRequiredEnvParam("GITHUB_WORKSPACE") // if unspecified, checkout_path defaults to ${{ github.workspace }}
   );
 }
+
+/**
+ * A wrapper around `validateWorkflow` which reports the outcome.
+ *
+ * @param logger The logger to use.
+ * @param codeql The CodeQL instance.
+ */
+export async function checkWorkflow(logger: Logger, codeql: CodeQL) {
+  // Check the workflow for problems, unless `SKIP_WORKFLOW_VALIDATION` is `true`
+  // or the workflow trigger is `dynamic`.
+  if (
+    !isDynamicWorkflow() &&
+    process.env[EnvVar.SKIP_WORKFLOW_VALIDATION] !== "true"
+  ) {
+    core.startGroup("Validating workflow");
+    const validateWorkflowResult = await internal.validateWorkflow(
+      codeql,
+      logger,
+    );
+    if (validateWorkflowResult === undefined) {
+      logger.info("Detected no issues with the code scanning workflow.");
+    } else {
+      logger.warning(
+        `Unable to validate code scanning workflow: ${validateWorkflowResult}`,
+      );
+    }
+    core.endGroup();
+  }
+}
+
+export const internal = {
+  validateWorkflow,
+};

From 52cec4178d4b99a6d36c9c50a6193baa64cb3962 Mon Sep 17 00:00:00 2001
From: "Michael B. Gale"
Date: Mon, 27 Oct 2025 17:02:01 +0000
Subject: [PATCH 5/5] Downgrade log message from warning to debug level

---
 lib/init-action.js | 2 +-
 src/workflow.ts    | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/lib/init-action.js b/lib/init-action.js
index 47769e5f55..f82412930e 100644
--- a/lib/init-action.js
+++ b/lib/init-action.js
@@ -92218,7 +92218,7 @@ async function checkWorkflow(logger, codeql) {
   if (validateWorkflowResult === void 0) {
     logger.info("Detected no issues with the code scanning workflow.");
   } else {
-    logger.warning(
+    logger.debug(
       `Unable to validate code scanning workflow: ${validateWorkflowResult}`
     );
   }
diff --git a/src/workflow.ts b/src/workflow.ts
index 9852259063..467980fb0a 100644
--- a/src/workflow.ts
+++ b/src/workflow.ts
@@ -486,7 +486,7 @@ export async function checkWorkflow(logger: Logger, codeql: CodeQL) {
   if (validateWorkflowResult === undefined) {
     logger.info("Detected no issues with the code scanning workflow.");
   } else {
-    logger.warning(
+    logger.debug(
      `Unable to validate code scanning workflow: ${validateWorkflowResult}`,
     );
   }
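Note: the `internal` object exported from `src/workflow.ts` exists so that the tests can stub `validateWorkflow` now that it is module-private; `checkWorkflow` calls it through `internal.validateWorkflow`, which gives sinon a concrete property to replace. A self-contained sketch of this pattern, with illustrative names that are not part of the patch:

    // sketch.ts - the "internal" export pattern for stubbing private functions
    async function fetchGreeting(): Promise<string | undefined> {
      return "hello"; // module-private implementation
    }

    export async function greet(): Promise<string> {
      // Route the call through `internal` so tests can swap the implementation.
      const greeting = await internal.fetchGreeting();
      return greeting ?? "no greeting";
    }

    export const internal = { fetchGreeting };

    // sketch.test.ts (usage)
    // import * as sketch from "./sketch";
    // sinon.stub(sketch.internal, "fetchGreeting").resolves("stubbed");
    // t.is(await sketch.greet(), "stubbed");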