From 1a4ff5493dc01a2e831dc5ffa7ed15aa8b275e72 Mon Sep 17 00:00:00 2001 From: Jef LeCompte Date: Wed, 29 Jun 2022 12:01:42 -0700 Subject: [PATCH 01/32] feat: add volta as node-version-file --- dist/cache-save/index.js | 376 +++++----- dist/setup/index.js | 1466 +++++++++++++++++++------------------- src/main.ts | 17 +- tsconfig.json | 2 +- 4 files changed, 938 insertions(+), 923 deletions(-) diff --git a/dist/cache-save/index.js b/dist/cache-save/index.js index 219ef59f..a55d2327 100644 --- a/dist/cache-save/index.js +++ b/dist/cache-save/index.js @@ -59827,74 +59827,74 @@ exports.fromPromise = function (fn) { /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; - -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const core = __importStar(__nccwpck_require__(2186)); -const cache = __importStar(__nccwpck_require__(7799)); -const fs_1 = __importDefault(__nccwpck_require__(7147)); -const constants_1 = __nccwpck_require__(9042); -const cache_utils_1 = __nccwpck_require__(1678); -// Catch and log any unhandled exceptions. These exceptions can leak out of the uploadChunk method in -// @actions/toolkit when a failed upload closes the file descriptor causing any in-process reads to -// throw an uncaught exception. Instead of failing this action, just warn. 
-process.on('uncaughtException', e => { - const warningPrefix = '[warning]'; - core.info(`${warningPrefix}${e.message}`); -}); -function run() { - return __awaiter(this, void 0, void 0, function* () { - try { - const cacheLock = core.getInput('cache'); - yield cachePackages(cacheLock); - } - catch (error) { - core.setFailed(error.message); - } - }); -} -exports.run = run; -const cachePackages = (packageManager) => __awaiter(void 0, void 0, void 0, function* () { - const state = core.getState(constants_1.State.CacheMatchedKey); - const primaryKey = core.getState(constants_1.State.CachePrimaryKey); - const packageManagerInfo = yield cache_utils_1.getPackageManagerInfo(packageManager); - if (!packageManagerInfo) { - core.debug(`Caching for '${packageManager}' is not supported`); - return; - } - const cachePath = yield cache_utils_1.getCacheDirectoryPath(packageManagerInfo, packageManager); - if (!fs_1.default.existsSync(cachePath)) { - throw new Error(`Cache folder path is retrieved for ${packageManager} but doesn't exist on disk: ${cachePath}`); - } - if (primaryKey === state) { - core.info(`Cache hit occurred on the primary key ${primaryKey}, not saving cache.`); - return; - } - const cacheId = yield cache.saveCache([cachePath], primaryKey); - if (cacheId == -1) { - return; - } - core.info(`Cache saved with the key: ${primaryKey}`); -}); -run(); + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const core = __importStar(__nccwpck_require__(2186)); +const cache = __importStar(__nccwpck_require__(7799)); +const fs_1 = __importDefault(__nccwpck_require__(7147)); +const constants_1 = __nccwpck_require__(9042); +const cache_utils_1 = __nccwpck_require__(1678); +// Catch and log any unhandled exceptions. These exceptions can leak out of the uploadChunk method in +// @actions/toolkit when a failed upload closes the file descriptor causing any in-process reads to +// throw an uncaught exception. Instead of failing this action, just warn. 
+process.on('uncaughtException', e => { + const warningPrefix = '[warning]'; + core.info(`${warningPrefix}${e.message}`); +}); +function run() { + return __awaiter(this, void 0, void 0, function* () { + try { + const cacheLock = core.getInput('cache'); + yield cachePackages(cacheLock); + } + catch (error) { + core.setFailed(error.message); + } + }); +} +exports.run = run; +const cachePackages = (packageManager) => __awaiter(void 0, void 0, void 0, function* () { + const state = core.getState(constants_1.State.CacheMatchedKey); + const primaryKey = core.getState(constants_1.State.CachePrimaryKey); + const packageManagerInfo = yield cache_utils_1.getPackageManagerInfo(packageManager); + if (!packageManagerInfo) { + core.debug(`Caching for '${packageManager}' is not supported`); + return; + } + const cachePath = yield cache_utils_1.getCacheDirectoryPath(packageManagerInfo, packageManager); + if (!fs_1.default.existsSync(cachePath)) { + throw new Error(`Cache folder path is retrieved for ${packageManager} but doesn't exist on disk: ${cachePath}`); + } + if (primaryKey === state) { + core.info(`Cache hit occurred on the primary key ${primaryKey}, not saving cache.`); + return; + } + const cacheId = yield cache.saveCache([cachePath], primaryKey); + if (cacheId == -1) { + return; + } + core.info(`Cache saved with the key: ${primaryKey}`); +}); +run(); /***/ }), @@ -59903,109 +59903,109 @@ run(); /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; - -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const core = __importStar(__nccwpck_require__(2186)); -const exec = __importStar(__nccwpck_require__(1514)); -const cache = __importStar(__nccwpck_require__(7799)); -exports.supportedPackageManagers = { - npm: { - lockFilePatterns: ['package-lock.json', 'yarn.lock'], - getCacheFolderCommand: 'npm config get cache' - }, - pnpm: { - lockFilePatterns: ['pnpm-lock.yaml'], - getCacheFolderCommand: 'pnpm store path' - }, - yarn1: { - lockFilePatterns: ['yarn.lock'], - getCacheFolderCommand: 'yarn cache dir' - }, - yarn2: { - lockFilePatterns: ['yarn.lock'], - getCacheFolderCommand: 'yarn config get cacheFolder' - } -}; -exports.getCommandOutput = (toolCommand) => __awaiter(void 0, void 0, void 0, function* () { - let { stdout, stderr, exitCode } = yield exec.getExecOutput(toolCommand, undefined, { ignoreReturnCode: true }); - if (exitCode) { - stderr = !stderr.trim() - ? 
`The '${toolCommand}' command failed with exit code: ${exitCode}` - : stderr; - throw new Error(stderr); - } - return stdout.trim(); -}); -const getPackageManagerVersion = (packageManager, command) => __awaiter(void 0, void 0, void 0, function* () { - const stdOut = yield exports.getCommandOutput(`${packageManager} ${command}`); - if (!stdOut) { - throw new Error(`Could not retrieve version of ${packageManager}`); - } - return stdOut; -}); -exports.getPackageManagerInfo = (packageManager) => __awaiter(void 0, void 0, void 0, function* () { - if (packageManager === 'npm') { - return exports.supportedPackageManagers.npm; - } - else if (packageManager === 'pnpm') { - return exports.supportedPackageManagers.pnpm; - } - else if (packageManager === 'yarn') { - const yarnVersion = yield getPackageManagerVersion('yarn', '--version'); - core.debug(`Consumed yarn version is ${yarnVersion}`); - if (yarnVersion.startsWith('1.')) { - return exports.supportedPackageManagers.yarn1; - } - else { - return exports.supportedPackageManagers.yarn2; - } - } - else { - return null; - } -}); -exports.getCacheDirectoryPath = (packageManagerInfo, packageManager) => __awaiter(void 0, void 0, void 0, function* () { - const stdOut = yield exports.getCommandOutput(packageManagerInfo.getCacheFolderCommand); - if (!stdOut) { - throw new Error(`Could not get cache folder path for ${packageManager}`); - } - core.debug(`${packageManager} path is ${stdOut}`); - return stdOut; -}); -function isGhes() { - const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com'); - return ghUrl.hostname.toUpperCase() !== 'GITHUB.COM'; -} -exports.isGhes = isGhes; -function isCacheFeatureAvailable() { - if (!cache.isFeatureAvailable()) { - if (isGhes()) { - throw new Error('Cache action is only supported on GHES version >= 3.5. If you are on version >=3.5 Please check with GHES admin if Actions cache service is enabled or not.'); - } - else { - core.warning('The runner was not able to contact the cache service. Caching will be skipped'); - } - return false; - } - return true; -} -exports.isCacheFeatureAvailable = isCacheFeatureAvailable; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const core = __importStar(__nccwpck_require__(2186)); +const exec = __importStar(__nccwpck_require__(1514)); +const cache = __importStar(__nccwpck_require__(7799)); +exports.supportedPackageManagers = { + npm: { + lockFilePatterns: ['package-lock.json', 'yarn.lock'], + getCacheFolderCommand: 'npm config get cache' + }, + pnpm: { + lockFilePatterns: ['pnpm-lock.yaml'], + getCacheFolderCommand: 'pnpm store path' + }, + yarn1: { + lockFilePatterns: ['yarn.lock'], + getCacheFolderCommand: 'yarn cache dir' + }, + yarn2: { + lockFilePatterns: ['yarn.lock'], + getCacheFolderCommand: 'yarn config get cacheFolder' + } +}; +exports.getCommandOutput = (toolCommand) => __awaiter(void 0, void 0, void 0, function* () { + let { stdout, stderr, exitCode } = yield exec.getExecOutput(toolCommand, undefined, { ignoreReturnCode: true }); + if (exitCode) { + stderr = !stderr.trim() + ? `The '${toolCommand}' command failed with exit code: ${exitCode}` + : stderr; + throw new Error(stderr); + } + return stdout.trim(); +}); +const getPackageManagerVersion = (packageManager, command) => __awaiter(void 0, void 0, void 0, function* () { + const stdOut = yield exports.getCommandOutput(`${packageManager} ${command}`); + if (!stdOut) { + throw new Error(`Could not retrieve version of ${packageManager}`); + } + return stdOut; +}); +exports.getPackageManagerInfo = (packageManager) => __awaiter(void 0, void 0, void 0, function* () { + if (packageManager === 'npm') { + return exports.supportedPackageManagers.npm; + } + else if (packageManager === 'pnpm') { + return exports.supportedPackageManagers.pnpm; + } + else if (packageManager === 'yarn') { + const yarnVersion = yield getPackageManagerVersion('yarn', '--version'); + core.debug(`Consumed yarn version is ${yarnVersion}`); + if (yarnVersion.startsWith('1.')) { + return exports.supportedPackageManagers.yarn1; + } + else { + return exports.supportedPackageManagers.yarn2; + } + } + else { + return null; + } +}); +exports.getCacheDirectoryPath = (packageManagerInfo, packageManager) => __awaiter(void 0, void 0, void 0, function* () { + const stdOut = yield exports.getCommandOutput(packageManagerInfo.getCacheFolderCommand); + if (!stdOut) { + throw new Error(`Could not get cache folder path for ${packageManager}`); + } + core.debug(`${packageManager} path is ${stdOut}`); + return stdOut; +}); +function isGhes() { + const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com'); + return ghUrl.hostname.toUpperCase() !== 'GITHUB.COM'; +} +exports.isGhes = isGhes; +function isCacheFeatureAvailable() { + if (!cache.isFeatureAvailable()) { + if (isGhes()) { + throw new Error('Cache action is only supported on GHES version >= 3.5. If you are on version >=3.5 Please check with GHES admin if Actions cache service is enabled or not.'); + } + else { + core.warning('The runner was not able to contact the cache service. 
Caching will be skipped'); + } + return false; + } + return true; +} +exports.isCacheFeatureAvailable = isCacheFeatureAvailable; /***/ }), @@ -60014,23 +60014,23 @@ exports.isCacheFeatureAvailable = isCacheFeatureAvailable; /***/ ((__unused_webpack_module, exports) => { "use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -var LockType; -(function (LockType) { - LockType["Npm"] = "npm"; - LockType["Pnpm"] = "pnpm"; - LockType["Yarn"] = "yarn"; -})(LockType = exports.LockType || (exports.LockType = {})); -var State; -(function (State) { - State["CachePrimaryKey"] = "CACHE_KEY"; - State["CacheMatchedKey"] = "CACHE_RESULT"; -})(State = exports.State || (exports.State = {})); -var Outputs; -(function (Outputs) { - Outputs["CacheHit"] = "cache-hit"; -})(Outputs = exports.Outputs || (exports.Outputs = {})); + +Object.defineProperty(exports, "__esModule", ({ value: true })); +var LockType; +(function (LockType) { + LockType["Npm"] = "npm"; + LockType["Pnpm"] = "pnpm"; + LockType["Yarn"] = "yarn"; +})(LockType = exports.LockType || (exports.LockType = {})); +var State; +(function (State) { + State["CachePrimaryKey"] = "CACHE_KEY"; + State["CacheMatchedKey"] = "CACHE_RESULT"; +})(State = exports.State || (exports.State = {})); +var Outputs; +(function (Outputs) { + Outputs["CacheHit"] = "cache-hit"; +})(Outputs = exports.Outputs || (exports.Outputs = {})); /***/ }), diff --git a/dist/setup/index.js b/dist/setup/index.js index 1b249398..cae0cf5e 100644 --- a/dist/setup/index.js +++ b/dist/setup/index.js @@ -71113,62 +71113,62 @@ function wrappy (fn, cb) { /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; - -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const fs = __importStar(__nccwpck_require__(7147)); -const os = __importStar(__nccwpck_require__(2037)); -const path = __importStar(__nccwpck_require__(1017)); -const core = __importStar(__nccwpck_require__(2186)); -const github = __importStar(__nccwpck_require__(5438)); -function configAuthentication(registryUrl, alwaysAuth) { - const npmrc = path.resolve(process.env['RUNNER_TEMP'] || process.cwd(), '.npmrc'); - if (!registryUrl.endsWith('/')) { - registryUrl += '/'; - } - writeRegistryToFile(registryUrl, npmrc, alwaysAuth); -} -exports.configAuthentication = configAuthentication; -function writeRegistryToFile(registryUrl, fileLocation, alwaysAuth) { - let scope = core.getInput('scope'); - if (!scope && registryUrl.indexOf('npm.pkg.github.com') > -1) { - scope = github.context.repo.owner; - } - if (scope && scope[0] != '@') { - scope = '@' + scope; - } - if (scope) { - scope = scope.toLowerCase(); - } - core.debug(`Setting auth in ${fileLocation}`); - let newContents = ''; - if (fs.existsSync(fileLocation)) { - const curContents = fs.readFileSync(fileLocation, 'utf8'); - curContents.split(os.EOL).forEach((line) => { - // Add current contents unless they are setting the registry - if (!line.toLowerCase().startsWith('registry')) { - newContents += line + os.EOL; - } - }); - } - // Remove http: or https: from front of registry. - const authString = registryUrl.replace(/(^\w+:|^)/, '') + ':_authToken=${NODE_AUTH_TOKEN}'; - const registryString = scope - ? 
`${scope}:registry=${registryUrl}` - : `registry=${registryUrl}`; - const alwaysAuthString = `always-auth=${alwaysAuth}`; - newContents += `${authString}${os.EOL}${registryString}${os.EOL}${alwaysAuthString}`; - fs.writeFileSync(fileLocation, newContents); - core.exportVariable('NPM_CONFIG_USERCONFIG', fileLocation); - // Export empty node_auth_token if didn't exist so npm doesn't complain about not being able to find it - core.exportVariable('NODE_AUTH_TOKEN', process.env.NODE_AUTH_TOKEN || 'XXXXX-XXXXX-XXXXX-XXXXX'); -} + +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const fs = __importStar(__nccwpck_require__(7147)); +const os = __importStar(__nccwpck_require__(2037)); +const path = __importStar(__nccwpck_require__(1017)); +const core = __importStar(__nccwpck_require__(2186)); +const github = __importStar(__nccwpck_require__(5438)); +function configAuthentication(registryUrl, alwaysAuth) { + const npmrc = path.resolve(process.env['RUNNER_TEMP'] || process.cwd(), '.npmrc'); + if (!registryUrl.endsWith('/')) { + registryUrl += '/'; + } + writeRegistryToFile(registryUrl, npmrc, alwaysAuth); +} +exports.configAuthentication = configAuthentication; +function writeRegistryToFile(registryUrl, fileLocation, alwaysAuth) { + let scope = core.getInput('scope'); + if (!scope && registryUrl.indexOf('npm.pkg.github.com') > -1) { + scope = github.context.repo.owner; + } + if (scope && scope[0] != '@') { + scope = '@' + scope; + } + if (scope) { + scope = scope.toLowerCase(); + } + core.debug(`Setting auth in ${fileLocation}`); + let newContents = ''; + if (fs.existsSync(fileLocation)) { + const curContents = fs.readFileSync(fileLocation, 'utf8'); + curContents.split(os.EOL).forEach((line) => { + // Add current contents unless they are setting the registry + if (!line.toLowerCase().startsWith('registry')) { + newContents += line + os.EOL; + } + }); + } + // Remove http: or https: from front of registry. + const authString = registryUrl.replace(/(^\w+:|^)/, '') + ':_authToken=${NODE_AUTH_TOKEN}'; + const registryString = scope + ? `${scope}:registry=${registryUrl}` + : `registry=${registryUrl}`; + const alwaysAuthString = `always-auth=${alwaysAuth}`; + newContents += `${authString}${os.EOL}${registryString}${os.EOL}${alwaysAuthString}`; + fs.writeFileSync(fileLocation, newContents); + core.exportVariable('NPM_CONFIG_USERCONFIG', fileLocation); + // Export empty node_auth_token if didn't exist so npm doesn't complain about not being able to find it + core.exportVariable('NODE_AUTH_TOKEN', process.env.NODE_AUTH_TOKEN || 'XXXXX-XXXXX-XXXXX-XXXXX'); +} /***/ }), @@ -71177,70 +71177,70 @@ function writeRegistryToFile(registryUrl, fileLocation, alwaysAuth) { /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; - -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const cache = __importStar(__nccwpck_require__(7799)); -const core = __importStar(__nccwpck_require__(2186)); -const glob = __importStar(__nccwpck_require__(8090)); -const path_1 = __importDefault(__nccwpck_require__(1017)); -const fs_1 = __importDefault(__nccwpck_require__(7147)); -const constants_1 = __nccwpck_require__(9042); -const cache_utils_1 = __nccwpck_require__(1678); -exports.restoreCache = (packageManager, cacheDependencyPath) => __awaiter(void 0, void 0, void 0, function* () { - const packageManagerInfo = yield cache_utils_1.getPackageManagerInfo(packageManager); - if (!packageManagerInfo) { - throw new Error(`Caching for '${packageManager}' is not supported`); - } - const platform = process.env.RUNNER_OS; - const cachePath = yield cache_utils_1.getCacheDirectoryPath(packageManagerInfo, packageManager); - const lockFilePath = cacheDependencyPath - ? cacheDependencyPath - : findLockFile(packageManagerInfo); - const fileHash = yield glob.hashFiles(lockFilePath); - if (!fileHash) { - throw new Error('Some specified paths were not resolved, unable to cache dependencies.'); - } - const primaryKey = `node-cache-${platform}-${packageManager}-${fileHash}`; - core.debug(`primary key is ${primaryKey}`); - core.saveState(constants_1.State.CachePrimaryKey, primaryKey); - const cacheKey = yield cache.restoreCache([cachePath], primaryKey); - core.setOutput('cache-hit', Boolean(cacheKey)); - if (!cacheKey) { - core.info(`${packageManager} cache is not found`); - return; - } - core.saveState(constants_1.State.CacheMatchedKey, cacheKey); - core.info(`Cache restored from key: ${cacheKey}`); -}); -const findLockFile = (packageManager) => { - let lockFiles = packageManager.lockFilePatterns; - const workspace = process.env.GITHUB_WORKSPACE; - const rootContent = fs_1.default.readdirSync(workspace); - const lockFile = lockFiles.find(item => rootContent.includes(item)); - if (!lockFile) { - throw new Error(`Dependencies lock file is not found in ${workspace}. Supported file patterns: ${lockFiles.toString()}`); - } - return path_1.default.join(workspace, lockFile); -}; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const cache = __importStar(__nccwpck_require__(7799)); +const core = __importStar(__nccwpck_require__(2186)); +const glob = __importStar(__nccwpck_require__(8090)); +const path_1 = __importDefault(__nccwpck_require__(1017)); +const fs_1 = __importDefault(__nccwpck_require__(7147)); +const constants_1 = __nccwpck_require__(9042); +const cache_utils_1 = __nccwpck_require__(1678); +exports.restoreCache = (packageManager, cacheDependencyPath) => __awaiter(void 0, void 0, void 0, function* () { + const packageManagerInfo = yield cache_utils_1.getPackageManagerInfo(packageManager); + if (!packageManagerInfo) { + throw new Error(`Caching for '${packageManager}' is not supported`); + } + const platform = process.env.RUNNER_OS; + const cachePath = yield cache_utils_1.getCacheDirectoryPath(packageManagerInfo, packageManager); + const lockFilePath = cacheDependencyPath + ? cacheDependencyPath + : findLockFile(packageManagerInfo); + const fileHash = yield glob.hashFiles(lockFilePath); + if (!fileHash) { + throw new Error('Some specified paths were not resolved, unable to cache dependencies.'); + } + const primaryKey = `node-cache-${platform}-${packageManager}-${fileHash}`; + core.debug(`primary key is ${primaryKey}`); + core.saveState(constants_1.State.CachePrimaryKey, primaryKey); + const cacheKey = yield cache.restoreCache([cachePath], primaryKey); + core.setOutput('cache-hit', Boolean(cacheKey)); + if (!cacheKey) { + core.info(`${packageManager} cache is not found`); + return; + } + core.saveState(constants_1.State.CacheMatchedKey, cacheKey); + core.info(`Cache restored from key: ${cacheKey}`); +}); +const findLockFile = (packageManager) => { + let lockFiles = packageManager.lockFilePatterns; + const workspace = process.env.GITHUB_WORKSPACE; + const rootContent = fs_1.default.readdirSync(workspace); + const lockFile = lockFiles.find(item => rootContent.includes(item)); + if (!lockFile) { + throw new Error(`Dependencies lock file is not found in ${workspace}. Supported file patterns: ${lockFiles.toString()}`); + } + return path_1.default.join(workspace, lockFile); +}; /***/ }), @@ -71249,109 +71249,109 @@ const findLockFile = (packageManager) => { /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; - -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const core = __importStar(__nccwpck_require__(2186)); -const exec = __importStar(__nccwpck_require__(1514)); -const cache = __importStar(__nccwpck_require__(7799)); -exports.supportedPackageManagers = { - npm: { - lockFilePatterns: ['package-lock.json', 'yarn.lock'], - getCacheFolderCommand: 'npm config get cache' - }, - pnpm: { - lockFilePatterns: ['pnpm-lock.yaml'], - getCacheFolderCommand: 'pnpm store path' - }, - yarn1: { - lockFilePatterns: ['yarn.lock'], - getCacheFolderCommand: 'yarn cache dir' - }, - yarn2: { - lockFilePatterns: ['yarn.lock'], - getCacheFolderCommand: 'yarn config get cacheFolder' - } -}; -exports.getCommandOutput = (toolCommand) => __awaiter(void 0, void 0, void 0, function* () { - let { stdout, stderr, exitCode } = yield exec.getExecOutput(toolCommand, undefined, { ignoreReturnCode: true }); - if (exitCode) { - stderr = !stderr.trim() - ? `The '${toolCommand}' command failed with exit code: ${exitCode}` - : stderr; - throw new Error(stderr); - } - return stdout.trim(); -}); -const getPackageManagerVersion = (packageManager, command) => __awaiter(void 0, void 0, void 0, function* () { - const stdOut = yield exports.getCommandOutput(`${packageManager} ${command}`); - if (!stdOut) { - throw new Error(`Could not retrieve version of ${packageManager}`); - } - return stdOut; -}); -exports.getPackageManagerInfo = (packageManager) => __awaiter(void 0, void 0, void 0, function* () { - if (packageManager === 'npm') { - return exports.supportedPackageManagers.npm; - } - else if (packageManager === 'pnpm') { - return exports.supportedPackageManagers.pnpm; - } - else if (packageManager === 'yarn') { - const yarnVersion = yield getPackageManagerVersion('yarn', '--version'); - core.debug(`Consumed yarn version is ${yarnVersion}`); - if (yarnVersion.startsWith('1.')) { - return exports.supportedPackageManagers.yarn1; - } - else { - return exports.supportedPackageManagers.yarn2; - } - } - else { - return null; - } -}); -exports.getCacheDirectoryPath = (packageManagerInfo, packageManager) => __awaiter(void 0, void 0, void 0, function* () { - const stdOut = yield exports.getCommandOutput(packageManagerInfo.getCacheFolderCommand); - if (!stdOut) { - throw new Error(`Could not get cache folder path for ${packageManager}`); - } - core.debug(`${packageManager} path is ${stdOut}`); - return stdOut; -}); -function isGhes() { - const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com'); - return ghUrl.hostname.toUpperCase() !== 'GITHUB.COM'; -} -exports.isGhes = isGhes; -function isCacheFeatureAvailable() { - if (!cache.isFeatureAvailable()) { - if (isGhes()) { - throw new Error('Cache action is only supported on GHES version >= 3.5. 
If you are on version >=3.5 Please check with GHES admin if Actions cache service is enabled or not.'); - } - else { - core.warning('The runner was not able to contact the cache service. Caching will be skipped'); - } - return false; - } - return true; -} -exports.isCacheFeatureAvailable = isCacheFeatureAvailable; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const core = __importStar(__nccwpck_require__(2186)); +const exec = __importStar(__nccwpck_require__(1514)); +const cache = __importStar(__nccwpck_require__(7799)); +exports.supportedPackageManagers = { + npm: { + lockFilePatterns: ['package-lock.json', 'yarn.lock'], + getCacheFolderCommand: 'npm config get cache' + }, + pnpm: { + lockFilePatterns: ['pnpm-lock.yaml'], + getCacheFolderCommand: 'pnpm store path' + }, + yarn1: { + lockFilePatterns: ['yarn.lock'], + getCacheFolderCommand: 'yarn cache dir' + }, + yarn2: { + lockFilePatterns: ['yarn.lock'], + getCacheFolderCommand: 'yarn config get cacheFolder' + } +}; +exports.getCommandOutput = (toolCommand) => __awaiter(void 0, void 0, void 0, function* () { + let { stdout, stderr, exitCode } = yield exec.getExecOutput(toolCommand, undefined, { ignoreReturnCode: true }); + if (exitCode) { + stderr = !stderr.trim() + ? 
`The '${toolCommand}' command failed with exit code: ${exitCode}` + : stderr; + throw new Error(stderr); + } + return stdout.trim(); +}); +const getPackageManagerVersion = (packageManager, command) => __awaiter(void 0, void 0, void 0, function* () { + const stdOut = yield exports.getCommandOutput(`${packageManager} ${command}`); + if (!stdOut) { + throw new Error(`Could not retrieve version of ${packageManager}`); + } + return stdOut; +}); +exports.getPackageManagerInfo = (packageManager) => __awaiter(void 0, void 0, void 0, function* () { + if (packageManager === 'npm') { + return exports.supportedPackageManagers.npm; + } + else if (packageManager === 'pnpm') { + return exports.supportedPackageManagers.pnpm; + } + else if (packageManager === 'yarn') { + const yarnVersion = yield getPackageManagerVersion('yarn', '--version'); + core.debug(`Consumed yarn version is ${yarnVersion}`); + if (yarnVersion.startsWith('1.')) { + return exports.supportedPackageManagers.yarn1; + } + else { + return exports.supportedPackageManagers.yarn2; + } + } + else { + return null; + } +}); +exports.getCacheDirectoryPath = (packageManagerInfo, packageManager) => __awaiter(void 0, void 0, void 0, function* () { + const stdOut = yield exports.getCommandOutput(packageManagerInfo.getCacheFolderCommand); + if (!stdOut) { + throw new Error(`Could not get cache folder path for ${packageManager}`); + } + core.debug(`${packageManager} path is ${stdOut}`); + return stdOut; +}); +function isGhes() { + const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com'); + return ghUrl.hostname.toUpperCase() !== 'GITHUB.COM'; +} +exports.isGhes = isGhes; +function isCacheFeatureAvailable() { + if (!cache.isFeatureAvailable()) { + if (isGhes()) { + throw new Error('Cache action is only supported on GHES version >= 3.5. If you are on version >=3.5 Please check with GHES admin if Actions cache service is enabled or not.'); + } + else { + core.warning('The runner was not able to contact the cache service. 
Caching will be skipped'); + } + return false; + } + return true; +} +exports.isCacheFeatureAvailable = isCacheFeatureAvailable; /***/ }), @@ -71360,23 +71360,23 @@ exports.isCacheFeatureAvailable = isCacheFeatureAvailable; /***/ ((__unused_webpack_module, exports) => { "use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -var LockType; -(function (LockType) { - LockType["Npm"] = "npm"; - LockType["Pnpm"] = "pnpm"; - LockType["Yarn"] = "yarn"; -})(LockType = exports.LockType || (exports.LockType = {})); -var State; -(function (State) { - State["CachePrimaryKey"] = "CACHE_KEY"; - State["CacheMatchedKey"] = "CACHE_RESULT"; -})(State = exports.State || (exports.State = {})); -var Outputs; -(function (Outputs) { - Outputs["CacheHit"] = "cache-hit"; -})(Outputs = exports.Outputs || (exports.Outputs = {})); + +Object.defineProperty(exports, "__esModule", ({ value: true })); +var LockType; +(function (LockType) { + LockType["Npm"] = "npm"; + LockType["Pnpm"] = "pnpm"; + LockType["Yarn"] = "yarn"; +})(LockType = exports.LockType || (exports.LockType = {})); +var State; +(function (State) { + State["CachePrimaryKey"] = "CACHE_KEY"; + State["CacheMatchedKey"] = "CACHE_RESULT"; +})(State = exports.State || (exports.State = {})); +var Outputs; +(function (Outputs) { + Outputs["CacheHit"] = "cache-hit"; +})(Outputs = exports.Outputs || (exports.Outputs = {})); /***/ }), @@ -71385,399 +71385,399 @@ var Outputs; /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; - -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const os = __nccwpck_require__(2037); -const assert = __importStar(__nccwpck_require__(9491)); -const core = __importStar(__nccwpck_require__(2186)); -const hc = __importStar(__nccwpck_require__(9925)); -const io = __importStar(__nccwpck_require__(7436)); -const tc = __importStar(__nccwpck_require__(7784)); -const path = __importStar(__nccwpck_require__(1017)); -const semver = __importStar(__nccwpck_require__(5911)); -const fs = __nccwpck_require__(7147); -function getNode(versionSpec, stable, checkLatest, auth, arch = os.arch()) { - return __awaiter(this, void 0, void 0, function* () { - // Store manifest data to avoid multiple calls - let manifest; - let nodeVersions; - let osPlat = os.platform(); - let osArch = translateArchToDistUrl(arch); - if (isLtsAlias(versionSpec)) { - core.info('Attempt to resolve LTS alias from manifest...'); - // No try-catch since it's not possible to resolve LTS alias without manifest - manifest = yield getManifest(auth); - versionSpec = resolveLtsAliasFromManifest(versionSpec, stable, manifest); - } - if (isLatestSyntax(versionSpec)) { - nodeVersions = yield getVersionsFromDist(); - versionSpec = yield queryDistForMatch(versionSpec, arch, nodeVersions); - core.info(`getting latest node version...`); - } - if (checkLatest) { - core.info('Attempt to resolve the latest version from manifest...'); - const resolvedVersion = yield resolveVersionFromManifest(versionSpec, stable, auth, osArch, manifest); - if (resolvedVersion) { - versionSpec = resolvedVersion; - core.info(`Resolved as '${versionSpec}'`); - } - else { - core.info(`Failed to resolve version ${versionSpec} from manifest`); - } - } - // check cache - let toolPath; - toolPath = tc.find('node', versionSpec, osArch); - // If not found in cache, download - if (toolPath) { - core.info(`Found in cache @ ${toolPath}`); - } - else { - core.info(`Attempting to download ${versionSpec}...`); - let downloadPath = ''; - let info = null; - // - // Try download from internal distribution (popular versions only) - // - try { - info = yield getInfoFromManifest(versionSpec, stable, auth, osArch, manifest); - if (info) { - core.info(`Acquiring ${info.resolvedVersion} - ${info.arch} from ${info.downloadUrl}`); - downloadPath = yield tc.downloadTool(info.downloadUrl, undefined, auth); - } - else { - core.info('Not found in manifest. Falling back to download directly from Node'); - } - } - catch (err) { - // Rate limit? - if (err instanceof tc.HTTPError && - (err.httpStatusCode === 403 || err.httpStatusCode === 429)) { - core.info(`Received HTTP status code ${err.httpStatusCode}. 
This usually indicates the rate limit has been exceeded`); - } - else { - core.info(err.message); - } - core.debug(err.stack); - core.info('Falling back to download directly from Node'); - } - // - // Download from nodejs.org - // - if (!downloadPath) { - info = yield getInfoFromDist(versionSpec, arch, nodeVersions); - if (!info) { - throw new Error(`Unable to find Node version '${versionSpec}' for platform ${osPlat} and architecture ${osArch}.`); - } - core.info(`Acquiring ${info.resolvedVersion} - ${info.arch} from ${info.downloadUrl}`); - try { - downloadPath = yield tc.downloadTool(info.downloadUrl); - } - catch (err) { - if (err instanceof tc.HTTPError && err.httpStatusCode == 404) { - return yield acquireNodeFromFallbackLocation(info.resolvedVersion, info.arch); - } - throw err; - } - } - // - // Extract - // - core.info('Extracting ...'); - let extPath; - info = info || {}; // satisfy compiler, never null when reaches here - if (osPlat == 'win32') { - let _7zPath = path.join(__dirname, '../..', 'externals', '7zr.exe'); - extPath = yield tc.extract7z(downloadPath, undefined, _7zPath); - // 7z extracts to folder matching file name - let nestedPath = path.join(extPath, path.basename(info.fileName, '.7z')); - if (fs.existsSync(nestedPath)) { - extPath = nestedPath; - } - } - else { - extPath = yield tc.extractTar(downloadPath, undefined, [ - 'xz', - '--strip', - '1' - ]); - } - // - // Install into the local tool cache - node extracts with a root folder that matches the fileName downloaded - // - core.info('Adding to the cache ...'); - toolPath = yield tc.cacheDir(extPath, 'node', info.resolvedVersion, info.arch); - core.info('Done'); - } - // - // a tool installer initimately knows details about the layout of that tool - // for example, node binary is in the bin folder after the extract on Mac/Linux. - // layouts could change by version, by platform etc... but that's the tool installers job - // - if (osPlat != 'win32') { - toolPath = path.join(toolPath, 'bin'); - } - // - // prepend the tools path. instructs the agent to prepend for future tasks - core.addPath(toolPath); - }); -} -exports.getNode = getNode; -function isLtsAlias(versionSpec) { - return versionSpec.startsWith('lts/'); -} -function getManifest(auth) { - core.debug('Getting manifest from actions/node-versions@main'); - return tc.getManifestFromRepo('actions', 'node-versions', auth, 'main'); -} -function resolveLtsAliasFromManifest(versionSpec, stable, manifest) { - var _a; - const alias = (_a = versionSpec.split('lts/')[1]) === null || _a === void 0 ? void 0 : _a.toLowerCase(); - if (!alias) { - throw new Error(`Unable to parse LTS alias for Node version '${versionSpec}'`); - } - core.debug(`LTS alias '${alias}' for Node version '${versionSpec}'`); - // Supported formats are `lts/`, `lts/*`, and `lts/-n`. Where asterisk means highest possible LTS and -n means the nth-highest. - const n = Number(alias); - const aliases = Object.fromEntries(manifest - .filter(x => x.lts && x.stable === stable) - .map(x => [x.lts.toLowerCase(), x]) - .reverse()); - const numbered = Object.values(aliases); - const release = alias === '*' - ? numbered[numbered.length - 1] - : n < 0 - ? 
numbered[numbered.length - 1 + n] - : aliases[alias]; - if (!release) { - throw new Error(`Unable to find LTS release '${alias}' for Node version '${versionSpec}'.`); - } - core.debug(`Found LTS release '${release.version}' for Node version '${versionSpec}'`); - return release.version.split('.')[0]; -} -function getInfoFromManifest(versionSpec, stable, auth, osArch = translateArchToDistUrl(os.arch()), manifest) { - return __awaiter(this, void 0, void 0, function* () { - let info = null; - if (!manifest) { - core.debug('No manifest cached'); - manifest = yield getManifest(auth); - } - const rel = yield tc.findFromManifest(versionSpec, stable, manifest, osArch); - if (rel && rel.files.length > 0) { - info = {}; - info.resolvedVersion = rel.version; - info.arch = rel.files[0].arch; - info.downloadUrl = rel.files[0].download_url; - info.fileName = rel.files[0].filename; - } - return info; - }); -} -function getInfoFromDist(versionSpec, arch = os.arch(), nodeVersions) { - return __awaiter(this, void 0, void 0, function* () { - let osPlat = os.platform(); - let osArch = translateArchToDistUrl(arch); - let version = yield queryDistForMatch(versionSpec, arch, nodeVersions); - if (!version) { - return null; - } - // - // Download - a tool installer intimately knows how to get the tool (and construct urls) - // - version = semver.clean(version) || ''; - let fileName = osPlat == 'win32' - ? `node-v${version}-win-${osArch}` - : `node-v${version}-${osPlat}-${osArch}`; - let urlFileName = osPlat == 'win32' ? `${fileName}.7z` : `${fileName}.tar.gz`; - let url = `https://nodejs.org/dist/v${version}/${urlFileName}`; - return { - downloadUrl: url, - resolvedVersion: version, - arch: arch, - fileName: fileName - }; - }); -} -function resolveVersionFromManifest(versionSpec, stable, auth, osArch = translateArchToDistUrl(os.arch()), manifest) { - return __awaiter(this, void 0, void 0, function* () { - try { - const info = yield getInfoFromManifest(versionSpec, stable, auth, osArch, manifest); - return info === null || info === void 0 ? void 0 : info.resolvedVersion; - } - catch (err) { - core.info('Unable to resolve version from manifest...'); - core.debug(err.message); - } - }); -} -// TODO - should we just export this from @actions/tool-cache? 
Lifted directly from there -function evaluateVersions(versions, versionSpec) { - let version = ''; - core.debug(`evaluating ${versions.length} versions`); - versions = versions.sort((a, b) => { - if (semver.gt(a, b)) { - return 1; - } - return -1; - }); - for (let i = versions.length - 1; i >= 0; i--) { - const potential = versions[i]; - const satisfied = semver.satisfies(potential, versionSpec); - if (satisfied) { - version = potential; - break; - } - } - if (version) { - core.debug(`matched: ${version}`); - } - else { - core.debug('match not found'); - } - return version; -} -function queryDistForMatch(versionSpec, arch = os.arch(), nodeVersions) { - return __awaiter(this, void 0, void 0, function* () { - let osPlat = os.platform(); - let osArch = translateArchToDistUrl(arch); - // node offers a json list of versions - let dataFileName; - switch (osPlat) { - case 'linux': - dataFileName = `linux-${osArch}`; - break; - case 'darwin': - dataFileName = `osx-${osArch}-tar`; - break; - case 'win32': - dataFileName = `win-${osArch}-exe`; - break; - default: - throw new Error(`Unexpected OS '${osPlat}'`); - } - if (!nodeVersions) { - core.debug('No dist manifest cached'); - nodeVersions = yield getVersionsFromDist(); - } - let versions = []; - if (isLatestSyntax(versionSpec)) { - core.info(`getting latest node version...`); - return nodeVersions[0].version; - } - nodeVersions.forEach((nodeVersion) => { - // ensure this version supports your os and platform - if (nodeVersion.files.indexOf(dataFileName) >= 0) { - versions.push(nodeVersion.version); - } - }); - // get the latest version that matches the version spec - let version = evaluateVersions(versions, versionSpec); - return version; - }); -} -function getVersionsFromDist() { - return __awaiter(this, void 0, void 0, function* () { - let dataUrl = 'https://nodejs.org/dist/index.json'; - let httpClient = new hc.HttpClient('setup-node', [], { - allowRetries: true, - maxRetries: 3 - }); - let response = yield httpClient.getJson(dataUrl); - return response.result || []; - }); -} -exports.getVersionsFromDist = getVersionsFromDist; -// For non LTS versions of Node, the files we need (for Windows) are sometimes located -// in a different folder than they normally are for other versions. -// Normally the format is similar to: https://nodejs.org/dist/v5.10.1/node-v5.10.1-win-x64.7z -// In this case, there will be two files located at: -// /dist/v5.10.1/win-x64/node.exe -// /dist/v5.10.1/win-x64/node.lib -// If this is not the structure, there may also be two files located at: -// /dist/v0.12.18/node.exe -// /dist/v0.12.18/node.lib -// This method attempts to download and cache the resources from these alternative locations. -// Note also that the files are normally zipped but in this case they are just an exe -// and lib file in a folder, not zipped. 
-function acquireNodeFromFallbackLocation(version, arch = os.arch()) { - return __awaiter(this, void 0, void 0, function* () { - let osPlat = os.platform(); - let osArch = translateArchToDistUrl(arch); - // Create temporary folder to download in to - const tempDownloadFolder = 'temp_' + Math.floor(Math.random() * 2000000000); - const tempDirectory = process.env['RUNNER_TEMP'] || ''; - assert.ok(tempDirectory, 'Expected RUNNER_TEMP to be defined'); - const tempDir = path.join(tempDirectory, tempDownloadFolder); - yield io.mkdirP(tempDir); - let exeUrl; - let libUrl; - try { - exeUrl = `https://nodejs.org/dist/v${version}/win-${osArch}/node.exe`; - libUrl = `https://nodejs.org/dist/v${version}/win-${osArch}/node.lib`; - core.info(`Downloading only node binary from ${exeUrl}`); - const exePath = yield tc.downloadTool(exeUrl); - yield io.cp(exePath, path.join(tempDir, 'node.exe')); - const libPath = yield tc.downloadTool(libUrl); - yield io.cp(libPath, path.join(tempDir, 'node.lib')); - } - catch (err) { - if (err instanceof tc.HTTPError && err.httpStatusCode == 404) { - exeUrl = `https://nodejs.org/dist/v${version}/node.exe`; - libUrl = `https://nodejs.org/dist/v${version}/node.lib`; - const exePath = yield tc.downloadTool(exeUrl); - yield io.cp(exePath, path.join(tempDir, 'node.exe')); - const libPath = yield tc.downloadTool(libUrl); - yield io.cp(libPath, path.join(tempDir, 'node.lib')); - } - else { - throw err; - } - } - let toolPath = yield tc.cacheDir(tempDir, 'node', version, arch); - core.addPath(toolPath); - return toolPath; - }); -} -// os.arch does not always match the relative download url, e.g. -// os.arch == 'arm' != node-v12.13.1-linux-armv7l.tar.gz -// All other currently supported architectures match, e.g.: -// os.arch = arm64 => https://nodejs.org/dist/v{VERSION}/node-v{VERSION}-{OS}-arm64.tar.gz -// os.arch = x64 => https://nodejs.org/dist/v{VERSION}/node-v{VERSION}-{OS}-x64.tar.gz -function translateArchToDistUrl(arch) { - switch (arch) { - case 'arm': - return 'armv7l'; - default: - return arch; - } -} -function parseNodeVersionFile(contents) { - let nodeVersion = contents.trim(); - if (/^v\d/.test(nodeVersion)) { - nodeVersion = nodeVersion.substring(1); - } - return nodeVersion; -} -exports.parseNodeVersionFile = parseNodeVersionFile; -function isLatestSyntax(versionSpec) { - return ['current', 'latest', 'node'].includes(versionSpec); -} + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const os = __nccwpck_require__(2037); +const assert = __importStar(__nccwpck_require__(9491)); +const core = __importStar(__nccwpck_require__(2186)); +const hc = __importStar(__nccwpck_require__(9925)); +const io = __importStar(__nccwpck_require__(7436)); +const tc = __importStar(__nccwpck_require__(7784)); +const path = __importStar(__nccwpck_require__(1017)); +const semver = __importStar(__nccwpck_require__(5911)); +const fs = __nccwpck_require__(7147); +function getNode(versionSpec, stable, checkLatest, auth, arch = os.arch()) { + return __awaiter(this, void 0, void 0, function* () { + // Store manifest data to avoid multiple calls + let manifest; + let nodeVersions; + let osPlat = os.platform(); + let osArch = translateArchToDistUrl(arch); + if (isLtsAlias(versionSpec)) { + core.info('Attempt to resolve LTS alias from manifest...'); + // No try-catch since it's not possible to resolve LTS alias without manifest + manifest = yield getManifest(auth); + versionSpec = resolveLtsAliasFromManifest(versionSpec, stable, manifest); + } + if (isLatestSyntax(versionSpec)) { + nodeVersions = yield getVersionsFromDist(); + versionSpec = yield queryDistForMatch(versionSpec, arch, nodeVersions); + core.info(`getting latest node version...`); + } + if (checkLatest) { + core.info('Attempt to resolve the latest version from manifest...'); + const resolvedVersion = yield resolveVersionFromManifest(versionSpec, stable, auth, osArch, manifest); + if (resolvedVersion) { + versionSpec = resolvedVersion; + core.info(`Resolved as '${versionSpec}'`); + } + else { + core.info(`Failed to resolve version ${versionSpec} from manifest`); + } + } + // check cache + let toolPath; + toolPath = tc.find('node', versionSpec, osArch); + // If not found in cache, download + if (toolPath) { + core.info(`Found in cache @ ${toolPath}`); + } + else { + core.info(`Attempting to download ${versionSpec}...`); + let downloadPath = ''; + let info = null; + // + // Try download from internal distribution (popular versions only) + // + try { + info = yield getInfoFromManifest(versionSpec, stable, auth, osArch, manifest); + if (info) { + core.info(`Acquiring ${info.resolvedVersion} - ${info.arch} from ${info.downloadUrl}`); + downloadPath = yield tc.downloadTool(info.downloadUrl, undefined, auth); + } + else { + core.info('Not found in manifest. Falling back to download directly from Node'); + } + } + catch (err) { + // Rate limit? + if (err instanceof tc.HTTPError && + (err.httpStatusCode === 403 || err.httpStatusCode === 429)) { + core.info(`Received HTTP status code ${err.httpStatusCode}. 
This usually indicates the rate limit has been exceeded`); + } + else { + core.info(err.message); + } + core.debug(err.stack); + core.info('Falling back to download directly from Node'); + } + // + // Download from nodejs.org + // + if (!downloadPath) { + info = yield getInfoFromDist(versionSpec, arch, nodeVersions); + if (!info) { + throw new Error(`Unable to find Node version '${versionSpec}' for platform ${osPlat} and architecture ${osArch}.`); + } + core.info(`Acquiring ${info.resolvedVersion} - ${info.arch} from ${info.downloadUrl}`); + try { + downloadPath = yield tc.downloadTool(info.downloadUrl); + } + catch (err) { + if (err instanceof tc.HTTPError && err.httpStatusCode == 404) { + return yield acquireNodeFromFallbackLocation(info.resolvedVersion, info.arch); + } + throw err; + } + } + // + // Extract + // + core.info('Extracting ...'); + let extPath; + info = info || {}; // satisfy compiler, never null when reaches here + if (osPlat == 'win32') { + let _7zPath = path.join(__dirname, '../..', 'externals', '7zr.exe'); + extPath = yield tc.extract7z(downloadPath, undefined, _7zPath); + // 7z extracts to folder matching file name + let nestedPath = path.join(extPath, path.basename(info.fileName, '.7z')); + if (fs.existsSync(nestedPath)) { + extPath = nestedPath; + } + } + else { + extPath = yield tc.extractTar(downloadPath, undefined, [ + 'xz', + '--strip', + '1' + ]); + } + // + // Install into the local tool cache - node extracts with a root folder that matches the fileName downloaded + // + core.info('Adding to the cache ...'); + toolPath = yield tc.cacheDir(extPath, 'node', info.resolvedVersion, info.arch); + core.info('Done'); + } + // + // a tool installer initimately knows details about the layout of that tool + // for example, node binary is in the bin folder after the extract on Mac/Linux. + // layouts could change by version, by platform etc... but that's the tool installers job + // + if (osPlat != 'win32') { + toolPath = path.join(toolPath, 'bin'); + } + // + // prepend the tools path. instructs the agent to prepend for future tasks + core.addPath(toolPath); + }); +} +exports.getNode = getNode; +function isLtsAlias(versionSpec) { + return versionSpec.startsWith('lts/'); +} +function getManifest(auth) { + core.debug('Getting manifest from actions/node-versions@main'); + return tc.getManifestFromRepo('actions', 'node-versions', auth, 'main'); +} +function resolveLtsAliasFromManifest(versionSpec, stable, manifest) { + var _a; + const alias = (_a = versionSpec.split('lts/')[1]) === null || _a === void 0 ? void 0 : _a.toLowerCase(); + if (!alias) { + throw new Error(`Unable to parse LTS alias for Node version '${versionSpec}'`); + } + core.debug(`LTS alias '${alias}' for Node version '${versionSpec}'`); + // Supported formats are `lts/`, `lts/*`, and `lts/-n`. Where asterisk means highest possible LTS and -n means the nth-highest. + const n = Number(alias); + const aliases = Object.fromEntries(manifest + .filter(x => x.lts && x.stable === stable) + .map(x => [x.lts.toLowerCase(), x]) + .reverse()); + const numbered = Object.values(aliases); + const release = alias === '*' + ? numbered[numbered.length - 1] + : n < 0 + ? 
numbered[numbered.length - 1 + n] + : aliases[alias]; + if (!release) { + throw new Error(`Unable to find LTS release '${alias}' for Node version '${versionSpec}'.`); + } + core.debug(`Found LTS release '${release.version}' for Node version '${versionSpec}'`); + return release.version.split('.')[0]; +} +function getInfoFromManifest(versionSpec, stable, auth, osArch = translateArchToDistUrl(os.arch()), manifest) { + return __awaiter(this, void 0, void 0, function* () { + let info = null; + if (!manifest) { + core.debug('No manifest cached'); + manifest = yield getManifest(auth); + } + const rel = yield tc.findFromManifest(versionSpec, stable, manifest, osArch); + if (rel && rel.files.length > 0) { + info = {}; + info.resolvedVersion = rel.version; + info.arch = rel.files[0].arch; + info.downloadUrl = rel.files[0].download_url; + info.fileName = rel.files[0].filename; + } + return info; + }); +} +function getInfoFromDist(versionSpec, arch = os.arch(), nodeVersions) { + return __awaiter(this, void 0, void 0, function* () { + let osPlat = os.platform(); + let osArch = translateArchToDistUrl(arch); + let version = yield queryDistForMatch(versionSpec, arch, nodeVersions); + if (!version) { + return null; + } + // + // Download - a tool installer intimately knows how to get the tool (and construct urls) + // + version = semver.clean(version) || ''; + let fileName = osPlat == 'win32' + ? `node-v${version}-win-${osArch}` + : `node-v${version}-${osPlat}-${osArch}`; + let urlFileName = osPlat == 'win32' ? `${fileName}.7z` : `${fileName}.tar.gz`; + let url = `https://nodejs.org/dist/v${version}/${urlFileName}`; + return { + downloadUrl: url, + resolvedVersion: version, + arch: arch, + fileName: fileName + }; + }); +} +function resolveVersionFromManifest(versionSpec, stable, auth, osArch = translateArchToDistUrl(os.arch()), manifest) { + return __awaiter(this, void 0, void 0, function* () { + try { + const info = yield getInfoFromManifest(versionSpec, stable, auth, osArch, manifest); + return info === null || info === void 0 ? void 0 : info.resolvedVersion; + } + catch (err) { + core.info('Unable to resolve version from manifest...'); + core.debug(err.message); + } + }); +} +// TODO - should we just export this from @actions/tool-cache? 
Lifted directly from there +function evaluateVersions(versions, versionSpec) { + let version = ''; + core.debug(`evaluating ${versions.length} versions`); + versions = versions.sort((a, b) => { + if (semver.gt(a, b)) { + return 1; + } + return -1; + }); + for (let i = versions.length - 1; i >= 0; i--) { + const potential = versions[i]; + const satisfied = semver.satisfies(potential, versionSpec); + if (satisfied) { + version = potential; + break; + } + } + if (version) { + core.debug(`matched: ${version}`); + } + else { + core.debug('match not found'); + } + return version; +} +function queryDistForMatch(versionSpec, arch = os.arch(), nodeVersions) { + return __awaiter(this, void 0, void 0, function* () { + let osPlat = os.platform(); + let osArch = translateArchToDistUrl(arch); + // node offers a json list of versions + let dataFileName; + switch (osPlat) { + case 'linux': + dataFileName = `linux-${osArch}`; + break; + case 'darwin': + dataFileName = `osx-${osArch}-tar`; + break; + case 'win32': + dataFileName = `win-${osArch}-exe`; + break; + default: + throw new Error(`Unexpected OS '${osPlat}'`); + } + if (!nodeVersions) { + core.debug('No dist manifest cached'); + nodeVersions = yield getVersionsFromDist(); + } + let versions = []; + if (isLatestSyntax(versionSpec)) { + core.info(`getting latest node version...`); + return nodeVersions[0].version; + } + nodeVersions.forEach((nodeVersion) => { + // ensure this version supports your os and platform + if (nodeVersion.files.indexOf(dataFileName) >= 0) { + versions.push(nodeVersion.version); + } + }); + // get the latest version that matches the version spec + let version = evaluateVersions(versions, versionSpec); + return version; + }); +} +function getVersionsFromDist() { + return __awaiter(this, void 0, void 0, function* () { + let dataUrl = 'https://nodejs.org/dist/index.json'; + let httpClient = new hc.HttpClient('setup-node', [], { + allowRetries: true, + maxRetries: 3 + }); + let response = yield httpClient.getJson(dataUrl); + return response.result || []; + }); +} +exports.getVersionsFromDist = getVersionsFromDist; +// For non LTS versions of Node, the files we need (for Windows) are sometimes located +// in a different folder than they normally are for other versions. +// Normally the format is similar to: https://nodejs.org/dist/v5.10.1/node-v5.10.1-win-x64.7z +// In this case, there will be two files located at: +// /dist/v5.10.1/win-x64/node.exe +// /dist/v5.10.1/win-x64/node.lib +// If this is not the structure, there may also be two files located at: +// /dist/v0.12.18/node.exe +// /dist/v0.12.18/node.lib +// This method attempts to download and cache the resources from these alternative locations. +// Note also that the files are normally zipped but in this case they are just an exe +// and lib file in a folder, not zipped. 
+function acquireNodeFromFallbackLocation(version, arch = os.arch()) { + return __awaiter(this, void 0, void 0, function* () { + let osPlat = os.platform(); + let osArch = translateArchToDistUrl(arch); + // Create temporary folder to download in to + const tempDownloadFolder = 'temp_' + Math.floor(Math.random() * 2000000000); + const tempDirectory = process.env['RUNNER_TEMP'] || ''; + assert.ok(tempDirectory, 'Expected RUNNER_TEMP to be defined'); + const tempDir = path.join(tempDirectory, tempDownloadFolder); + yield io.mkdirP(tempDir); + let exeUrl; + let libUrl; + try { + exeUrl = `https://nodejs.org/dist/v${version}/win-${osArch}/node.exe`; + libUrl = `https://nodejs.org/dist/v${version}/win-${osArch}/node.lib`; + core.info(`Downloading only node binary from ${exeUrl}`); + const exePath = yield tc.downloadTool(exeUrl); + yield io.cp(exePath, path.join(tempDir, 'node.exe')); + const libPath = yield tc.downloadTool(libUrl); + yield io.cp(libPath, path.join(tempDir, 'node.lib')); + } + catch (err) { + if (err instanceof tc.HTTPError && err.httpStatusCode == 404) { + exeUrl = `https://nodejs.org/dist/v${version}/node.exe`; + libUrl = `https://nodejs.org/dist/v${version}/node.lib`; + const exePath = yield tc.downloadTool(exeUrl); + yield io.cp(exePath, path.join(tempDir, 'node.exe')); + const libPath = yield tc.downloadTool(libUrl); + yield io.cp(libPath, path.join(tempDir, 'node.lib')); + } + else { + throw err; + } + } + let toolPath = yield tc.cacheDir(tempDir, 'node', version, arch); + core.addPath(toolPath); + return toolPath; + }); +} +// os.arch does not always match the relative download url, e.g. +// os.arch == 'arm' != node-v12.13.1-linux-armv7l.tar.gz +// All other currently supported architectures match, e.g.: +// os.arch = arm64 => https://nodejs.org/dist/v{VERSION}/node-v{VERSION}-{OS}-arm64.tar.gz +// os.arch = x64 => https://nodejs.org/dist/v{VERSION}/node-v{VERSION}-{OS}-x64.tar.gz +function translateArchToDistUrl(arch) { + switch (arch) { + case 'arm': + return 'armv7l'; + default: + return arch; + } +} +function parseNodeVersionFile(contents) { + let nodeVersion = contents.trim(); + if (/^v\d/.test(nodeVersion)) { + nodeVersion = nodeVersion.substring(1); + } + return nodeVersion; +} +exports.parseNodeVersionFile = parseNodeVersionFile; +function isLatestSyntax(versionSpec) { + return ['current', 'latest', 'node'].includes(versionSpec); +} /***/ }), @@ -71786,99 +71786,105 @@ function isLatestSyntax(versionSpec) { /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; - -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const core = __importStar(__nccwpck_require__(2186)); -const installer = __importStar(__nccwpck_require__(2574)); -const fs_1 = __importDefault(__nccwpck_require__(7147)); -const auth = __importStar(__nccwpck_require__(7573)); -const path = __importStar(__nccwpck_require__(1017)); -const cache_restore_1 = __nccwpck_require__(9517); -const cache_utils_1 = __nccwpck_require__(1678); -const os = __nccwpck_require__(2037); -function run() { - return __awaiter(this, void 0, void 0, function* () { - try { - // - // Version is optional. If supplied, install / use from the tool cache - // If not supplied then task is still used to setup proxy, auth, etc... - // - let version = resolveVersionInput(); - let arch = core.getInput('architecture'); - const cache = core.getInput('cache'); - // if architecture supplied but node-version is not - // if we don't throw a warning, the already installed x64 node will be used which is not probably what user meant. - if (arch && !version) { - core.warning('`architecture` is provided but `node-version` is missing. In this configuration, the version/architecture of Node will not be changed. To fix this, provide `architecture` in combination with `node-version`'); - } - if (!arch) { - arch = os.arch(); - } - if (version) { - let token = core.getInput('token'); - let auth = !token || cache_utils_1.isGhes() ? 
undefined : `token ${token}`; - let stable = (core.getInput('stable') || 'true').toUpperCase() === 'TRUE'; - const checkLatest = (core.getInput('check-latest') || 'false').toUpperCase() === 'TRUE'; - yield installer.getNode(version, stable, checkLatest, auth, arch); - } - const registryUrl = core.getInput('registry-url'); - const alwaysAuth = core.getInput('always-auth'); - if (registryUrl) { - auth.configAuthentication(registryUrl, alwaysAuth); - } - if (cache && cache_utils_1.isCacheFeatureAvailable()) { - const cacheDependencyPath = core.getInput('cache-dependency-path'); - yield cache_restore_1.restoreCache(cache, cacheDependencyPath); - } - const matchersPath = path.join(__dirname, '../..', '.github'); - core.info(`##[add-matcher]${path.join(matchersPath, 'tsc.json')}`); - core.info(`##[add-matcher]${path.join(matchersPath, 'eslint-stylish.json')}`); - core.info(`##[add-matcher]${path.join(matchersPath, 'eslint-compact.json')}`); - } - catch (err) { - core.setFailed(err.message); - } - }); -} -exports.run = run; -function resolveVersionInput() { - let version = core.getInput('node-version'); - const versionFileInput = core.getInput('node-version-file'); - if (version && versionFileInput) { - core.warning('Both node-version and node-version-file inputs are specified, only node-version will be used'); - } - if (version) { - return version; - } - if (versionFileInput) { - const versionFilePath = path.join(process.env.GITHUB_WORKSPACE, versionFileInput); - if (!fs_1.default.existsSync(versionFilePath)) { - throw new Error(`The specified node version file at: ${versionFilePath} does not exist`); - } - version = installer.parseNodeVersionFile(fs_1.default.readFileSync(versionFilePath, 'utf8')); - core.info(`Resolved ${versionFileInput} as ${version}`); - } - return version; -} + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const core = __importStar(__nccwpck_require__(2186)); +const installer = __importStar(__nccwpck_require__(2574)); +const fs_1 = __importDefault(__nccwpck_require__(7147)); +const auth = __importStar(__nccwpck_require__(7573)); +const path = __importStar(__nccwpck_require__(1017)); +const cache_restore_1 = __nccwpck_require__(9517); +const cache_utils_1 = __nccwpck_require__(1678); +const os = __nccwpck_require__(2037); +function run() { + return __awaiter(this, void 0, void 0, function* () { + try { + // + // Version is optional. 
If supplied, install / use from the tool cache + // If not supplied then task is still used to setup proxy, auth, etc... + // + let version = resolveVersionInput(); + let arch = core.getInput('architecture'); + const cache = core.getInput('cache'); + // if architecture supplied but node-version is not + // if we don't throw a warning, the already installed x64 node will be used which is not probably what user meant. + if (arch && !version) { + core.warning('`architecture` is provided but `node-version` is missing. In this configuration, the version/architecture of Node will not be changed. To fix this, provide `architecture` in combination with `node-version`'); + } + if (!arch) { + arch = os.arch(); + } + if (version) { + let token = core.getInput('token'); + let auth = !token || cache_utils_1.isGhes() ? undefined : `token ${token}`; + let stable = (core.getInput('stable') || 'true').toUpperCase() === 'TRUE'; + const checkLatest = (core.getInput('check-latest') || 'false').toUpperCase() === 'TRUE'; + yield installer.getNode(version, stable, checkLatest, auth, arch); + } + const registryUrl = core.getInput('registry-url'); + const alwaysAuth = core.getInput('always-auth'); + if (registryUrl) { + auth.configAuthentication(registryUrl, alwaysAuth); + } + if (cache && cache_utils_1.isCacheFeatureAvailable()) { + const cacheDependencyPath = core.getInput('cache-dependency-path'); + yield cache_restore_1.restoreCache(cache, cacheDependencyPath); + } + const matchersPath = path.join(__dirname, '../..', '.github'); + core.info(`##[add-matcher]${path.join(matchersPath, 'tsc.json')}`); + core.info(`##[add-matcher]${path.join(matchersPath, 'eslint-stylish.json')}`); + core.info(`##[add-matcher]${path.join(matchersPath, 'eslint-compact.json')}`); + } + catch (err) { + core.setFailed(err.message); + } + }); +} +exports.run = run; +function resolveVersionInput() { + let version = core.getInput('node-version'); + const nodeVersionFile = core.getInput('node-version-file'); + const versionFileInput = nodeVersionFile === 'volta' ? 
'package.json' : nodeVersionFile; + if (version && versionFileInput) { + core.warning('Both node-version and node-version-file inputs are specified, only node-version will be used'); + } + if (version) { + return version; + } + if (versionFileInput) { + const versionFilePath = path.join(process.env.GITHUB_WORKSPACE, versionFileInput); + if (!fs_1.default.existsSync(versionFilePath)) { + throw new Error(`The specified node version file at: ${versionFilePath} does not exist`); + } + if (nodeVersionFile === 'volta') { + version = JSON.parse(fs_1.default.readFileSync(versionFilePath, 'utf8')).volta.node; + } + else { + version = installer.parseNodeVersionFile(fs_1.default.readFileSync(versionFilePath, 'utf8')); + } + core.info(`Resolved ${versionFileInput} as ${version}`); + } + return version; +} /***/ }), @@ -72134,10 +72140,10 @@ var __webpack_exports__ = {}; (() => { "use strict"; var exports = __webpack_exports__; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const main_1 = __nccwpck_require__(399); -main_1.run(); + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const main_1 = __nccwpck_require__(399); +main_1.run(); })(); diff --git a/src/main.ts b/src/main.ts index 3c5661b5..40d1cc1a 100644 --- a/src/main.ts +++ b/src/main.ts @@ -65,7 +65,9 @@ export async function run() { function resolveVersionInput(): string { let version = core.getInput('node-version'); - const versionFileInput = core.getInput('node-version-file'); + const nodeVersionFile = core.getInput('node-version-file'); + const versionFileInput = + nodeVersionFile === 'volta' ? 'package.json' : nodeVersionFile; if (version && versionFileInput) { core.warning( @@ -82,14 +84,21 @@ function resolveVersionInput(): string { process.env.GITHUB_WORKSPACE!, versionFileInput ); + if (!fs.existsSync(versionFilePath)) { throw new Error( `The specified node version file at: ${versionFilePath} does not exist` ); } - version = installer.parseNodeVersionFile( - fs.readFileSync(versionFilePath, 'utf8') - ); + + if (nodeVersionFile === 'volta') { + version = JSON.parse(fs.readFileSync(versionFilePath, 'utf8')).volta.node; + } else { + version = installer.parseNodeVersionFile( + fs.readFileSync(versionFilePath, 'utf8') + ); + } + core.info(`Resolved ${versionFileInput} as ${version}`); } diff --git a/tsconfig.json b/tsconfig.json index 8405b0a1..84dd7201 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -6,7 +6,7 @@ "rootDir": "./src", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */ "sourceMap": true, "strict": true, /* Enable all strict type-checking options. */ - "noImplicitAny": false, /* Raise error on expressions and declarations with an implied 'any' type. */ + "noImplicitAny": false, /* Raise error on expressions and declarations with an implied 'any' type. */ "esModuleInterop": true /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. 
*/ }, "exclude": ["__tests__", "lib", "node_modules"] From a03d9f05e4a9f94563f3a9adf502086290e91629 Mon Sep 17 00:00:00 2001 From: Jef LeCompte Date: Wed, 29 Jun 2022 12:28:21 -0700 Subject: [PATCH 02/32] test: add node-version-file test --- __tests__/data/package.json | 11 +++++++++++ __tests__/installer.test.ts | 21 +++++++++++++++++++++ 2 files changed, 32 insertions(+) create mode 100644 __tests__/data/package.json diff --git a/__tests__/data/package.json b/__tests__/data/package.json new file mode 100644 index 00000000..9fdc65e0 --- /dev/null +++ b/__tests__/data/package.json @@ -0,0 +1,11 @@ +{ + "name": "test", + "version": "1.0.0", + "private": true, + "scripts": { + "test": "echo test" + }, + "volta": { + "node": "16.15.1" + } +} diff --git a/__tests__/installer.test.ts b/__tests__/installer.test.ts index 5a3ad553..5f72690d 100644 --- a/__tests__/installer.test.ts +++ b/__tests__/installer.test.ts @@ -584,6 +584,27 @@ describe('setup-node', () => { ); }); + it('reads node-version-file if provided with volta', async () => { + // Arrange + const expectedVersionSpec = '16.15.1'; + const versionFile = 'package.json'; + process.env['GITHUB_WORKSPACE'] = path.join(__dirname, 'data'); + inputs['node-version-file'] = 'volta'; + + existsSpy.mockImplementationOnce( + input => input === path.join(__dirname, 'data', versionFile) + ); + // Act + await main.run(); + + // Assert + expect(existsSpy).toHaveBeenCalledTimes(1); + expect(existsSpy).toHaveReturnedWith(true); + expect(logSpy).toHaveBeenCalledWith( + `Resolved ${versionFile} as ${expectedVersionSpec}` + ); + }); + it('both node-version-file and node-version are provided', async () => { inputs['node-version'] = '12'; const versionSpec = 'v14'; From f40b60859df12529fd3fc9d55e82790979c48393 Mon Sep 17 00:00:00 2001 From: Jef LeCompte Date: Wed, 29 Jun 2022 12:30:42 -0700 Subject: [PATCH 03/32] docs: add special volta usage --- docs/advanced-usage.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/docs/advanced-usage.md b/docs/advanced-usage.md index 36c1ec8b..3414c1df 100644 --- a/docs/advanced-usage.md +++ b/docs/advanced-usage.md @@ -56,8 +56,10 @@ steps: ## Node version file -The `node-version-file` input accepts a path to a file containing the version of Node.js to be used by a project, for example `.nvmrc` or `.node-version`. If both the `node-version` and the `node-version-file` inputs are provided then the `node-version` input is used. +The `node-version-file` input accepts a path to a file containing the version of Node.js to be used by a project, for example `.nvmrc` or `.node-version`. If both the `node-version` and the `node-version-file` inputs are provided then the `node-version` input is used. In the special case of using `volta`, the action will use `package.json` and look for the `volta.node` key. + See [supported version syntax](https://github.com/actions/setup-node#supported-version-syntax) + > The action will search for the node version file relative to the repository root. 
```yaml From 9aa86428fe409b3794b36f656716588d0fa19de8 Mon Sep 17 00:00:00 2001 From: Jef LeCompte Date: Wed, 29 Jun 2022 15:11:14 -0700 Subject: [PATCH 04/32] chore: run prettier --- __tests__/installer.test.ts | 4 ++-- src/main.ts | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/__tests__/installer.test.ts b/__tests__/installer.test.ts index 5f72690d..26cf7d7c 100644 --- a/__tests__/installer.test.ts +++ b/__tests__/installer.test.ts @@ -592,7 +592,7 @@ describe('setup-node', () => { inputs['node-version-file'] = 'volta'; existsSpy.mockImplementationOnce( - input => input === path.join(__dirname, 'data', versionFile) + input => input === path.join(__dirname, 'data', versionFile) ); // Act await main.run(); @@ -601,7 +601,7 @@ describe('setup-node', () => { expect(existsSpy).toHaveBeenCalledTimes(1); expect(existsSpy).toHaveReturnedWith(true); expect(logSpy).toHaveBeenCalledWith( - `Resolved ${versionFile} as ${expectedVersionSpec}` + `Resolved ${versionFile} as ${expectedVersionSpec}` ); }); diff --git a/src/main.ts b/src/main.ts index 40d1cc1a..4ad4e719 100644 --- a/src/main.ts +++ b/src/main.ts @@ -95,7 +95,7 @@ function resolveVersionInput(): string { version = JSON.parse(fs.readFileSync(versionFilePath, 'utf8')).volta.node; } else { version = installer.parseNodeVersionFile( - fs.readFileSync(versionFilePath, 'utf8') + fs.readFileSync(versionFilePath, 'utf8') ); } From dbfbe9b6dab362ff4d3dacc32758182a21c603bf Mon Sep 17 00:00:00 2001 From: Jef LeCompte Date: Wed, 29 Jun 2022 15:37:53 -0700 Subject: [PATCH 05/32] refactor: move volta logic --- __tests__/installer.test.ts | 24 ++++++++++++++++++++---- dist/setup/index.js | 18 +++++++++--------- src/installer.ts | 8 +++++++- src/main.ts | 14 ++++---------- 4 files changed, 40 insertions(+), 24 deletions(-) diff --git a/__tests__/installer.test.ts b/__tests__/installer.test.ts index 26cf7d7c..42c67603 100644 --- a/__tests__/installer.test.ts +++ b/__tests__/installer.test.ts @@ -560,7 +560,7 @@ describe('setup-node', () => { expect(parseNodeVersionSpy).toHaveBeenCalledTimes(0); }); - it('reads node-version-file if provided', async () => { + it('reads node-version-file if provided (.nvmrc)', async () => { // Arrange const versionSpec = 'v14'; const versionFile = '.nvmrc'; @@ -572,6 +572,7 @@ describe('setup-node', () => { existsSpy.mockImplementationOnce( input => input === path.join(__dirname, 'data', versionFile) ); + // Act await main.run(); @@ -584,22 +585,37 @@ describe('setup-node', () => { ); }); - it('reads node-version-file if provided with volta', async () => { + it('reads node-version-file if provided (package.json, volta)', async () => { // Arrange - const expectedVersionSpec = '16.15.1'; + const versionSpec = `{ + "name": "test", + "version": "1.0.0", + "private": true, + "scripts": { + "test": "echo test" + }, + "volta": { + "node": "16.15.1" + } +} +`; const versionFile = 'package.json'; + const expectedVersionSpec = '16.15.1'; process.env['GITHUB_WORKSPACE'] = path.join(__dirname, 'data'); - inputs['node-version-file'] = 'volta'; + inputs['node-version-file'] = versionFile; + parseNodeVersionSpy.mockImplementation(() => expectedVersionSpec); existsSpy.mockImplementationOnce( input => input === path.join(__dirname, 'data', versionFile) ); + // Act await main.run(); // Assert expect(existsSpy).toHaveBeenCalledTimes(1); expect(existsSpy).toHaveReturnedWith(true); + expect(parseNodeVersionSpy).toHaveBeenCalledWith(versionSpec); expect(logSpy).toHaveBeenCalledWith( `Resolved ${versionFile} as 
${expectedVersionSpec}` ); diff --git a/dist/setup/index.js b/dist/setup/index.js index cae0cf5e..0a98b4d2 100644 --- a/dist/setup/index.js +++ b/dist/setup/index.js @@ -71768,7 +71768,13 @@ function translateArchToDistUrl(arch) { } } function parseNodeVersionFile(contents) { - let nodeVersion = contents.trim(); + let nodeVersion; + if (contents.includes('volta')) { + nodeVersion = JSON.parse(contents).volta.node; + } + else { + nodeVersion = contents.trim(); + } if (/^v\d/.test(nodeVersion)) { nodeVersion = nodeVersion.substring(1); } @@ -71862,8 +71868,7 @@ function run() { exports.run = run; function resolveVersionInput() { let version = core.getInput('node-version'); - const nodeVersionFile = core.getInput('node-version-file'); - const versionFileInput = nodeVersionFile === 'volta' ? 'package.json' : nodeVersionFile; + const versionFileInput = core.getInput('node-version-file'); if (version && versionFileInput) { core.warning('Both node-version and node-version-file inputs are specified, only node-version will be used'); } @@ -71875,12 +71880,7 @@ function resolveVersionInput() { if (!fs_1.default.existsSync(versionFilePath)) { throw new Error(`The specified node version file at: ${versionFilePath} does not exist`); } - if (nodeVersionFile === 'volta') { - version = JSON.parse(fs_1.default.readFileSync(versionFilePath, 'utf8')).volta.node; - } - else { - version = installer.parseNodeVersionFile(fs_1.default.readFileSync(versionFilePath, 'utf8')); - } + version = installer.parseNodeVersionFile(fs_1.default.readFileSync(versionFilePath, 'utf8')); core.info(`Resolved ${versionFileInput} as ${version}`); } return version; diff --git a/src/installer.ts b/src/installer.ts index 97e3bcae..c9fd0d2c 100644 --- a/src/installer.ts +++ b/src/installer.ts @@ -495,7 +495,13 @@ function translateArchToDistUrl(arch: string): string { } export function parseNodeVersionFile(contents: string): string { - let nodeVersion = contents.trim(); + let nodeVersion; + + if (contents.includes('volta')) { + nodeVersion = JSON.parse(contents).volta.node; + } else { + nodeVersion = contents.trim(); + } if (/^v\d/.test(nodeVersion)) { nodeVersion = nodeVersion.substring(1); diff --git a/src/main.ts b/src/main.ts index 4ad4e719..42576375 100644 --- a/src/main.ts +++ b/src/main.ts @@ -65,9 +65,7 @@ export async function run() { function resolveVersionInput(): string { let version = core.getInput('node-version'); - const nodeVersionFile = core.getInput('node-version-file'); - const versionFileInput = - nodeVersionFile === 'volta' ? 
'package.json' : nodeVersionFile; + const versionFileInput = core.getInput('node-version-file'); if (version && versionFileInput) { core.warning( @@ -91,13 +89,9 @@ function resolveVersionInput(): string { ); } - if (nodeVersionFile === 'volta') { - version = JSON.parse(fs.readFileSync(versionFilePath, 'utf8')).volta.node; - } else { - version = installer.parseNodeVersionFile( - fs.readFileSync(versionFilePath, 'utf8') - ); - } + version = installer.parseNodeVersionFile( + fs.readFileSync(versionFilePath, 'utf8') + ); core.info(`Resolved ${versionFileInput} as ${version}`); } From 8211e009a789b906cb9b1473ee05b0a4f58edb16 Mon Sep 17 00:00:00 2001 From: Jef LeCompte Date: Wed, 29 Jun 2022 15:39:34 -0700 Subject: [PATCH 06/32] docs: update verbiage for package.json --- docs/advanced-usage.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/advanced-usage.md b/docs/advanced-usage.md index 3414c1df..17b46370 100644 --- a/docs/advanced-usage.md +++ b/docs/advanced-usage.md @@ -56,7 +56,7 @@ steps: ## Node version file -The `node-version-file` input accepts a path to a file containing the version of Node.js to be used by a project, for example `.nvmrc` or `.node-version`. If both the `node-version` and the `node-version-file` inputs are provided then the `node-version` input is used. In the special case of using `volta`, the action will use `package.json` and look for the `volta.node` key. +The `node-version-file` input accepts a path to a file containing the version of Node.js to be used by a project, for example `.nvmrc` or `.node-version`. If both the `node-version` and the `node-version-file` inputs are provided then the `node-version` input is used. In the special case of using `package.json`, the action will look for keys like `volta.node` to determine the version. 
See [supported version syntax](https://github.com/actions/setup-node#supported-version-syntax) From d86a20eb78d29d751b5f8366dbabb9b0adcda390 Mon Sep 17 00:00:00 2001 From: Jef LeCompte Date: Thu, 30 Jun 2022 09:17:18 -0700 Subject: [PATCH 07/32] test: fix versionSpec --- __tests__/installer.test.ts | 13 +------------ 1 file changed, 1 insertion(+), 12 deletions(-) diff --git a/__tests__/installer.test.ts b/__tests__/installer.test.ts index 42c67603..7166eb7d 100644 --- a/__tests__/installer.test.ts +++ b/__tests__/installer.test.ts @@ -587,19 +587,8 @@ describe('setup-node', () => { it('reads node-version-file if provided (package.json, volta)', async () => { // Arrange - const versionSpec = `{ - "name": "test", - "version": "1.0.0", - "private": true, - "scripts": { - "test": "echo test" - }, - "volta": { - "node": "16.15.1" - } -} -`; const versionFile = 'package.json'; + const versionSpec = fs.readFileSync(path.join(__dirname, 'data', versionFile), 'utf8'); const expectedVersionSpec = '16.15.1'; process.env['GITHUB_WORKSPACE'] = path.join(__dirname, 'data'); inputs['node-version-file'] = versionFile; From 1c48dc5a9ea481b0befae42489e0be5be5a17ad2 Mon Sep 17 00:00:00 2001 From: Jef LeCompte Date: Thu, 30 Jun 2022 09:22:35 -0700 Subject: [PATCH 08/32] chore: trim no matter what --- dist/setup/index.js | 5 +---- src/installer.ts | 5 ++--- 2 files changed, 3 insertions(+), 7 deletions(-) diff --git a/dist/setup/index.js b/dist/setup/index.js index 0a98b4d2..67c1d96d 100644 --- a/dist/setup/index.js +++ b/dist/setup/index.js @@ -71768,13 +71768,10 @@ function translateArchToDistUrl(arch) { } } function parseNodeVersionFile(contents) { - let nodeVersion; + let nodeVersion = contents.trim(); if (contents.includes('volta')) { nodeVersion = JSON.parse(contents).volta.node; } - else { - nodeVersion = contents.trim(); - } if (/^v\d/.test(nodeVersion)) { nodeVersion = nodeVersion.substring(1); } diff --git a/src/installer.ts b/src/installer.ts index c9fd0d2c..a56b20a4 100644 --- a/src/installer.ts +++ b/src/installer.ts @@ -495,17 +495,16 @@ function translateArchToDistUrl(arch: string): string { } export function parseNodeVersionFile(contents: string): string { - let nodeVersion; + let nodeVersion = contents.trim(); if (contents.includes('volta')) { nodeVersion = JSON.parse(contents).volta.node; - } else { - nodeVersion = contents.trim(); } if (/^v\d/.test(nodeVersion)) { nodeVersion = nodeVersion.substring(1); } + return nodeVersion; } From 4096f07b51bc632869fe5443f4103ae295d0b16c Mon Sep 17 00:00:00 2001 From: Jef LeCompte Date: Thu, 30 Jun 2022 09:57:00 -0700 Subject: [PATCH 09/32] chore: run prettier --- __tests__/installer.test.ts | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/__tests__/installer.test.ts b/__tests__/installer.test.ts index 7166eb7d..80e1d85b 100644 --- a/__tests__/installer.test.ts +++ b/__tests__/installer.test.ts @@ -588,7 +588,10 @@ describe('setup-node', () => { it('reads node-version-file if provided (package.json, volta)', async () => { // Arrange const versionFile = 'package.json'; - const versionSpec = fs.readFileSync(path.join(__dirname, 'data', versionFile), 'utf8'); + const versionSpec = fs.readFileSync( + path.join(__dirname, 'data', versionFile), + 'utf8' + ); const expectedVersionSpec = '16.15.1'; process.env['GITHUB_WORKSPACE'] = path.join(__dirname, 'data'); inputs['node-version-file'] = versionFile; From 57cec77d94c8910e67960b27121ac8f968c37ccc Mon Sep 17 00:00:00 2001 From: Jef LeCompte Date: Wed, 27 Jul 2022 14:25:39 -0700 Subject: 
[PATCH 10/32] refactor: volta check --- __tests__/data/package.json | 3 +++ __tests__/installer.test.ts | 28 ---------------------------- src/installer.ts | 2 +- 3 files changed, 4 insertions(+), 29 deletions(-) diff --git a/__tests__/data/package.json b/__tests__/data/package.json index e537e200..34063c3a 100644 --- a/__tests__/data/package.json +++ b/__tests__/data/package.json @@ -1,5 +1,8 @@ { "engines": { "node": ">=14.0.0" + }, + "volta": { + "node": "14.0.0" } } diff --git a/__tests__/installer.test.ts b/__tests__/installer.test.ts index cb6bdbe2..b21674f3 100644 --- a/__tests__/installer.test.ts +++ b/__tests__/installer.test.ts @@ -592,34 +592,6 @@ describe('setup-node', () => { ); }); - it('reads node-version-file if provided (package.json, volta)', async () => { - // Arrange - const versionFile = 'package.json'; - const versionSpec = fs.readFileSync( - path.join(__dirname, 'data', versionFile), - 'utf8' - ); - const expectedVersionSpec = '16.15.1'; - process.env['GITHUB_WORKSPACE'] = path.join(__dirname, 'data'); - inputs['node-version-file'] = versionFile; - - parseNodeVersionSpy.mockImplementation(() => expectedVersionSpec); - existsSpy.mockImplementationOnce( - input => input === path.join(__dirname, 'data', versionFile) - ); - - // Act - await main.run(); - - // Assert - expect(existsSpy).toHaveBeenCalledTimes(1); - expect(existsSpy).toHaveReturnedWith(true); - expect(parseNodeVersionSpy).toHaveBeenCalledWith(versionSpec); - expect(logSpy).toHaveBeenCalledWith( - `Resolved ${versionFile} as ${expectedVersionSpec}` - ); - }); - it('reads package.json as node-version-file if provided', async () => { // Arrange const versionSpec = fs.readFileSync( diff --git a/src/installer.ts b/src/installer.ts index 193ff16a..028ecb80 100644 --- a/src/installer.ts +++ b/src/installer.ts @@ -505,7 +505,7 @@ export function parseNodeVersionFile(contents: string): string { // Try parsing the file as an NPM `package.json` // file. nodeVersion = JSON.parse(contents).engines?.node; - + if (!nodeVersion) JSON.parse(contents).volta?.node; if (!nodeVersion) throw new Error(); } catch (err) { // In the case of an unknown format, From dee2a9689c0019eed93a012121de369ad6bb0b58 Mon Sep 17 00:00:00 2001 From: Jef LeCompte Date: Wed, 27 Jul 2022 14:26:16 -0700 Subject: [PATCH 11/32] chore: run build --- dist/setup/index.js | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/dist/setup/index.js b/dist/setup/index.js index 0a0c0bad..117dc50d 100644 --- a/dist/setup/index.js +++ b/dist/setup/index.js @@ -71768,7 +71768,7 @@ function translateArchToDistUrl(arch) { } } function parseNodeVersionFile(contents) { - var _a, _b; + var _a, _b, _c; let nodeVersion; const found = contents.match(/^(?:nodejs\s+)?v?(?[^\s]+)$/m); nodeVersion = (_a = found === null || found === void 0 ? void 0 : found.groups) === null || _a === void 0 ? void 0 : _a.version; @@ -71777,6 +71777,8 @@ function parseNodeVersionFile(contents) { // Try parsing the file as an NPM `package.json` // file. nodeVersion = (_b = JSON.parse(contents).engines) === null || _b === void 0 ? void 0 : _b.node; + if (!nodeVersion) + (_c = JSON.parse(contents).volta) === null || _c === void 0 ? 
void 0 : _c.node; if (!nodeVersion) throw new Error(); } From 9f20343a3a8f4c58491c4056e96564ac052f61fe Mon Sep 17 00:00:00 2001 From: Jef LeCompte Date: Wed, 27 Jul 2022 14:27:21 -0700 Subject: [PATCH 12/32] style: change test name --- __tests__/installer.test.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/__tests__/installer.test.ts b/__tests__/installer.test.ts index b21674f3..5cf55f77 100644 --- a/__tests__/installer.test.ts +++ b/__tests__/installer.test.ts @@ -567,7 +567,7 @@ describe('setup-node', () => { expect(parseNodeVersionSpy).toHaveBeenCalledTimes(0); }); - it('reads node-version-file if provided (.nvmrc)', async () => { + it('reads node-version-file if provided', async () => { // Arrange const versionSpec = 'v14'; const versionFile = '.nvmrc'; From 5b579f16381b804435968b7cb2131868c08c0350 Mon Sep 17 00:00:00 2001 From: Jef LeCompte Date: Wed, 27 Jul 2022 14:31:06 -0700 Subject: [PATCH 13/32] refactor: check volta first, then engine --- src/installer.ts | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/src/installer.ts b/src/installer.ts index 028ecb80..e4dde61e 100644 --- a/src/installer.ts +++ b/src/installer.ts @@ -502,10 +502,9 @@ export function parseNodeVersionFile(contents: string): string { if (!nodeVersion) { try { - // Try parsing the file as an NPM `package.json` - // file. - nodeVersion = JSON.parse(contents).engines?.node; - if (!nodeVersion) JSON.parse(contents).volta?.node; + // Try parsing the file as an NPM `package.json` file. + nodeVersion = JSON.parse(contents).volta?.node; + if (!nodeVersion)nodeVersion = JSON.parse(contents).engines?.node; if (!nodeVersion) throw new Error(); } catch (err) { // In the case of an unknown format, From bb59d50268bbdb3cf44cf12e6a6594b4980affd4 Mon Sep 17 00:00:00 2001 From: Jef LeCompte Date: Wed, 27 Jul 2022 14:35:09 -0700 Subject: [PATCH 14/32] chore: run prettier --- dist/setup/index.js | 7 +++---- src/installer.ts | 2 +- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/dist/setup/index.js b/dist/setup/index.js index 117dc50d..84095537 100644 --- a/dist/setup/index.js +++ b/dist/setup/index.js @@ -71774,11 +71774,10 @@ function parseNodeVersionFile(contents) { nodeVersion = (_a = found === null || found === void 0 ? void 0 : found.groups) === null || _a === void 0 ? void 0 : _a.version; if (!nodeVersion) { try { - // Try parsing the file as an NPM `package.json` - // file. - nodeVersion = (_b = JSON.parse(contents).engines) === null || _b === void 0 ? void 0 : _b.node; + // Try parsing the file as an NPM `package.json` file. + nodeVersion = (_b = JSON.parse(contents).volta) === null || _b === void 0 ? void 0 : _b.node; if (!nodeVersion) - (_c = JSON.parse(contents).volta) === null || _c === void 0 ? void 0 : _c.node; + nodeVersion = (_c = JSON.parse(contents).engines) === null || _c === void 0 ? void 0 : _c.node; if (!nodeVersion) throw new Error(); } diff --git a/src/installer.ts b/src/installer.ts index e4dde61e..6414bfa9 100644 --- a/src/installer.ts +++ b/src/installer.ts @@ -504,7 +504,7 @@ export function parseNodeVersionFile(contents: string): string { try { // Try parsing the file as an NPM `package.json` file. 
nodeVersion = JSON.parse(contents).volta?.node; - if (!nodeVersion)nodeVersion = JSON.parse(contents).engines?.node; + if (!nodeVersion) nodeVersion = JSON.parse(contents).engines?.node; if (!nodeVersion) throw new Error(); } catch (err) { // In the case of an unknown format, From 18090dee772fb76ecc54b4a5cd938fd7c7b56d6c Mon Sep 17 00:00:00 2001 From: Evgenii Korolevskii Date: Mon, 1 Aug 2022 01:24:36 +0200 Subject: [PATCH 15/32] updated docs --- docs/contributors.md | 124 +++++++++++++++++++++++++++++++++++++------ 1 file changed, 109 insertions(+), 15 deletions(-) diff --git a/docs/contributors.md b/docs/contributors.md index fece2ea2..5af46076 100644 --- a/docs/contributors.md +++ b/docs/contributors.md @@ -1,22 +1,116 @@ + # Contributors -### Checkin +Thank you for contributing! -- Do checkin source (src) -- Do checkin build output (lib) -- Do checkin runtime node_modules -- Do not checkin devDependency node_modules (husky can help see below) +We have prepared a short guide so that the process of making your contribution is as simple and clear as possible. Please check it out before you contribute! -### devDependencies +## How can I contribute... -In order to handle correctly checking in node_modules without devDependencies, we run [Husky](https://github.com/typicode/husky) before each commit. -This step ensures that formatting and checkin rules are followed and that devDependencies are excluded. To make sure Husky runs correctly, please use the following workflow: +* [Contribute Documentation:green_book:](#contribute-documentation) -``` -npm install # installs all devDependencies including Husky -git add abc.ext # Add the files you've changed. This should include files in src, lib, and node_modules (see above) -git commit -m "Informative commit message" # Commit. This will run Husky -``` +* [Contribute Code :computer:](#contribute-code) -During the commit step, Husky will take care of formatting all files with [Prettier](https://github.com/prettier/prettier) as well as pruning out devDependencies using `npm prune --production`. -It will also make sure these changes are appropriately included in your commit (no further work is needed) \ No newline at end of file +* [Provide Support on Issues:pencil:](#provide-support-on-issues) + +* [Review Pull Requests:mag:](#review-pull-requests) + +## Contribute documentation + +Documentation is a super important, critical part of this project. Docs are how we keep track of what we're doing, how, and why. It's how we stay on the same page about our policies and how we tell others everything they need to be able to use this project or contribute to it. + +Documentation contributions of any size are welcome! Feel free to contribute even if you're just rewording a sentence to be more clear, or fixing a spelling mistake! + +**How to contribute:** + +Pull requests are the easiest way to contribute changes to git repos at GitHub. They are the preferred contribution method, as they offer a nice way of commenting and amending the proposed changes. + +- Please check that no one else has already created a pull request with these changes +- Use a "feature branch" for your changes. 
That separates the changes in the pull request from your other changes and makes it easy to edit/amend commits in the pull request +- Make sure your changes are formatted correctly and consistently with the rest of the documentation +- Re-read what you wrote, and run a spellchecker on it to make sure you didn't miss anything +- If your pull request is connected to an open issue, please, leave a link to this issue in the `Related issue:` section +- If you later need to add new commits to the pull request, you can simply commit the changes to the local branch and then push them. The pull request gets automatically updated + +**Once you've filed the pull request:** + +- Maintainers will review your pull request +- If a maintainer requests changes, first of all, try to think about this request critically and only after that implement and request another review +- If your PR gets accepted, it will soon be merged into the main branch. But your contribution will take effect only after the release of a new version of the action +> Sometimes maintainers reject pull requests and that's ok! Usually, along with rejection, we supply the reason for it. Nonetheless, we still really appreciate you taking the time to do it, and we don't take that lightly :heart: + +## Contribute code + +We like code commits a lot! They're super handy, and they keep the project going and doing the work it needs to do to be useful to others. + +Code contributions of just about any size are acceptable! + +The main difference between code contributions and documentation contributions is that contributing code requires the inclusion of relevant tests for the code being added or changed. Contributions without accompanying tests will be held off until a test is added unless the maintainers consider the specific tests to be either impossible or way too much of a burden for such a contribution. + +**How to contribute:** + +Pull requests are the easiest way to contribute changes to git repos at GitHub. They are the preferred contribution method, as they offer a nice way of commenting and amending the proposed changes. + +- Please check that no one else has already created a pull request with these changes +- Use a "feature branch" for your changes. That separates the changes in the pull request from your other changes and makes it easy to edit/amend commits in the pull request +- **Run `pre-checkin` script to format, build and test changes** +- Make sure your changes are well formatted and that all tests are passing +- If your pull request is connected to an open issue, please, leave a link to this issue in the `Related issue:` section +- If you later need to add new commits to the pull request, you can simply commit the changes to the local branch and then push them. The pull request gets automatically updated + +**Learn more about how to work with the repository:** + +- To implement new features or fix bugs, you need to make changes to the `.ts` files, which are located in the `src` folder +- To comply with the code style, **you need to run the `format` script** +- To transpile source code to `javascript` we use [NCC](https://github.com/vercel/ncc). **It is very important to run the `build` script after making changes**, otherwise your changes will not get into the final `javascript` build +- You can also start formatting, building code, and testing with a single `pre-checkin` command + +**Learn more about how to implement tests:** + +Adding or changing tests is an integral part of making a change to the code. 
+Unit tests are in the `__tests__` folder, and end-to-end tests are in the `workflows` folder (in particular, in the file [e2e-cache.yml](https://github.com/actions/setup-node/blob/main/.github/workflows/e2e-cache.yml)). + +- The contributor can add various types of tests (like unit tests or end-to-end tests), which, in his opinion, will be necessary and sufficient for testing new or changed functionality +- Tests should cover a successful execution, as well as some edge cases and possible errors +- As already mentioned, pull requests without tests will be considered more carefully by maintainers. If you are sure that in this situation the tests are not needed or cannot be implemented with a commensurate effort - please add this clarification message to your pull request + +**Once you've filed the pull request:** + +- CI will start automatically with some checks. Wait until the end of the execution and make sure that all checks passed successfully. If some checks fail, you can open them one by one, try to find the reason for failing and make changes to your code to resolve the problem +- Maintainers will review your pull request +- If a maintainer requests changes, first of all, try to think about his request critically and only after that implement and request another review +- If your PR gets accepted, it will soon be merged into the main branch. But your contribution will take effect only after the release of a new version of the action +> Sometimes maintainers reject pull requests and that's ok! Usually, along with rejection, we supply the reason for it. Nonetheless, we still really appreciate you taking the time to do it, and we don't take that lightly :heart: + +## Provide support on issues + +Helping out other users with their questions is an awesome way of contributing to any community. It's not uncommon for most of the issues on open source projects to be support-related questions by users trying to understand something they ran into or find their way around a known bug. + +**To help other folks out with their questions:** + +- Go to the [issue tracker](https://github.com/actions/setup-node/issues) +- Read through the list until you find something that you're familiar enough with to answer to +- Respond to the issue with whatever details are needed to clarify the question, or get more details about what's going on +- Once the discussion wraps up and things are clarified, ask the original issue filer (or a maintainer) to close it for you + +*Some notes on picking up support issues:* + +- Avoid responding to issues you don't know you can answer accurately +- Try to refer to past issues with accepted answers as much as possible. Link to them from your replies +- Be kind and patient with users. Often, folks who have run into confusing things might be upset or impatient. This is natural. If you feel uncomfortable in conversation with them, it's better to stay away or withdraw from the issue. + + > If some user is violating our code of conduct [standards](https://github.com/actions/setup-node/blob/main/CODE_OF_CONDUCT.md#our-standards), refer to the [Enforcement](https://github.com/actions/setup-node/blob/main/CODE_OF_CONDUCT.md#enforcement) section of the Code of Conduct to resolve the conflict + + +## Review pull requests + + +Another great way to contribute is pull request reviews. 
Please, be extra kind: people who submit code/doc contributions are putting themselves in a pretty vulnerable position, and have put time and care into what they've done (even if that's not obvious to you!) Please, always respond with respect, and be understanding, but don't feel like you need to sacrifice your standards for their sake, either. + +**How to review:** + +- Go to the [pull requests](https://github.com/actions/setup-node/pulls) +- Make sure you're familiar with the code or documentation is updated, unless it's a minor change (spellchecking, minor formatting, etc.) +- Review changes using the GitHub functionality. You can ask a clarifying question, point out an error or suggest an alternative. +> Note: You may ask for minor changes - "nitpicks", but consider whether they are real blockers to merging or not +- Submit your review, which may include comments, an approval, or a changes request \ No newline at end of file From 0d3aa68dd378dc694aff6c2ac613322fdc8d2ca4 Mon Sep 17 00:00:00 2001 From: Evgenii Korolevskii Date: Mon, 1 Aug 2022 01:27:02 +0200 Subject: [PATCH 16/32] format --- docs/contributors.md | 1 - 1 file changed, 1 deletion(-) diff --git a/docs/contributors.md b/docs/contributors.md index 5af46076..f8a13efb 100644 --- a/docs/contributors.md +++ b/docs/contributors.md @@ -1,4 +1,3 @@ - # Contributors Thank you for contributing! From 5d6bb1273a34ad0caa868b9c415912ad61dbb5bc Mon Sep 17 00:00:00 2001 From: Evgenii Korolevskii Date: Wed, 3 Aug 2022 19:26:34 +0200 Subject: [PATCH 17/32] rephased documents --- docs/contributors.md | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/docs/contributors.md b/docs/contributors.md index f8a13efb..ecd40ecb 100644 --- a/docs/contributors.md +++ b/docs/contributors.md @@ -22,11 +22,11 @@ Documentation contributions of any size are welcome! Feel free to contribute eve **How to contribute:** -Pull requests are the easiest way to contribute changes to git repos at GitHub. They are the preferred contribution method, as they offer a nice way of commenting and amending the proposed changes. +Pull requests are the easiest way to contribute changes to git repos at GitHub. They are the preferred contribution method, as they offer a convenient way of commenting and amending the proposed changes. -- Please check that no one else has already created a pull request with these changes +- Please check that no one else has already created a pull request with these or similar changes - Use a "feature branch" for your changes. That separates the changes in the pull request from your other changes and makes it easy to edit/amend commits in the pull request -- Make sure your changes are formatted correctly and consistently with the rest of the documentation +- Make sure your changes are formatted properly and consistently with the rest of the documentation - Re-read what you wrote, and run a spellchecker on it to make sure you didn't miss anything - If your pull request is connected to an open issue, please, leave a link to this issue in the `Related issue:` section - If you later need to add new commits to the pull request, you can simply commit the changes to the local branch and then push them. The pull request gets automatically updated @@ -35,7 +35,7 @@ Pull requests are the easiest way to contribute changes to git repos at GitHub. 
- Maintainers will review your pull request - If a maintainer requests changes, first of all, try to think about this request critically and only after that implement and request another review -- If your PR gets accepted, it will soon be merged into the main branch. But your contribution will take effect only after the release of a new version of the action +- If your PR gets accepted, it will soon be merged into the main branch. But your contribution will take effect only after the release of a new version of the action and updating the major tag > Sometimes maintainers reject pull requests and that's ok! Usually, along with rejection, we supply the reason for it. Nonetheless, we still really appreciate you taking the time to do it, and we don't take that lightly :heart: ## Contribute code @@ -48,9 +48,9 @@ The main difference between code contributions and documentation contributions i **How to contribute:** -Pull requests are the easiest way to contribute changes to git repos at GitHub. They are the preferred contribution method, as they offer a nice way of commenting and amending the proposed changes. +Pull requests are the easiest way to contribute changes to git repos at GitHub. They are the preferred contribution method, as they offer a convenient way of commenting and amending the proposed changes. -- Please check that no one else has already created a pull request with these changes +- Please check that no one else has already created a pull request with these or similar changes - Use a "feature branch" for your changes. That separates the changes in the pull request from your other changes and makes it easy to edit/amend commits in the pull request - **Run `pre-checkin` script to format, build and test changes** - Make sure your changes are well formatted and that all tests are passing @@ -67,7 +67,7 @@ Pull requests are the easiest way to contribute changes to git repos at GitHub. **Learn more about how to implement tests:** Adding or changing tests is an integral part of making a change to the code. -Unit tests are in the `__tests__` folder, and end-to-end tests are in the `workflows` folder (in particular, in the file [e2e-cache.yml](https://github.com/actions/setup-node/blob/main/.github/workflows/e2e-cache.yml)). +Unit tests are in the `__tests__` folder, and end-to-end tests are in the `workflows` folder (in the particular file [e2e-cache.yml](https://github.com/actions/setup-node/blob/main/.github/workflows/e2e-cache.yml)). - The contributor can add various types of tests (like unit tests or end-to-end tests), which, in his opinion, will be necessary and sufficient for testing new or changed functionality - Tests should cover a successful execution, as well as some edge cases and possible errors @@ -78,7 +78,7 @@ Unit tests are in the `__tests__` folder, and end-to-end tests are in the `workf - CI will start automatically with some checks. Wait until the end of the execution and make sure that all checks passed successfully. If some checks fail, you can open them one by one, try to find the reason for failing and make changes to your code to resolve the problem - Maintainers will review your pull request - If a maintainer requests changes, first of all, try to think about his request critically and only after that implement and request another review -- If your PR gets accepted, it will soon be merged into the main branch. 
But your contribution will take effect only after the release of a new version of the action +- If your PR gets accepted, it will soon be merged into the main branch. But your contribution will take effect only after the release of a new version of the action and updating the major tag > Sometimes maintainers reject pull requests and that's ok! Usually, along with rejection, we supply the reason for it. Nonetheless, we still really appreciate you taking the time to do it, and we don't take that lightly :heart: ## Provide support on issues @@ -104,7 +104,7 @@ Helping out other users with their questions is an awesome way of contributing t ## Review pull requests -Another great way to contribute is pull request reviews. Please, be extra kind: people who submit code/doc contributions are putting themselves in a pretty vulnerable position, and have put time and care into what they've done (even if that's not obvious to you!) Please, always respond with respect, and be understanding, but don't feel like you need to sacrifice your standards for their sake, either. +Another great way to contribute is to review pull requests. Please, be extra kind: people who submit code/doc contributions are putting themselves in a pretty vulnerable position, and have put time and care into what they've done (even if that's not obvious to you!) Please, always respond with respect, and be understanding, but don't feel like you need to sacrifice your standards for their sake, either. **How to review:** From d79e93a91d48dfaebe7232991fc16e6886025509 Mon Sep 17 00:00:00 2001 From: Jef LeCompte Date: Wed, 3 Aug 2022 16:35:57 -0700 Subject: [PATCH 18/32] docs: add package.json information --- docs/advanced-usage.md | 19 ++++++++++++++++--- 1 file changed, 16 insertions(+), 3 deletions(-) diff --git a/docs/advanced-usage.md b/docs/advanced-usage.md index f795d215..9c9ec75b 100644 --- a/docs/advanced-usage.md +++ b/docs/advanced-usage.md @@ -48,7 +48,7 @@ steps: - uses: actions/checkout@v3 - uses: actions/setup-node@v3 with: - node-version: '14' + node-version: '16' check-latest: true - run: npm ci - run: npm test @@ -56,8 +56,8 @@ steps: ## Node version file -The `node-version-file` input accepts a path to a file containing the version of Node.js to be used by a project, for example `.nvmrc`, `.node-version` or `.tool-versions`. If both the `node-version` and the `node-version-file` inputs are provided then the `node-version` input is used. -See [supported version syntax](https://github.com/actions/setup-node#supported-version-syntax) +The `node-version-file` input accepts a path to a file containing the version of Node.js to be used by a project, for example `.nvmrc`, `.node-version`, `.tool-versions`, or `package.json`. If both the `node-version` and the `node-version-file` inputs are provided then the `node-version` input is used. +See [supported version syntax](https://github.com/actions/setup-node#supported-version-syntax). > The action will search for the node version file relative to the repository root. @@ -71,6 +71,19 @@ steps: - run: npm test ``` +When using the `package.json` input, the action will look for `volta.node` first. If `volta.node` isn't defined, then it will look for `engines.node`. 
+ +```json +{ + "engines": { + "node": ">=16.0.0" + }, + "volta": { + "node": "16.0.0" + } +} +``` + ## Architecture You can use any of the [supported operating systems](https://docs.github.com/en/actions/reference/virtual-environments-for-github-hosted-runners), and the compatible `architecture` can be selected using `architecture`. Values are `x86`, `x64`, `arm64`, `armv6l`, `armv7l`, `ppc64le`, `s390x` (not all of the architectures are available on all platforms). From 348e00800850ca70958af8888ebdb3cce7cbcddc Mon Sep 17 00:00:00 2001 From: Vladimir Safonkin Date: Thu, 4 Aug 2022 09:57:19 +0200 Subject: [PATCH 19/32] Fix node version file parsing (#553) * Fix node version file parsing * Build index.js * Non-json file error handling * Format code * Add package.json to e2e tests * Minor fix --- .github/workflows/versions.yml | 2 +- __tests__/data/package.json | 2 +- dist/setup/index.js | 15 ++++++++++----- src/installer.ts | 14 +++++++++----- 4 files changed, 21 insertions(+), 12 deletions(-) diff --git a/.github/workflows/versions.yml b/.github/workflows/versions.yml index 1af0c125..9422a111 100644 --- a/.github/workflows/versions.yml +++ b/.github/workflows/versions.yml @@ -92,7 +92,7 @@ jobs: fail-fast: false matrix: os: [ubuntu-latest, windows-latest, macos-latest] - node-version-file: [.nvmrc, .tool-versions] + node-version-file: [.nvmrc, .tool-versions, package.json] steps: - uses: actions/checkout@v3 - name: Setup node from node version file diff --git a/__tests__/data/package.json b/__tests__/data/package.json index e537e200..b201009d 100644 --- a/__tests__/data/package.json +++ b/__tests__/data/package.json @@ -1,5 +1,5 @@ { "engines": { - "node": ">=14.0.0" + "node": "^14.0.0" } } diff --git a/dist/setup/index.js b/dist/setup/index.js index 0a0c0bad..0407c1ce 100644 --- a/dist/setup/index.js +++ b/dist/setup/index.js @@ -71770,13 +71770,18 @@ function translateArchToDistUrl(arch) { function parseNodeVersionFile(contents) { var _a, _b; let nodeVersion; - const found = contents.match(/^(?:nodejs\s+)?v?(?[^\s]+)$/m); - nodeVersion = (_a = found === null || found === void 0 ? void 0 : found.groups) === null || _a === void 0 ? void 0 : _a.version; + // Try parsing the file as an NPM `package.json` + // file. + try { + nodeVersion = (_a = JSON.parse(contents).engines) === null || _a === void 0 ? void 0 : _a.node; + } + catch (_c) { + core.warning('Node version file is not JSON file'); + } if (!nodeVersion) { try { - // Try parsing the file as an NPM `package.json` - // file. - nodeVersion = (_b = JSON.parse(contents).engines) === null || _b === void 0 ? void 0 : _b.node; + const found = contents.match(/^(?:nodejs\s+)?v?(?[^\s]+)$/m); + nodeVersion = (_b = found === null || found === void 0 ? void 0 : found.groups) === null || _b === void 0 ? void 0 : _b.version; if (!nodeVersion) throw new Error(); } diff --git a/src/installer.ts b/src/installer.ts index 193ff16a..c74fbc97 100644 --- a/src/installer.ts +++ b/src/installer.ts @@ -497,14 +497,18 @@ function translateArchToDistUrl(arch: string): string { export function parseNodeVersionFile(contents: string): string { let nodeVersion: string | undefined; - const found = contents.match(/^(?:nodejs\s+)?v?(?[^\s]+)$/m); - nodeVersion = found?.groups?.version; + // Try parsing the file as an NPM `package.json` + // file. + try { + nodeVersion = JSON.parse(contents).engines?.node; + } catch { + core.warning('Node version file is not JSON file'); + } if (!nodeVersion) { try { - // Try parsing the file as an NPM `package.json` - // file. 
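The `node-version-file` behaviour documented above, together with the parser change in this patch and the refactor that follows, amounts to a three-step resolution order: try the file as JSON (a `package.json`, with `volta.node` consulted before `engines.node` per the docs), then fall back to the `.nvmrc`/`.tool-versions` single-line format, and finally use the trimmed file contents as-is. The following is a minimal standalone sketch of that order, not the action's actual implementation; the helper name is invented, and the `volta.node` lookup is assumed from the documentation above rather than from the hunks shown in this patch.

```ts
// Illustrative sketch of the resolution order described above (not the real installer code).
export function resolveNodeVersion(contents: string): string {
  let nodeVersion: string | undefined;

  // 1. Try the file as JSON (e.g. package.json): volta.node first, then engines.node.
  //    The volta.node step is an assumption taken from the docs, not from this patch.
  try {
    const manifest = JSON.parse(contents);
    nodeVersion = manifest.volta?.node ?? manifest.engines?.node;
  } catch {
    // Not a JSON file; fall through to the plain-text formats.
  }

  // 2. Try the .nvmrc / .tool-versions style line format, e.g. "v16.0.0" or "nodejs 16.0.0".
  if (!nodeVersion) {
    const found = contents.match(/^(?:nodejs\s+)?v?(?<version>[^\s]+)$/m);
    nodeVersion = found?.groups?.version;
  }

  // 3. Unknown format: return the trimmed contents and let the caller
  //    evaluate the version separately.
  return nodeVersion ?? contents.trim();
}
```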
- nodeVersion = JSON.parse(contents).engines?.node; + const found = contents.match(/^(?:nodejs\s+)?v?(?[^\s]+)$/m); + nodeVersion = found?.groups?.version; if (!nodeVersion) throw new Error(); } catch (err) { From 3a1b76e7820db63bd03cb94b042dae077a914fc9 Mon Sep 17 00:00:00 2001 From: Jef LeCompte Date: Thu, 4 Aug 2022 09:33:09 -0700 Subject: [PATCH 20/32] refactor: remove locally caught exceptions --- dist/setup/index.js | 17 ++++++----------- src/installer.ts | 16 ++++++---------- 2 files changed, 12 insertions(+), 21 deletions(-) diff --git a/dist/setup/index.js b/dist/setup/index.js index efd8162a..684be490 100644 --- a/dist/setup/index.js +++ b/dist/setup/index.js @@ -71780,18 +71780,13 @@ function parseNodeVersionFile(contents) { core.warning('Node version file is not JSON file'); } if (!nodeVersion) { - try { - const found = contents.match(/^(?:nodejs\s+)?v?(?[^\s]+)$/m); - nodeVersion = (_c = found === null || found === void 0 ? void 0 : found.groups) === null || _c === void 0 ? void 0 : _c.version; - if (!nodeVersion) - throw new Error(); - } - catch (err) { - // In the case of an unknown format, - // return as is and evaluate the version separately. - nodeVersion = contents.trim(); - } + const found = contents.match(/^(?:nodejs\s+)?v?(?[^\s]+)$/m); + nodeVersion = (_c = found === null || found === void 0 ? void 0 : found.groups) === null || _c === void 0 ? void 0 : _c.version; } + // In the case of an unknown format, + // return as is and evaluate the version separately. + if (!nodeVersion) + nodeVersion = contents.trim(); return nodeVersion; } exports.parseNodeVersionFile = parseNodeVersionFile; diff --git a/src/installer.ts b/src/installer.ts index 1e5e5623..bd9c8423 100644 --- a/src/installer.ts +++ b/src/installer.ts @@ -506,18 +506,14 @@ export function parseNodeVersionFile(contents: string): string { } if (!nodeVersion) { - try { - const found = contents.match(/^(?:nodejs\s+)?v?(?[^\s]+)$/m); - nodeVersion = found?.groups?.version; - - if (!nodeVersion) throw new Error(); - } catch (err) { - // In the case of an unknown format, - // return as is and evaluate the version separately. - nodeVersion = contents.trim(); - } + const found = contents.match(/^(?:nodejs\s+)?v?(?[^\s]+)$/m); + nodeVersion = found?.groups?.version; } + // In the case of an unknown format, + // return as is and evaluate the version separately. + if (!nodeVersion) nodeVersion = contents.trim(); + return nodeVersion as string; } From dbb54d08f2fd255751eb4ada17574f3caee8f5fc Mon Sep 17 00:00:00 2001 From: Evgenii Korolevskii <102794661+e-korolevskii@users.noreply.github.com> Date: Tue, 9 Aug 2022 23:56:01 +0200 Subject: [PATCH 21/32] Update docs/contributors.md Co-authored-by: Ivan <98037481+IvanZosimov@users.noreply.github.com> --- docs/contributors.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/contributors.md b/docs/contributors.md index ecd40ecb..7289d2a7 100644 --- a/docs/contributors.md +++ b/docs/contributors.md @@ -67,7 +67,7 @@ Pull requests are the easiest way to contribute changes to git repos at GitHub. **Learn more about how to implement tests:** Adding or changing tests is an integral part of making a change to the code. -Unit tests are in the `__tests__` folder, and end-to-end tests are in the `workflows` folder (in the particular file [e2e-cache.yml](https://github.com/actions/setup-node/blob/main/.github/workflows/e2e-cache.yml)). 
+Unit tests are in the `__tests__` folder, and end-to-end tests are in the `workflows` folder, particularly in the [versions.yml](https://github.com/e-korolevskii/setup-node/blob/update-contributors-guide/.github/workflows/versions.yml) and [e2e-cache.yml](https://github.com/actions/setup-node/blob/main/.github/workflows/e2e-cache.yml) files. - The contributor can add various types of tests (like unit tests or end-to-end tests), which, in his opinion, will be necessary and sufficient for testing new or changed functionality - Tests should cover a successful execution, as well as some edge cases and possible errors From 089aa7ea918421102c1e1dfa8ef95982aee6b0af Mon Sep 17 00:00:00 2001 From: Sergey Dolin Date: Fri, 19 Aug 2022 12:21:26 +0200 Subject: [PATCH 22/32] Add caveat for Yarn 2+ and private repos --- docs/advanced-usage.md | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/docs/advanced-usage.md b/docs/advanced-usage.md index 0cf12ca4..da5397be 100644 --- a/docs/advanced-usage.md +++ b/docs/advanced-usage.md @@ -247,5 +247,25 @@ steps: # `npm rebuild` will run all those post-install scripts for us. - run: npm rebuild && npm run prepare --if-present ``` +### Yarn2 configuration +Yarn2 ignores both .npmrc and .yarnrc files created by the action, so before installing dependencies from the private repo it is necessary either to create or to modify existing yarnrc.yml file with `yarn config set` commands. +Below there's a sample "Setup .yarnrc.yml" step to configure private github registy for `my-org` organisation. + +```yaml +steps: +- uses: actions/checkout@v3 +- uses: actions/setup-node@v3 + with: + node-version: '14.x' +- name: Setup .yarnrc.yml + run: | + yarn config set npmScopes.my-org.npmRegistryServer "https://npm.pkg.github.com" + yarn config set npmScopes.my-org.npmAlwaysAuth true + yarn config set npmScopes.my-org.npmAuthToken $NPM_AUTH_TOKEN + env: + NPM_AUTH_TOKEN: ${{ secrets.YARN_TOKEN }} +- name: Install dependencies + run: yarn install --immutable +``` NOTE: As per https://github.com/actions/setup-node/issues/49 you cannot use `secrets.GITHUB_TOKEN` to access private GitHub Packages within the same organisation but in a different repository. From 792255d078993de3ffd3641db1e5994ca1cef656 Mon Sep 17 00:00:00 2001 From: Sergey Dolin Date: Mon, 22 Aug 2022 08:20:41 +0200 Subject: [PATCH 23/32] Fix wording --- docs/advanced-usage.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/advanced-usage.md b/docs/advanced-usage.md index da5397be..00f4198d 100644 --- a/docs/advanced-usage.md +++ b/docs/advanced-usage.md @@ -250,7 +250,7 @@ steps: ### Yarn2 configuration Yarn2 ignores both .npmrc and .yarnrc files created by the action, so before installing dependencies from the private repo it is necessary either to create or to modify existing yarnrc.yml file with `yarn config set` commands. -Below there's a sample "Setup .yarnrc.yml" step to configure private github registy for `my-org` organisation. +Below you can find a sample "Setup .yarnrc.yml" step, that is going to allow you to configure a private GitHub registry for 'my-org' organisation. 
```yaml steps: From 32f78d9bfa2231a19189276b12cbe92b30c98872 Mon Sep 17 00:00:00 2001 From: Larissa Fortuna <56982181+lkfortuna@users.noreply.github.com> Date: Thu, 25 Aug 2022 15:35:44 -0700 Subject: [PATCH 24/32] Update README.md Updating term "virtual environments" to "runner images" and updating links --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 6f44fdc6..c3d07869 100644 --- a/README.md +++ b/README.md @@ -31,7 +31,7 @@ The `node-version` input is optional. If not supplied, the node version from PAT The action will first check the local cache for a semver match. If unable to find a specific version in the cache, the action will attempt to download a version of Node.js. It will pull LTS versions from [node-versions releases](https://github.com/actions/node-versions/releases) and on miss or failure will fall back to the previous behavior of downloading directly from [node dist](https://nodejs.org/dist/). -For information regarding locally cached versions of Node.js on GitHub hosted runners, check out [GitHub Actions Virtual Environments](https://github.com/actions/virtual-environments). +For information regarding locally cached versions of Node.js on GitHub hosted runners, check out [GitHub Actions Runner Images](https://github.com/actions/runner-images). ### Supported version syntax @@ -44,7 +44,7 @@ Examples: - NVM LTS syntax: `lts/erbium`, `lts/fermium`, `lts/*`, `lts/-n` - Latest release: `*` or `latest`/`current`/`node` -**Note:** Like the other values, `*` will get the latest [locally-cached Node.js version](https://github.com/actions/virtual-environments/blob/main/images/linux/Ubuntu2004-Readme.md#nodejs), or the latest version from [actions/node-versions](https://github.com/actions/node-versions/blob/main/versions-manifest.json), depending on the [`check-latest`](docs/advanced-usage.md#check-latest-version) input. +**Note:** Like the other values, `*` will get the latest [locally-cached Node.js version](https://github.com/actions/runner-images/blob/main/images/linux/Ubuntu2204-Readme.md#nodejs), or the latest version from [actions/node-versions](https://github.com/actions/node-versions/blob/main/versions-manifest.json), depending on the [`check-latest`](docs/advanced-usage.md#check-latest-version) input. `current`/`latest`/`node` always resolve to the latest [dist version](https://nodejs.org/dist/index.json). That version is then downloaded from actions/node-versions if possible, or directly from Node.js if not. From dc62cc63a810be232b1515126bf96939fb1c4ba4 Mon Sep 17 00:00:00 2001 From: Aleksandr Chebotov Date: Mon, 29 Aug 2022 16:10:46 +0200 Subject: [PATCH 25/32] Update runner link --- docs/advanced-usage.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/advanced-usage.md b/docs/advanced-usage.md index 00f4198d..8af1b242 100644 --- a/docs/advanced-usage.md +++ b/docs/advanced-usage.md @@ -72,7 +72,7 @@ steps: ## Architecture -You can use any of the [supported operating systems](https://docs.github.com/en/actions/reference/virtual-environments-for-github-hosted-runners), and the compatible `architecture` can be selected using `architecture`. Values are `x86`, `x64`, `arm64`, `armv6l`, `armv7l`, `ppc64le`, `s390x` (not all of the architectures are available on all platforms). 
+You can use any of the [supported operating systems](https://docs.github.com/en/actions/using-github-hosted-runners/about-github-hosted-runners), and the compatible `architecture` can be selected using `architecture`. Values are `x86`, `x64`, `arm64`, `armv6l`, `armv7l`, `ppc64le`, `s390x` (not all of the architectures are available on all platforms). When using `architecture`, `node-version` must be provided as well. ```yaml From 35ba06beb71a75ab56021813e9ee318e7744e373 Mon Sep 17 00:00:00 2001 From: Evgenii Korolevskii Date: Wed, 31 Aug 2022 09:52:01 +0200 Subject: [PATCH 26/32] docs(contributor's guide): Update link --- docs/contributors.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/contributors.md b/docs/contributors.md index 7289d2a7..722ded67 100644 --- a/docs/contributors.md +++ b/docs/contributors.md @@ -67,7 +67,7 @@ Pull requests are the easiest way to contribute changes to git repos at GitHub. **Learn more about how to implement tests:** Adding or changing tests is an integral part of making a change to the code. -Unit tests are in the `__tests__` folder, and end-to-end tests are in the `workflows` folder, particularly in the [versions.yml](https://github.com/e-korolevskii/setup-node/blob/update-contributors-guide/.github/workflows/versions.yml) and [e2e-cache.yml](https://github.com/actions/setup-node/blob/main/.github/workflows/e2e-cache.yml) files. +Unit tests are in the `__tests__` folder, and end-to-end tests are in the `workflows` folder, particularly in the [versions.yml](https://github.com/actions/setup-node/blob/main/.github/workflows/versions.yml) and [e2e-cache.yml](https://github.com/actions/setup-node/blob/main/.github/workflows/e2e-cache.yml) files. - The contributor can add various types of tests (like unit tests or end-to-end tests), which, in his opinion, will be necessary and sufficient for testing new or changed functionality - Tests should cover a successful execution, as well as some edge cases and possible errors From b4b18e5317cee56876918c4f099a680d3bca1cb8 Mon Sep 17 00:00:00 2001 From: Milos Pantic <101411245+panticmilos@users.noreply.github.com> Date: Mon, 5 Sep 2022 13:22:56 +0200 Subject: [PATCH 27/32] Update package json version (#570) --- package-lock.json | 4 ++-- package.json | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/package-lock.json b/package-lock.json index 9c7c87b3..5963fa8f 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "setup-node", - "version": "3.1.1", + "version": "3.4.1", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "setup-node", - "version": "3.1.1", + "version": "3.4.1", "license": "MIT", "dependencies": { "@actions/cache": "^3.0.0", diff --git a/package.json b/package.json index 10595093..ee15cc8c 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "setup-node", - "version": "3.1.1", + "version": "3.4.1", "private": true, "description": "setup node action", "main": "lib/setup-node.js", From c8f0d105851628358c640b5e1ba0feb8990cab50 Mon Sep 17 00:00:00 2001 From: Shude Li Date: Thu, 8 Sep 2022 20:53:41 +0800 Subject: [PATCH 28/32] upgrade `@action/cache` to 3.0.4 to fix stuck issue (#573) --- .licenses/npm/@actions/cache.dep.yml | 2 +- dist/cache-save/index.js | 128 +++++++++++++++++---------- dist/setup/index.js | 128 +++++++++++++++++---------- package-lock.json | 14 +-- package.json | 2 +- 5 files changed, 171 insertions(+), 103 deletions(-) diff --git a/.licenses/npm/@actions/cache.dep.yml 
b/.licenses/npm/@actions/cache.dep.yml index c3c245ef..d9a80f6b 100644 --- a/.licenses/npm/@actions/cache.dep.yml +++ b/.licenses/npm/@actions/cache.dep.yml @@ -1,6 +1,6 @@ --- name: "@actions/cache" -version: 3.0.0 +version: 3.0.4 type: npm summary: Actions cache lib homepage: https://github.com/actions/toolkit/tree/main/packages/cache diff --git a/dist/cache-save/index.js b/dist/cache-save/index.js index a79693b2..7ab6a4b1 100644 --- a/dist/cache-save/index.js +++ b/dist/cache-save/index.js @@ -525,7 +525,13 @@ function resolvePaths(patterns) { .replace(new RegExp(`\\${path.sep}`, 'g'), '/'); core.debug(`Matched: ${relativeFile}`); // Paths are made relative so the tar entries are all relative to the root of the workspace. - paths.push(`${relativeFile}`); + if (relativeFile === '') { + // path.relative returns empty string if workspace and file are equal + paths.push('.'); + } + else { + paths.push(`${relativeFile}`); + } } } catch (e_1_1) { e_1 = { error: e_1_1 }; } @@ -683,6 +689,7 @@ const util = __importStar(__nccwpck_require__(3837)); const utils = __importStar(__nccwpck_require__(1518)); const constants_1 = __nccwpck_require__(8840); const requestUtils_1 = __nccwpck_require__(3981); +const abort_controller_1 = __nccwpck_require__(2557); /** * Pipes the body of a HTTP response to a stream * @@ -866,15 +873,24 @@ function downloadCacheStorageSDK(archiveLocation, archivePath, options) { const fd = fs.openSync(archivePath, 'w'); try { downloadProgress.startDisplayTimer(); + const controller = new abort_controller_1.AbortController(); + const abortSignal = controller.signal; while (!downloadProgress.isDone()) { const segmentStart = downloadProgress.segmentOffset + downloadProgress.segmentSize; const segmentSize = Math.min(maxSegmentSize, contentLength - segmentStart); downloadProgress.nextSegment(segmentSize); - const result = yield client.downloadToBuffer(segmentStart, segmentSize, { + const result = yield promiseWithTimeout(options.segmentTimeoutInMs || 3600000, client.downloadToBuffer(segmentStart, segmentSize, { + abortSignal, concurrency: options.downloadConcurrency, onProgress: downloadProgress.onProgress() - }); - fs.writeFileSync(fd, result); + })); + if (result === 'timeout') { + controller.abort(); + throw new Error('Aborting cache download as the download time exceeded the timeout.'); + } + else if (Buffer.isBuffer(result)) { + fs.writeFileSync(fd, result); + } } } finally { @@ -885,6 +901,16 @@ function downloadCacheStorageSDK(archiveLocation, archivePath, options) { }); } exports.downloadCacheStorageSDK = downloadCacheStorageSDK; +const promiseWithTimeout = (timeoutMs, promise) => __awaiter(void 0, void 0, void 0, function* () { + let timeoutHandle; + const timeoutPromise = new Promise(resolve => { + timeoutHandle = setTimeout(() => resolve('timeout'), timeoutMs); + }); + return Promise.race([promise, timeoutPromise]).then(result => { + clearTimeout(timeoutHandle); + return result; + }); +}); //# sourceMappingURL=downloadUtils.js.map /***/ }), @@ -1044,6 +1070,7 @@ const fs_1 = __nccwpck_require__(7147); const path = __importStar(__nccwpck_require__(1017)); const utils = __importStar(__nccwpck_require__(1518)); const constants_1 = __nccwpck_require__(8840); +const IS_WINDOWS = process.platform === 'win32'; function getTarPath(args, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { switch (process.platform) { @@ -1091,26 +1118,43 @@ function getWorkingDirectory() { var _a; return (_a = process.env['GITHUB_WORKSPACE']) !== null && _a !== void 0 ? 
_a : process.cwd(); } +// Common function for extractTar and listTar to get the compression method +function getCompressionProgram(compressionMethod) { + // -d: Decompress. + // unzstd is equivalent to 'zstd -d' + // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. + // Using 30 here because we also support 32-bit self-hosted runners. + switch (compressionMethod) { + case constants_1.CompressionMethod.Zstd: + return [ + '--use-compress-program', + IS_WINDOWS ? 'zstd -d --long=30' : 'unzstd --long=30' + ]; + case constants_1.CompressionMethod.ZstdWithoutLong: + return ['--use-compress-program', IS_WINDOWS ? 'zstd -d' : 'unzstd']; + default: + return ['-z']; + } +} +function listTar(archivePath, compressionMethod) { + return __awaiter(this, void 0, void 0, function* () { + const args = [ + ...getCompressionProgram(compressionMethod), + '-tf', + archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + '-P' + ]; + yield execTar(args, compressionMethod); + }); +} +exports.listTar = listTar; function extractTar(archivePath, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { // Create directory to extract tar into const workingDirectory = getWorkingDirectory(); yield io.mkdirP(workingDirectory); - // --d: Decompress. - // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. - // Using 30 here because we also support 32-bit self-hosted runners. - function getCompressionProgram() { - switch (compressionMethod) { - case constants_1.CompressionMethod.Zstd: - return ['--use-compress-program', 'zstd -d --long=30']; - case constants_1.CompressionMethod.ZstdWithoutLong: - return ['--use-compress-program', 'zstd -d']; - default: - return ['-z']; - } - } const args = [ - ...getCompressionProgram(), + ...getCompressionProgram(compressionMethod), '-xf', archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', @@ -1129,15 +1173,19 @@ function createTar(archiveFolder, sourceDirectories, compressionMethod) { fs_1.writeFileSync(path.join(archiveFolder, manifestFilename), sourceDirectories.join('\n')); const workingDirectory = getWorkingDirectory(); // -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores. + // zstdmt is equivalent to 'zstd -T0' // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. // Using 30 here because we also support 32-bit self-hosted runners. // Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd. function getCompressionProgram() { switch (compressionMethod) { case constants_1.CompressionMethod.Zstd: - return ['--use-compress-program', 'zstd -T0 --long=30']; + return [ + '--use-compress-program', + IS_WINDOWS ? 'zstd -T0 --long=30' : 'zstdmt --long=30' + ]; case constants_1.CompressionMethod.ZstdWithoutLong: - return ['--use-compress-program', 'zstd -T0']; + return ['--use-compress-program', IS_WINDOWS ? 'zstd -T0' : 'zstdmt']; default: return ['-z']; } @@ -1159,32 +1207,6 @@ function createTar(archiveFolder, sourceDirectories, compressionMethod) { }); } exports.createTar = createTar; -function listTar(archivePath, compressionMethod) { - return __awaiter(this, void 0, void 0, function* () { - // --d: Decompress. - // --long=#: Enables long distance matching with # bits. - // Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. 
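In the vendored `@actions/cache` 3.0.4 diff above, the program handed to tar via `--use-compress-program` is now chosen per platform as well as per compression method: Windows invokes `zstd` directly, while other platforms use the `unzstd`/`zstdmt` aliases (equivalent to `zstd -d` and `zstd -T0`). The sketch below only restates that selection logic outside the bundle to make it easier to follow; the enum is a simplified stand-in for the library's `CompressionMethod` constants, and the function name is invented.

```ts
// Simplified restatement of the vendored selection logic above (illustrative only).
enum CompressionMethod {
  Zstd = 'zstd',
  ZstdWithoutLong = 'zstd-without-long',
  Gzip = 'gzip'
}

const IS_WINDOWS = process.platform === 'win32';

// Decompression program passed to tar via --use-compress-program.
// 'unzstd' is equivalent to 'zstd -d'; --long=30 enables long-range matching
// with 30 bits so 32-bit self-hosted runners are still supported.
function decompressionArgs(method: CompressionMethod): string[] {
  switch (method) {
    case CompressionMethod.Zstd:
      return ['--use-compress-program', IS_WINDOWS ? 'zstd -d --long=30' : 'unzstd --long=30'];
    case CompressionMethod.ZstdWithoutLong:
      return ['--use-compress-program', IS_WINDOWS ? 'zstd -d' : 'unzstd'];
    default:
      return ['-z']; // fall back to gzip
  }
}
```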
- // Using 30 here because we also support 32-bit self-hosted runners. - function getCompressionProgram() { - switch (compressionMethod) { - case constants_1.CompressionMethod.Zstd: - return ['--use-compress-program', 'zstd -d --long=30']; - case constants_1.CompressionMethod.ZstdWithoutLong: - return ['--use-compress-program', 'zstd -d']; - default: - return ['-z']; - } - } - const args = [ - ...getCompressionProgram(), - '-tf', - archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '-P' - ]; - yield execTar(args, compressionMethod); - }); -} -exports.listTar = listTar; //# sourceMappingURL=tar.js.map /***/ }), @@ -1235,7 +1257,8 @@ function getDownloadOptions(copy) { const result = { useAzureSdk: true, downloadConcurrency: 8, - timeoutInMs: 30000 + timeoutInMs: 30000, + segmentTimeoutInMs: 3600000 }; if (copy) { if (typeof copy.useAzureSdk === 'boolean') { @@ -1247,10 +1270,21 @@ function getDownloadOptions(copy) { if (typeof copy.timeoutInMs === 'number') { result.timeoutInMs = copy.timeoutInMs; } + if (typeof copy.segmentTimeoutInMs === 'number') { + result.segmentTimeoutInMs = copy.segmentTimeoutInMs; + } + } + const segmentDownloadTimeoutMins = process.env['SEGMENT_DOWNLOAD_TIMEOUT_MINS']; + if (segmentDownloadTimeoutMins && + !isNaN(Number(segmentDownloadTimeoutMins)) && + isFinite(Number(segmentDownloadTimeoutMins))) { + result.segmentTimeoutInMs = Number(segmentDownloadTimeoutMins) * 60 * 1000; } core.debug(`Use Azure SDK: ${result.useAzureSdk}`); core.debug(`Download concurrency: ${result.downloadConcurrency}`); core.debug(`Request timeout (ms): ${result.timeoutInMs}`); + core.debug(`Cache segment download timeout mins env var: ${process.env['SEGMENT_DOWNLOAD_TIMEOUT_MINS']}`); + core.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); return result; } exports.getDownloadOptions = getDownloadOptions; diff --git a/dist/setup/index.js b/dist/setup/index.js index 0407c1ce..90f833a8 100644 --- a/dist/setup/index.js +++ b/dist/setup/index.js @@ -525,7 +525,13 @@ function resolvePaths(patterns) { .replace(new RegExp(`\\${path.sep}`, 'g'), '/'); core.debug(`Matched: ${relativeFile}`); // Paths are made relative so the tar entries are all relative to the root of the workspace. 
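The same `resolvePaths` fix now appears in `dist/setup/index.js`: when the matched path is the workspace itself, `path.relative` returns an empty string, so the patch pushes `'.'` instead of an empty tar entry. A minimal standalone illustration of that edge case (POSIX-style paths, not part of the bundle):

```ts
import * as path from 'path';

const workspace = '/home/runner/work/repo';

// path.relative returns '' when both arguments resolve to the same path,
// which would otherwise produce an empty tar entry.
console.log(path.relative(workspace, workspace) === '');            // true
console.log(path.relative(workspace, `${workspace}/node_modules`)); // 'node_modules'
```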
- paths.push(`${relativeFile}`); + if (relativeFile === '') { + // path.relative returns empty string if workspace and file are equal + paths.push('.'); + } + else { + paths.push(`${relativeFile}`); + } } } catch (e_1_1) { e_1 = { error: e_1_1 }; } @@ -683,6 +689,7 @@ const util = __importStar(__nccwpck_require__(3837)); const utils = __importStar(__nccwpck_require__(1518)); const constants_1 = __nccwpck_require__(8840); const requestUtils_1 = __nccwpck_require__(3981); +const abort_controller_1 = __nccwpck_require__(2557); /** * Pipes the body of a HTTP response to a stream * @@ -866,15 +873,24 @@ function downloadCacheStorageSDK(archiveLocation, archivePath, options) { const fd = fs.openSync(archivePath, 'w'); try { downloadProgress.startDisplayTimer(); + const controller = new abort_controller_1.AbortController(); + const abortSignal = controller.signal; while (!downloadProgress.isDone()) { const segmentStart = downloadProgress.segmentOffset + downloadProgress.segmentSize; const segmentSize = Math.min(maxSegmentSize, contentLength - segmentStart); downloadProgress.nextSegment(segmentSize); - const result = yield client.downloadToBuffer(segmentStart, segmentSize, { + const result = yield promiseWithTimeout(options.segmentTimeoutInMs || 3600000, client.downloadToBuffer(segmentStart, segmentSize, { + abortSignal, concurrency: options.downloadConcurrency, onProgress: downloadProgress.onProgress() - }); - fs.writeFileSync(fd, result); + })); + if (result === 'timeout') { + controller.abort(); + throw new Error('Aborting cache download as the download time exceeded the timeout.'); + } + else if (Buffer.isBuffer(result)) { + fs.writeFileSync(fd, result); + } } } finally { @@ -885,6 +901,16 @@ function downloadCacheStorageSDK(archiveLocation, archivePath, options) { }); } exports.downloadCacheStorageSDK = downloadCacheStorageSDK; +const promiseWithTimeout = (timeoutMs, promise) => __awaiter(void 0, void 0, void 0, function* () { + let timeoutHandle; + const timeoutPromise = new Promise(resolve => { + timeoutHandle = setTimeout(() => resolve('timeout'), timeoutMs); + }); + return Promise.race([promise, timeoutPromise]).then(result => { + clearTimeout(timeoutHandle); + return result; + }); +}); //# sourceMappingURL=downloadUtils.js.map /***/ }), @@ -1044,6 +1070,7 @@ const fs_1 = __nccwpck_require__(7147); const path = __importStar(__nccwpck_require__(1017)); const utils = __importStar(__nccwpck_require__(1518)); const constants_1 = __nccwpck_require__(8840); +const IS_WINDOWS = process.platform === 'win32'; function getTarPath(args, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { switch (process.platform) { @@ -1091,26 +1118,43 @@ function getWorkingDirectory() { var _a; return (_a = process.env['GITHUB_WORKSPACE']) !== null && _a !== void 0 ? _a : process.cwd(); } +// Common function for extractTar and listTar to get the compression method +function getCompressionProgram(compressionMethod) { + // -d: Decompress. + // unzstd is equivalent to 'zstd -d' + // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. + // Using 30 here because we also support 32-bit self-hosted runners. + switch (compressionMethod) { + case constants_1.CompressionMethod.Zstd: + return [ + '--use-compress-program', + IS_WINDOWS ? 'zstd -d --long=30' : 'unzstd --long=30' + ]; + case constants_1.CompressionMethod.ZstdWithoutLong: + return ['--use-compress-program', IS_WINDOWS ? 
'zstd -d' : 'unzstd']; + default: + return ['-z']; + } +} +function listTar(archivePath, compressionMethod) { + return __awaiter(this, void 0, void 0, function* () { + const args = [ + ...getCompressionProgram(compressionMethod), + '-tf', + archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + '-P' + ]; + yield execTar(args, compressionMethod); + }); +} +exports.listTar = listTar; function extractTar(archivePath, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { // Create directory to extract tar into const workingDirectory = getWorkingDirectory(); yield io.mkdirP(workingDirectory); - // --d: Decompress. - // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. - // Using 30 here because we also support 32-bit self-hosted runners. - function getCompressionProgram() { - switch (compressionMethod) { - case constants_1.CompressionMethod.Zstd: - return ['--use-compress-program', 'zstd -d --long=30']; - case constants_1.CompressionMethod.ZstdWithoutLong: - return ['--use-compress-program', 'zstd -d']; - default: - return ['-z']; - } - } const args = [ - ...getCompressionProgram(), + ...getCompressionProgram(compressionMethod), '-xf', archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', @@ -1129,15 +1173,19 @@ function createTar(archiveFolder, sourceDirectories, compressionMethod) { fs_1.writeFileSync(path.join(archiveFolder, manifestFilename), sourceDirectories.join('\n')); const workingDirectory = getWorkingDirectory(); // -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores. + // zstdmt is equivalent to 'zstd -T0' // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. // Using 30 here because we also support 32-bit self-hosted runners. // Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd. function getCompressionProgram() { switch (compressionMethod) { case constants_1.CompressionMethod.Zstd: - return ['--use-compress-program', 'zstd -T0 --long=30']; + return [ + '--use-compress-program', + IS_WINDOWS ? 'zstd -T0 --long=30' : 'zstdmt --long=30' + ]; case constants_1.CompressionMethod.ZstdWithoutLong: - return ['--use-compress-program', 'zstd -T0']; + return ['--use-compress-program', IS_WINDOWS ? 'zstd -T0' : 'zstdmt']; default: return ['-z']; } @@ -1159,32 +1207,6 @@ function createTar(archiveFolder, sourceDirectories, compressionMethod) { }); } exports.createTar = createTar; -function listTar(archivePath, compressionMethod) { - return __awaiter(this, void 0, void 0, function* () { - // --d: Decompress. - // --long=#: Enables long distance matching with # bits. - // Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. - // Using 30 here because we also support 32-bit self-hosted runners. 
- function getCompressionProgram() { - switch (compressionMethod) { - case constants_1.CompressionMethod.Zstd: - return ['--use-compress-program', 'zstd -d --long=30']; - case constants_1.CompressionMethod.ZstdWithoutLong: - return ['--use-compress-program', 'zstd -d']; - default: - return ['-z']; - } - } - const args = [ - ...getCompressionProgram(), - '-tf', - archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '-P' - ]; - yield execTar(args, compressionMethod); - }); -} -exports.listTar = listTar; //# sourceMappingURL=tar.js.map /***/ }), @@ -1235,7 +1257,8 @@ function getDownloadOptions(copy) { const result = { useAzureSdk: true, downloadConcurrency: 8, - timeoutInMs: 30000 + timeoutInMs: 30000, + segmentTimeoutInMs: 3600000 }; if (copy) { if (typeof copy.useAzureSdk === 'boolean') { @@ -1247,10 +1270,21 @@ function getDownloadOptions(copy) { if (typeof copy.timeoutInMs === 'number') { result.timeoutInMs = copy.timeoutInMs; } + if (typeof copy.segmentTimeoutInMs === 'number') { + result.segmentTimeoutInMs = copy.segmentTimeoutInMs; + } + } + const segmentDownloadTimeoutMins = process.env['SEGMENT_DOWNLOAD_TIMEOUT_MINS']; + if (segmentDownloadTimeoutMins && + !isNaN(Number(segmentDownloadTimeoutMins)) && + isFinite(Number(segmentDownloadTimeoutMins))) { + result.segmentTimeoutInMs = Number(segmentDownloadTimeoutMins) * 60 * 1000; } core.debug(`Use Azure SDK: ${result.useAzureSdk}`); core.debug(`Download concurrency: ${result.downloadConcurrency}`); core.debug(`Request timeout (ms): ${result.timeoutInMs}`); + core.debug(`Cache segment download timeout mins env var: ${process.env['SEGMENT_DOWNLOAD_TIMEOUT_MINS']}`); + core.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); return result; } exports.getDownloadOptions = getDownloadOptions; diff --git a/package-lock.json b/package-lock.json index 5963fa8f..2409f72e 100644 --- a/package-lock.json +++ b/package-lock.json @@ -9,7 +9,7 @@ "version": "3.4.1", "license": "MIT", "dependencies": { - "@actions/cache": "^3.0.0", + "@actions/cache": "^3.0.4", "@actions/core": "^1.6.0", "@actions/exec": "^1.1.0", "@actions/github": "^1.1.0", @@ -32,9 +32,9 @@ } }, "node_modules/@actions/cache": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.0.0.tgz", - "integrity": "sha512-GL9CT1Fnu+pqs8TTB621q8Xa8Cilw2n9MwvbgMedetH7L1q2n6jY61gzbwGbKgtVbp3gVJ12aNMi4osSGXx3KQ==", + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.0.4.tgz", + "integrity": "sha512-9RwVL8/ISJoYWFNH1wR/C26E+M3HDkGPWmbFJMMCKwTkjbNZJreMT4XaR/EB1bheIvN4PREQxEQQVJ18IPnf/Q==", "dependencies": { "@actions/core": "^1.2.6", "@actions/exec": "^1.0.1", @@ -5087,9 +5087,9 @@ }, "dependencies": { "@actions/cache": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.0.0.tgz", - "integrity": "sha512-GL9CT1Fnu+pqs8TTB621q8Xa8Cilw2n9MwvbgMedetH7L1q2n6jY61gzbwGbKgtVbp3gVJ12aNMi4osSGXx3KQ==", + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.0.4.tgz", + "integrity": "sha512-9RwVL8/ISJoYWFNH1wR/C26E+M3HDkGPWmbFJMMCKwTkjbNZJreMT4XaR/EB1bheIvN4PREQxEQQVJ18IPnf/Q==", "requires": { "@actions/core": "^1.2.6", "@actions/exec": "^1.0.1", diff --git a/package.json b/package.json index ee15cc8c..38469c30 100644 --- a/package.json +++ b/package.json @@ -23,7 +23,7 @@ "author": "GitHub", "license": "MIT", "dependencies": { - "@actions/cache": "^3.0.0", + "@actions/cache": "^3.0.4", "@actions/core": "^1.6.0", "@actions/exec": "^1.1.0", 
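Beyond the tar changes, the `@actions/cache` 3.0.4 upgrade shown above also races each Azure segment download against a timeout (default 3 600 000 ms, overridable in minutes via the `SEGMENT_DOWNLOAD_TIMEOUT_MINS` environment variable) and aborts the download instead of letting it hang. The sketch below restates that pattern outside the bundle; the helper names are invented and it is illustrative only, not the vendored implementation.

```ts
// Illustrative restatement of the segment-download timeout used in the
// vendored @actions/cache 3.0.4 code above.
const DEFAULT_SEGMENT_TIMEOUT_MS = 3_600_000; // 60 minutes

function segmentTimeoutMs(): number {
  // SEGMENT_DOWNLOAD_TIMEOUT_MINS overrides the default, expressed in minutes.
  const raw = process.env['SEGMENT_DOWNLOAD_TIMEOUT_MINS'];
  const mins = Number(raw);
  return raw && !isNaN(mins) && isFinite(mins) ? mins * 60 * 1000 : DEFAULT_SEGMENT_TIMEOUT_MS;
}

async function withTimeout<T>(timeoutMs: number, work: Promise<T>): Promise<T | 'timeout'> {
  let handle: NodeJS.Timeout | undefined;
  const timeout = new Promise<'timeout'>(resolve => {
    handle = setTimeout(() => resolve('timeout'), timeoutMs);
  });
  const result = await Promise.race([work, timeout]);
  if (handle) clearTimeout(handle);
  return result;
}
```

If the work promise loses the race, the caller aborts the in-flight download (via an `AbortController`, as in the vendored `downloadCacheStorageSDK` above) and fails with an explicit timeout error rather than stalling the job.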
"@actions/github": "^1.1.0", From 30f0e7dc5aa0e546410a28dc80a10d8114ee873e Mon Sep 17 00:00:00 2001 From: Dmitry Shibanov Date: Thu, 8 Sep 2022 15:15:16 +0200 Subject: [PATCH 29/32] Update @actions/core to 1.9.1 (#574) --- .licenses/npm/@actions/core.dep.yml | 2 +- dist/cache-save/index.js | 2476 +++++++++++++++++++-------- dist/setup/index.js | 1866 +++++++++++++++++++- package-lock.json | 49 +- 4 files changed, 3636 insertions(+), 757 deletions(-) diff --git a/.licenses/npm/@actions/core.dep.yml b/.licenses/npm/@actions/core.dep.yml index 43cedcd2..2e0762e4 100644 --- a/.licenses/npm/@actions/core.dep.yml +++ b/.licenses/npm/@actions/core.dep.yml @@ -1,6 +1,6 @@ --- name: "@actions/core" -version: 1.6.0 +version: 1.9.1 type: npm summary: Actions core lib homepage: https://github.com/actions/toolkit/tree/main/packages/core diff --git a/dist/cache-save/index.js b/dist/cache-save/index.js index 7ab6a4b1..4bac2864 100644 --- a/dist/cache-save/index.js +++ b/dist/cache-save/index.js @@ -3477,6 +3477,7 @@ const file_command_1 = __nccwpck_require__(717); const utils_1 = __nccwpck_require__(5278); const os = __importStar(__nccwpck_require__(2037)); const path = __importStar(__nccwpck_require__(1017)); +const uuid_1 = __nccwpck_require__(8974); const oidc_utils_1 = __nccwpck_require__(8041); /** * The code to exit an action @@ -3506,7 +3507,14 @@ function exportVariable(name, val) { process.env[name] = convertedVal; const filePath = process.env['GITHUB_ENV'] || ''; if (filePath) { - const delimiter = '_GitHubActionsFileCommandDelimeter_'; + const delimiter = `ghadelimiter_${uuid_1.v4()}`; + // These should realistically never happen, but just in case someone finds a way to exploit uuid generation let's not allow keys or values that contain the delimiter. + if (name.includes(delimiter)) { + throw new Error(`Unexpected input: name should not contain the delimiter "${delimiter}"`); + } + if (convertedVal.includes(delimiter)) { + throw new Error(`Unexpected input: value should not contain the delimiter "${delimiter}"`); + } const commandValue = `${name}<<${delimiter}${os.EOL}${convertedVal}${os.EOL}${delimiter}`; file_command_1.issueCommand('ENV', commandValue); } @@ -3752,6 +3760,23 @@ function getIDToken(aud) { }); } exports.getIDToken = getIDToken; +/** + * Summary exports + */ +var summary_1 = __nccwpck_require__(1327); +Object.defineProperty(exports, "summary", ({ enumerable: true, get: function () { return summary_1.summary; } })); +/** + * @deprecated use core.summary + */ +var summary_2 = __nccwpck_require__(1327); +Object.defineProperty(exports, "markdownSummary", ({ enumerable: true, get: function () { return summary_2.markdownSummary; } })); +/** + * Path exports + */ +var path_utils_1 = __nccwpck_require__(2981); +Object.defineProperty(exports, "toPosixPath", ({ enumerable: true, get: function () { return path_utils_1.toPosixPath; } })); +Object.defineProperty(exports, "toWin32Path", ({ enumerable: true, get: function () { return path_utils_1.toWin32Path; } })); +Object.defineProperty(exports, "toPlatformPath", ({ enumerable: true, get: function () { return path_utils_1.toPlatformPath; } })); //# sourceMappingURL=core.js.map /***/ }), @@ -3821,8 +3846,8 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.OidcClient = void 0; -const http_client_1 = __nccwpck_require__(9925); -const auth_1 = __nccwpck_require__(3702); +const http_client_1 = __nccwpck_require__(1404); +const auth_1 = 
__nccwpck_require__(6758); const core_1 = __nccwpck_require__(2186); class OidcClient { static createHttpClient(allowRetry = true, maxRetry = 10) { @@ -3889,6 +3914,361 @@ exports.OidcClient = OidcClient; /***/ }), +/***/ 2981: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.toPlatformPath = exports.toWin32Path = exports.toPosixPath = void 0; +const path = __importStar(__nccwpck_require__(1017)); +/** + * toPosixPath converts the given path to the posix form. On Windows, \\ will be + * replaced with /. + * + * @param pth. Path to transform. + * @return string Posix path. + */ +function toPosixPath(pth) { + return pth.replace(/[\\]/g, '/'); +} +exports.toPosixPath = toPosixPath; +/** + * toWin32Path converts the given path to the win32 form. On Linux, / will be + * replaced with \\. + * + * @param pth. Path to transform. + * @return string Win32 path. + */ +function toWin32Path(pth) { + return pth.replace(/[/]/g, '\\'); +} +exports.toWin32Path = toWin32Path; +/** + * toPlatformPath converts the given path to a platform-specific path. It does + * this by replacing instances of / and \ with the platform-specific path + * separator. + * + * @param pth The path to platformize. + * @return string The platform-specific path. + */ +function toPlatformPath(pth) { + return pth.replace(/[/\\]/g, path.sep); +} +exports.toPlatformPath = toPlatformPath; +//# sourceMappingURL=path-utils.js.map + +/***/ }), + +/***/ 1327: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.summary = exports.markdownSummary = exports.SUMMARY_DOCS_URL = exports.SUMMARY_ENV_VAR = void 0; +const os_1 = __nccwpck_require__(2037); +const fs_1 = __nccwpck_require__(7147); +const { access, appendFile, writeFile } = fs_1.promises; +exports.SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY'; +exports.SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary'; +class Summary { + constructor() { + this._buffer = ''; + } + /** + * Finds the summary file path from the environment, rejects if env var is not found or file does not exist + * Also checks r/w permissions. + * + * @returns step summary file path + */ + filePath() { + return __awaiter(this, void 0, void 0, function* () { + if (this._filePath) { + return this._filePath; + } + const pathFromEnv = process.env[exports.SUMMARY_ENV_VAR]; + if (!pathFromEnv) { + throw new Error(`Unable to find environment variable for $${exports.SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`); + } + try { + yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK); + } + catch (_a) { + throw new Error(`Unable to access summary file: '${pathFromEnv}'. Check if the file has correct read/write permissions.`); + } + this._filePath = pathFromEnv; + return this._filePath; + }); + } + /** + * Wraps content in an HTML tag, adding any HTML attributes + * + * @param {string} tag HTML tag to wrap + * @param {string | null} content content within the tag + * @param {[attribute: string]: string} attrs key-value list of HTML attributes to add + * + * @returns {string} content wrapped in HTML element + */ + wrap(tag, content, attrs = {}) { + const htmlAttrs = Object.entries(attrs) + .map(([key, value]) => ` ${key}="${value}"`) + .join(''); + if (!content) { + return `<${tag}${htmlAttrs}>`; + } + return `<${tag}${htmlAttrs}>${content}`; + } + /** + * Writes text in the buffer to the summary buffer file and empties buffer. Will append by default. + * + * @param {SummaryWriteOptions} [options] (optional) options for write operation + * + * @returns {Promise} summary instance + */ + write(options) { + return __awaiter(this, void 0, void 0, function* () { + const overwrite = !!(options === null || options === void 0 ? void 0 : options.overwrite); + const filePath = yield this.filePath(); + const writeFunc = overwrite ? 
writeFile : appendFile; + yield writeFunc(filePath, this._buffer, { encoding: 'utf8' }); + return this.emptyBuffer(); + }); + } + /** + * Clears the summary buffer and wipes the summary file + * + * @returns {Summary} summary instance + */ + clear() { + return __awaiter(this, void 0, void 0, function* () { + return this.emptyBuffer().write({ overwrite: true }); + }); + } + /** + * Returns the current summary buffer as a string + * + * @returns {string} string of summary buffer + */ + stringify() { + return this._buffer; + } + /** + * If the summary buffer is empty + * + * @returns {boolen} true if the buffer is empty + */ + isEmptyBuffer() { + return this._buffer.length === 0; + } + /** + * Resets the summary buffer without writing to summary file + * + * @returns {Summary} summary instance + */ + emptyBuffer() { + this._buffer = ''; + return this; + } + /** + * Adds raw text to the summary buffer + * + * @param {string} text content to add + * @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false) + * + * @returns {Summary} summary instance + */ + addRaw(text, addEOL = false) { + this._buffer += text; + return addEOL ? this.addEOL() : this; + } + /** + * Adds the operating system-specific end-of-line marker to the buffer + * + * @returns {Summary} summary instance + */ + addEOL() { + return this.addRaw(os_1.EOL); + } + /** + * Adds an HTML codeblock to the summary buffer + * + * @param {string} code content to render within fenced code block + * @param {string} lang (optional) language to syntax highlight code + * + * @returns {Summary} summary instance + */ + addCodeBlock(code, lang) { + const attrs = Object.assign({}, (lang && { lang })); + const element = this.wrap('pre', this.wrap('code', code), attrs); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML list to the summary buffer + * + * @param {string[]} items list of items to render + * @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false) + * + * @returns {Summary} summary instance + */ + addList(items, ordered = false) { + const tag = ordered ? 'ol' : 'ul'; + const listItems = items.map(item => this.wrap('li', item)).join(''); + const element = this.wrap(tag, listItems); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML table to the summary buffer + * + * @param {SummaryTableCell[]} rows table rows + * + * @returns {Summary} summary instance + */ + addTable(rows) { + const tableBody = rows + .map(row => { + const cells = row + .map(cell => { + if (typeof cell === 'string') { + return this.wrap('td', cell); + } + const { header, data, colspan, rowspan } = cell; + const tag = header ? 
'th' : 'td'; + const attrs = Object.assign(Object.assign({}, (colspan && { colspan })), (rowspan && { rowspan })); + return this.wrap(tag, data, attrs); + }) + .join(''); + return this.wrap('tr', cells); + }) + .join(''); + const element = this.wrap('table', tableBody); + return this.addRaw(element).addEOL(); + } + /** + * Adds a collapsable HTML details element to the summary buffer + * + * @param {string} label text for the closed state + * @param {string} content collapsable content + * + * @returns {Summary} summary instance + */ + addDetails(label, content) { + const element = this.wrap('details', this.wrap('summary', label) + content); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML image tag to the summary buffer + * + * @param {string} src path to the image you to embed + * @param {string} alt text description of the image + * @param {SummaryImageOptions} options (optional) addition image attributes + * + * @returns {Summary} summary instance + */ + addImage(src, alt, options) { + const { width, height } = options || {}; + const attrs = Object.assign(Object.assign({}, (width && { width })), (height && { height })); + const element = this.wrap('img', null, Object.assign({ src, alt }, attrs)); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML section heading element + * + * @param {string} text heading text + * @param {number | string} [level=1] (optional) the heading level, default: 1 + * + * @returns {Summary} summary instance + */ + addHeading(text, level) { + const tag = `h${level}`; + const allowedTag = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'].includes(tag) + ? tag + : 'h1'; + const element = this.wrap(allowedTag, text); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML thematic break (
) to the summary buffer + * + * @returns {Summary} summary instance + */ + addSeparator() { + const element = this.wrap('hr', null); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML line break (
) to the summary buffer + * + * @returns {Summary} summary instance + */ + addBreak() { + const element = this.wrap('br', null); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML blockquote to the summary buffer + * + * @param {string} text quote text + * @param {string} cite (optional) citation url + * + * @returns {Summary} summary instance + */ + addQuote(text, cite) { + const attrs = Object.assign({}, (cite && { cite })); + const element = this.wrap('blockquote', text, attrs); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML anchor tag to the summary buffer + * + * @param {string} text link text/content + * @param {string} href hyperlink + * + * @returns {Summary} summary instance + */ + addLink(text, href) { + const element = this.wrap('a', text, { href }); + return this.addRaw(element).addEOL(); + } +} +const _summary = new Summary(); +/** + * @deprecated use `core.summary` + */ +exports.markdownSummary = _summary; +exports.summary = _summary; +//# sourceMappingURL=summary.js.map + +/***/ }), + /***/ 5278: /***/ ((__unused_webpack_module, exports) => { @@ -3936,6 +4316,1420 @@ exports.toCommandProperties = toCommandProperties; /***/ }), +/***/ 6758: +/***/ (function(__unused_webpack_module, exports) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.PersonalAccessTokenCredentialHandler = exports.BearerCredentialHandler = exports.BasicCredentialHandler = void 0; +class BasicCredentialHandler { + constructor(username, password) { + this.username = username; + this.password = password; + } + prepareRequest(options) { + if (!options.headers) { + throw Error('The request has no headers'); + } + options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`; + } + // This handler cannot handle 401 + canHandleAuthentication() { + return false; + } + handleAuthentication() { + return __awaiter(this, void 0, void 0, function* () { + throw new Error('not implemented'); + }); + } +} +exports.BasicCredentialHandler = BasicCredentialHandler; +class BearerCredentialHandler { + constructor(token) { + this.token = token; + } + // currently implements pre-authorization + // TODO: support preAuth = false where it hooks on 401 + prepareRequest(options) { + if (!options.headers) { + throw Error('The request has no headers'); + } + options.headers['Authorization'] = `Bearer ${this.token}`; + } + // This handler cannot handle 401 + canHandleAuthentication() { + return false; + } + handleAuthentication() { + return __awaiter(this, void 0, void 0, function* () { + throw new Error('not implemented'); + }); + } +} +exports.BearerCredentialHandler = BearerCredentialHandler; +class PersonalAccessTokenCredentialHandler { + constructor(token) { + this.token = token; + } + // currently implements pre-authorization + // TODO: support preAuth = 
false where it hooks on 401 + prepareRequest(options) { + if (!options.headers) { + throw Error('The request has no headers'); + } + options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`; + } + // This handler cannot handle 401 + canHandleAuthentication() { + return false; + } + handleAuthentication() { + return __awaiter(this, void 0, void 0, function* () { + throw new Error('not implemented'); + }); + } +} +exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler; +//# sourceMappingURL=auth.js.map + +/***/ }), + +/***/ 1404: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +/* eslint-disable @typescript-eslint/no-explicit-any */ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0; +const http = __importStar(__nccwpck_require__(3685)); +const https = __importStar(__nccwpck_require__(5687)); +const pm = __importStar(__nccwpck_require__(2843)); +const tunnel = __importStar(__nccwpck_require__(4294)); +var HttpCodes; +(function (HttpCodes) { + HttpCodes[HttpCodes["OK"] = 200] = "OK"; + HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices"; + HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently"; + HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved"; + HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther"; + HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified"; + HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy"; + HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy"; + HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect"; + HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect"; + HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest"; + HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized"; + HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired"; + HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden"; + HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound"; + HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed"; + HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable"; + HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired"; + HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout"; + HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict"; + HttpCodes[HttpCodes["Gone"] = 410] = "Gone"; + HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests"; + HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError"; + HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented"; + HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway"; + HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable"; + HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout"; +})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {})); +var Headers; +(function (Headers) { + Headers["Accept"] = "accept"; + Headers["ContentType"] = "content-type"; +})(Headers = exports.Headers || (exports.Headers = {})); +var MediaTypes; +(function (MediaTypes) { + MediaTypes["ApplicationJson"] = "application/json"; +})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {})); +/** + * Returns the proxy URL, depending upon the supplied url and proxy environment variables. + * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com + */ +function getProxyUrl(serverUrl) { + const proxyUrl = pm.getProxyUrl(new URL(serverUrl)); + return proxyUrl ? 
proxyUrl.href : ''; +} +exports.getProxyUrl = getProxyUrl; +const HttpRedirectCodes = [ + HttpCodes.MovedPermanently, + HttpCodes.ResourceMoved, + HttpCodes.SeeOther, + HttpCodes.TemporaryRedirect, + HttpCodes.PermanentRedirect +]; +const HttpResponseRetryCodes = [ + HttpCodes.BadGateway, + HttpCodes.ServiceUnavailable, + HttpCodes.GatewayTimeout +]; +const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD']; +const ExponentialBackoffCeiling = 10; +const ExponentialBackoffTimeSlice = 5; +class HttpClientError extends Error { + constructor(message, statusCode) { + super(message); + this.name = 'HttpClientError'; + this.statusCode = statusCode; + Object.setPrototypeOf(this, HttpClientError.prototype); + } +} +exports.HttpClientError = HttpClientError; +class HttpClientResponse { + constructor(message) { + this.message = message; + } + readBody() { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () { + let output = Buffer.alloc(0); + this.message.on('data', (chunk) => { + output = Buffer.concat([output, chunk]); + }); + this.message.on('end', () => { + resolve(output.toString()); + }); + })); + }); + } +} +exports.HttpClientResponse = HttpClientResponse; +function isHttps(requestUrl) { + const parsedUrl = new URL(requestUrl); + return parsedUrl.protocol === 'https:'; +} +exports.isHttps = isHttps; +class HttpClient { + constructor(userAgent, handlers, requestOptions) { + this._ignoreSslError = false; + this._allowRedirects = true; + this._allowRedirectDowngrade = false; + this._maxRedirects = 50; + this._allowRetries = false; + this._maxRetries = 1; + this._keepAlive = false; + this._disposed = false; + this.userAgent = userAgent; + this.handlers = handlers || []; + this.requestOptions = requestOptions; + if (requestOptions) { + if (requestOptions.ignoreSslError != null) { + this._ignoreSslError = requestOptions.ignoreSslError; + } + this._socketTimeout = requestOptions.socketTimeout; + if (requestOptions.allowRedirects != null) { + this._allowRedirects = requestOptions.allowRedirects; + } + if (requestOptions.allowRedirectDowngrade != null) { + this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade; + } + if (requestOptions.maxRedirects != null) { + this._maxRedirects = Math.max(requestOptions.maxRedirects, 0); + } + if (requestOptions.keepAlive != null) { + this._keepAlive = requestOptions.keepAlive; + } + if (requestOptions.allowRetries != null) { + this._allowRetries = requestOptions.allowRetries; + } + if (requestOptions.maxRetries != null) { + this._maxRetries = requestOptions.maxRetries; + } + } + } + options(requestUrl, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('OPTIONS', requestUrl, null, additionalHeaders || {}); + }); + } + get(requestUrl, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('GET', requestUrl, null, additionalHeaders || {}); + }); + } + del(requestUrl, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('DELETE', requestUrl, null, additionalHeaders || {}); + }); + } + post(requestUrl, data, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('POST', requestUrl, data, additionalHeaders || {}); + }); + } + patch(requestUrl, data, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('PATCH', requestUrl, data, additionalHeaders 
|| {}); + }); + } + put(requestUrl, data, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('PUT', requestUrl, data, additionalHeaders || {}); + }); + } + head(requestUrl, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('HEAD', requestUrl, null, additionalHeaders || {}); + }); + } + sendStream(verb, requestUrl, stream, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request(verb, requestUrl, stream, additionalHeaders); + }); + } + /** + * Gets a typed object from an endpoint + * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise + */ + getJson(requestUrl, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + const res = yield this.get(requestUrl, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); + } + postJson(requestUrl, obj, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + const data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + const res = yield this.post(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); + } + putJson(requestUrl, obj, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + const data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + const res = yield this.put(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); + } + patchJson(requestUrl, obj, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + const data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + const res = yield this.patch(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); + } + /** + * Makes a raw http request. + * All other methods such as get, post, patch, and request ultimately call this. + * Prefer get, del, post and patch + */ + request(verb, requestUrl, data, headers) { + return __awaiter(this, void 0, void 0, function* () { + if (this._disposed) { + throw new Error('Client has already been disposed.'); + } + const parsedUrl = new URL(requestUrl); + let info = this._prepareRequest(verb, parsedUrl, headers); + // Only perform retries on reads since writes may not be idempotent. + const maxTries = this._allowRetries && RetryableHttpVerbs.includes(verb) + ? 
this._maxRetries + 1 + : 1; + let numTries = 0; + let response; + do { + response = yield this.requestRaw(info, data); + // Check if it's an authentication challenge + if (response && + response.message && + response.message.statusCode === HttpCodes.Unauthorized) { + let authenticationHandler; + for (const handler of this.handlers) { + if (handler.canHandleAuthentication(response)) { + authenticationHandler = handler; + break; + } + } + if (authenticationHandler) { + return authenticationHandler.handleAuthentication(this, info, data); + } + else { + // We have received an unauthorized response but have no handlers to handle it. + // Let the response return to the caller. + return response; + } + } + let redirectsRemaining = this._maxRedirects; + while (response.message.statusCode && + HttpRedirectCodes.includes(response.message.statusCode) && + this._allowRedirects && + redirectsRemaining > 0) { + const redirectUrl = response.message.headers['location']; + if (!redirectUrl) { + // if there's no location to redirect to, we won't + break; + } + const parsedRedirectUrl = new URL(redirectUrl); + if (parsedUrl.protocol === 'https:' && + parsedUrl.protocol !== parsedRedirectUrl.protocol && + !this._allowRedirectDowngrade) { + throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.'); + } + // we need to finish reading the response before reassigning response + // which will leak the open socket. + yield response.readBody(); + // strip authorization header if redirected to a different hostname + if (parsedRedirectUrl.hostname !== parsedUrl.hostname) { + for (const header in headers) { + // header names are case insensitive + if (header.toLowerCase() === 'authorization') { + delete headers[header]; + } + } + } + // let's make the request with the new redirectUrl + info = this._prepareRequest(verb, parsedRedirectUrl, headers); + response = yield this.requestRaw(info, data); + redirectsRemaining--; + } + if (!response.message.statusCode || + !HttpResponseRetryCodes.includes(response.message.statusCode)) { + // If not a retry code, return immediately instead of retrying + return response; + } + numTries += 1; + if (numTries < maxTries) { + yield response.readBody(); + yield this._performExponentialBackoff(numTries); + } + } while (numTries < maxTries); + return response; + }); + } + /** + * Needs to be called if keepAlive is set to true in request options. + */ + dispose() { + if (this._agent) { + this._agent.destroy(); + } + this._disposed = true; + } + /** + * Raw request. + * @param info + * @param data + */ + requestRaw(info, data) { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve, reject) => { + function callbackForResult(err, res) { + if (err) { + reject(err); + } + else if (!res) { + // If `err` is not passed, then `res` must be passed. + reject(new Error('Unknown error')); + } + else { + resolve(res); + } + } + this.requestRawWithCallback(info, data, callbackForResult); + }); + }); + } + /** + * Raw request with callback. 
+ * @param info + * @param data + * @param onResult + */ + requestRawWithCallback(info, data, onResult) { + if (typeof data === 'string') { + if (!info.options.headers) { + info.options.headers = {}; + } + info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8'); + } + let callbackCalled = false; + function handleResult(err, res) { + if (!callbackCalled) { + callbackCalled = true; + onResult(err, res); + } + } + const req = info.httpModule.request(info.options, (msg) => { + const res = new HttpClientResponse(msg); + handleResult(undefined, res); + }); + let socket; + req.on('socket', sock => { + socket = sock; + }); + // If we ever get disconnected, we want the socket to timeout eventually + req.setTimeout(this._socketTimeout || 3 * 60000, () => { + if (socket) { + socket.end(); + } + handleResult(new Error(`Request timeout: ${info.options.path}`)); + }); + req.on('error', function (err) { + // err has statusCode property + // res should have headers + handleResult(err); + }); + if (data && typeof data === 'string') { + req.write(data, 'utf8'); + } + if (data && typeof data !== 'string') { + data.on('close', function () { + req.end(); + }); + data.pipe(req); + } + else { + req.end(); + } + } + /** + * Gets an http agent. This function is useful when you need an http agent that handles + * routing through a proxy server - depending upon the url and proxy environment variables. + * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com + */ + getAgent(serverUrl) { + const parsedUrl = new URL(serverUrl); + return this._getAgent(parsedUrl); + } + _prepareRequest(method, requestUrl, headers) { + const info = {}; + info.parsedUrl = requestUrl; + const usingSsl = info.parsedUrl.protocol === 'https:'; + info.httpModule = usingSsl ? https : http; + const defaultPort = usingSsl ? 443 : 80; + info.options = {}; + info.options.host = info.parsedUrl.hostname; + info.options.port = info.parsedUrl.port + ? parseInt(info.parsedUrl.port) + : defaultPort; + info.options.path = + (info.parsedUrl.pathname || '') + (info.parsedUrl.search || ''); + info.options.method = method; + info.options.headers = this._mergeHeaders(headers); + if (this.userAgent != null) { + info.options.headers['user-agent'] = this.userAgent; + } + info.options.agent = this._getAgent(info.parsedUrl); + // gives handlers an opportunity to participate + if (this.handlers) { + for (const handler of this.handlers) { + handler.prepareRequest(info.options); + } + } + return info; + } + _mergeHeaders(headers) { + if (this.requestOptions && this.requestOptions.headers) { + return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers || {})); + } + return lowercaseKeys(headers || {}); + } + _getExistingOrDefaultHeader(additionalHeaders, header, _default) { + let clientHeader; + if (this.requestOptions && this.requestOptions.headers) { + clientHeader = lowercaseKeys(this.requestOptions.headers)[header]; + } + return additionalHeaders[header] || clientHeader || _default; + } + _getAgent(parsedUrl) { + let agent; + const proxyUrl = pm.getProxyUrl(parsedUrl); + const useProxy = proxyUrl && proxyUrl.hostname; + if (this._keepAlive && useProxy) { + agent = this._proxyAgent; + } + if (this._keepAlive && !useProxy) { + agent = this._agent; + } + // if agent is already assigned use that agent. 
+ if (agent) { + return agent; + } + const usingSsl = parsedUrl.protocol === 'https:'; + let maxSockets = 100; + if (this.requestOptions) { + maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets; + } + // This is `useProxy` again, but we need to check `proxyURl` directly for TypeScripts's flow analysis. + if (proxyUrl && proxyUrl.hostname) { + const agentOptions = { + maxSockets, + keepAlive: this._keepAlive, + proxy: Object.assign(Object.assign({}, ((proxyUrl.username || proxyUrl.password) && { + proxyAuth: `${proxyUrl.username}:${proxyUrl.password}` + })), { host: proxyUrl.hostname, port: proxyUrl.port }) + }; + let tunnelAgent; + const overHttps = proxyUrl.protocol === 'https:'; + if (usingSsl) { + tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp; + } + else { + tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp; + } + agent = tunnelAgent(agentOptions); + this._proxyAgent = agent; + } + // if reusing agent across request and tunneling agent isn't assigned create a new agent + if (this._keepAlive && !agent) { + const options = { keepAlive: this._keepAlive, maxSockets }; + agent = usingSsl ? new https.Agent(options) : new http.Agent(options); + this._agent = agent; + } + // if not using private agent and tunnel agent isn't setup then use global agent + if (!agent) { + agent = usingSsl ? https.globalAgent : http.globalAgent; + } + if (usingSsl && this._ignoreSslError) { + // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process + // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options + // we have to cast it to any and change it directly + agent.options = Object.assign(agent.options || {}, { + rejectUnauthorized: false + }); + } + return agent; + } + _performExponentialBackoff(retryNumber) { + return __awaiter(this, void 0, void 0, function* () { + retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber); + const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber); + return new Promise(resolve => setTimeout(() => resolve(), ms)); + }); + } + _processResponse(res, options) { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () { + const statusCode = res.message.statusCode || 0; + const response = { + statusCode, + result: null, + headers: {} + }; + // not found leads to null obj returned + if (statusCode === HttpCodes.NotFound) { + resolve(response); + } + // get the result from the body + function dateTimeDeserializer(key, value) { + if (typeof value === 'string') { + const a = new Date(value); + if (!isNaN(a.valueOf())) { + return a; + } + } + return value; + } + let obj; + let contents; + try { + contents = yield res.readBody(); + if (contents && contents.length > 0) { + if (options && options.deserializeDates) { + obj = JSON.parse(contents, dateTimeDeserializer); + } + else { + obj = JSON.parse(contents); + } + response.result = obj; + } + response.headers = res.message.headers; + } + catch (err) { + // Invalid resource (contents not json); leaving result obj null + } + // note that 3xx redirects are handled by the http layer. 
+ if (statusCode > 299) { + let msg; + // if exception/error in body, attempt to get better error + if (obj && obj.message) { + msg = obj.message; + } + else if (contents && contents.length > 0) { + // it may be the case that the exception is in the body message as string + msg = contents; + } + else { + msg = `Failed request: (${statusCode})`; + } + const err = new HttpClientError(msg, statusCode); + err.result = response.result; + reject(err); + } + else { + resolve(response); + } + })); + }); + } +} +exports.HttpClient = HttpClient; +const lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 2843: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.checkBypass = exports.getProxyUrl = void 0; +function getProxyUrl(reqUrl) { + const usingSsl = reqUrl.protocol === 'https:'; + if (checkBypass(reqUrl)) { + return undefined; + } + const proxyVar = (() => { + if (usingSsl) { + return process.env['https_proxy'] || process.env['HTTPS_PROXY']; + } + else { + return process.env['http_proxy'] || process.env['HTTP_PROXY']; + } + })(); + if (proxyVar) { + return new URL(proxyVar); + } + else { + return undefined; + } +} +exports.getProxyUrl = getProxyUrl; +function checkBypass(reqUrl) { + if (!reqUrl.hostname) { + return false; + } + const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || ''; + if (!noProxy) { + return false; + } + // Determine the request port + let reqPort; + if (reqUrl.port) { + reqPort = Number(reqUrl.port); + } + else if (reqUrl.protocol === 'http:') { + reqPort = 80; + } + else if (reqUrl.protocol === 'https:') { + reqPort = 443; + } + // Format the request hostname and hostname with port + const upperReqHosts = [reqUrl.hostname.toUpperCase()]; + if (typeof reqPort === 'number') { + upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`); + } + // Compare request host against noproxy + for (const upperNoProxyItem of noProxy + .split(',') + .map(x => x.trim().toUpperCase()) + .filter(x => x)) { + if (upperReqHosts.some(x => x === upperNoProxyItem)) { + return true; + } + } + return false; +} +exports.checkBypass = checkBypass; +//# sourceMappingURL=proxy.js.map + +/***/ }), + +/***/ 8974: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +Object.defineProperty(exports, "v1", ({ + enumerable: true, + get: function () { + return _v.default; + } +})); +Object.defineProperty(exports, "v3", ({ + enumerable: true, + get: function () { + return _v2.default; + } +})); +Object.defineProperty(exports, "v4", ({ + enumerable: true, + get: function () { + return _v3.default; + } +})); +Object.defineProperty(exports, "v5", ({ + enumerable: true, + get: function () { + return _v4.default; + } +})); +Object.defineProperty(exports, "NIL", ({ + enumerable: true, + get: function () { + return _nil.default; + } +})); +Object.defineProperty(exports, "version", ({ + enumerable: true, + get: function () { + return _version.default; + } +})); +Object.defineProperty(exports, "validate", ({ + enumerable: true, + get: function () { + return _validate.default; + } +})); +Object.defineProperty(exports, "stringify", ({ + enumerable: true, + get: function () { + return _stringify.default; + } +})); +Object.defineProperty(exports, "parse", ({ + enumerable: true, + get: function () { + return 
_parse.default; + } +})); + +var _v = _interopRequireDefault(__nccwpck_require__(1595)); + +var _v2 = _interopRequireDefault(__nccwpck_require__(6993)); + +var _v3 = _interopRequireDefault(__nccwpck_require__(1472)); + +var _v4 = _interopRequireDefault(__nccwpck_require__(6217)); + +var _nil = _interopRequireDefault(__nccwpck_require__(2381)); + +var _version = _interopRequireDefault(__nccwpck_require__(427)); + +var _validate = _interopRequireDefault(__nccwpck_require__(2609)); + +var _stringify = _interopRequireDefault(__nccwpck_require__(1458)); + +var _parse = _interopRequireDefault(__nccwpck_require__(6385)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/***/ }), + +/***/ 5842: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function md5(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('md5').update(bytes).digest(); +} + +var _default = md5; +exports["default"] = _default; + +/***/ }), + +/***/ 2381: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +var _default = '00000000-0000-0000-0000-000000000000'; +exports["default"] = _default; + +/***/ }), + +/***/ 6385: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(2609)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function parse(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + let v; + const arr = new Uint8Array(16); // Parse ########-....-....-....-............ + + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ + + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ + + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ 
+ + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; +} + +var _default = parse; +exports["default"] = _default; + +/***/ }), + +/***/ 6230: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; +exports["default"] = _default; + +/***/ }), + +/***/ 9784: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = rng; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate + +let poolPtr = rnds8Pool.length; + +function rng() { + if (poolPtr > rnds8Pool.length - 16) { + _crypto.default.randomFillSync(rnds8Pool); + + poolPtr = 0; + } + + return rnds8Pool.slice(poolPtr, poolPtr += 16); +} + +/***/ }), + +/***/ 8844: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function sha1(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('sha1').update(bytes).digest(); +} + +var _default = sha1; +exports["default"] = _default; + +/***/ }), + +/***/ 1458: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(2609)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ +const byteToHex = []; + +for (let i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).substr(1)); +} + +function stringify(arr, offset = 0) { + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. 
See https://github.com/uuidjs/uuid/pull/434 + const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields + + if (!(0, _validate.default)(uuid)) { + throw TypeError('Stringified UUID is invalid'); + } + + return uuid; +} + +var _default = stringify; +exports["default"] = _default; + +/***/ }), + +/***/ 1595: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _rng = _interopRequireDefault(__nccwpck_require__(9784)); + +var _stringify = _interopRequireDefault(__nccwpck_require__(1458)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +// **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html +let _nodeId; + +let _clockseq; // Previous uuid creation time + + +let _lastMSecs = 0; +let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + +function v1(options, buf, offset) { + let i = buf && offset || 0; + const b = buf || new Array(16); + options = options || {}; + let node = options.node || _nodeId; + let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. See #189 + + if (node == null || clockseq == null) { + const seedBytes = options.random || (options.rng || _rng.default)(); + + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; + } + + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + + + let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + + let nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + + const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression + + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + + + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested + + + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + + msecs += 12219292800000; // `time_low` + + const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` + + const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` + + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + + b[i++] = clockseq & 0xff; // `node` + + for (let n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf || (0, _stringify.default)(b); +} + +var _default = v1; +exports["default"] = _default; + +/***/ }), + +/***/ 6993: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _v = _interopRequireDefault(__nccwpck_require__(5920)); + +var _md = _interopRequireDefault(__nccwpck_require__(5842)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const v3 = (0, _v.default)('v3', 0x30, _md.default); +var _default = v3; +exports["default"] = _default; + +/***/ }), + +/***/ 5920: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = _default; +exports.URL = exports.DNS = void 0; + +var _stringify = _interopRequireDefault(__nccwpck_require__(1458)); + +var _parse = _interopRequireDefault(__nccwpck_require__(6385)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape + + const bytes = []; + + for (let i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); + } + + return bytes; +} + +const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +exports.DNS = DNS; +const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +exports.URL = URL; + +function _default(name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + if (typeof value === 'string') { + value = stringToBytes(value); + } + + if (typeof namespace === 'string') { + namespace = (0, _parse.default)(namespace); + } + + if (namespace.length !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. `bytes = + // hashfunc([...namespace, ... 
value])` + + + let bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; + } + + return buf; + } + + return (0, _stringify.default)(bytes); + } // Function#name is not settable on some platforms (#270) + + + try { + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support + + + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; +} + +/***/ }), + +/***/ 1472: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _rng = _interopRequireDefault(__nccwpck_require__(9784)); + +var _stringify = _interopRequireDefault(__nccwpck_require__(1458)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function v4(options, buf, offset) { + options = options || {}; + + const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + + + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; + } + + return buf; + } + + return (0, _stringify.default)(rnds); +} + +var _default = v4; +exports["default"] = _default; + +/***/ }), + +/***/ 6217: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _v = _interopRequireDefault(__nccwpck_require__(5920)); + +var _sha = _interopRequireDefault(__nccwpck_require__(8844)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const v5 = (0, _v.default)('v5', 0x50, _sha.default); +var _default = v5; +exports["default"] = _default; + +/***/ }), + +/***/ 2609: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _regex = _interopRequireDefault(__nccwpck_require__(6230)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function validate(uuid) { + return typeof uuid === 'string' && _regex.default.test(uuid); +} + +var _default = validate; +exports["default"] = _default; + +/***/ }), + +/***/ 427: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(2609)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function version(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + return parseInt(uuid.substr(14, 1), 16); +} + +var _default = version; +exports["default"] = _default; + +/***/ }), + /***/ 1514: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { @@ -4669,682 +6463,6 @@ class ExecState extends events.EventEmitter { } //# sourceMappingURL=toolrunner.js.map -/***/ }), - -/***/ 3702: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -class BasicCredentialHandler { - constructor(username, password) { - this.username = username; - this.password = password; - } - prepareRequest(options) { - options.headers['Authorization'] = - 'Basic ' + - Buffer.from(this.username + ':' + this.password).toString('base64'); - } - // This handler cannot handle 401 - canHandleAuthentication(response) { - return false; - } - handleAuthentication(httpClient, requestInfo, objs) { - return null; - } -} -exports.BasicCredentialHandler = BasicCredentialHandler; -class BearerCredentialHandler { - constructor(token) { - this.token = token; - } - // currently implements pre-authorization - // TODO: support preAuth = false where it hooks on 401 - prepareRequest(options) { - options.headers['Authorization'] = 'Bearer ' + this.token; - } - // This handler cannot handle 401 - canHandleAuthentication(response) { - return false; - } - handleAuthentication(httpClient, requestInfo, objs) { - return null; - } -} -exports.BearerCredentialHandler = BearerCredentialHandler; -class PersonalAccessTokenCredentialHandler { - constructor(token) { - this.token = token; - } - // currently implements pre-authorization - // TODO: support preAuth = false where it hooks on 401 - prepareRequest(options) { - options.headers['Authorization'] = - 'Basic ' + Buffer.from('PAT:' + this.token).toString('base64'); - } - // This handler cannot handle 401 - canHandleAuthentication(response) { - return false; - } - handleAuthentication(httpClient, requestInfo, objs) { - return null; - } -} -exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler; - - -/***/ }), - -/***/ 9925: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const http = __nccwpck_require__(3685); -const https = __nccwpck_require__(5687); -const pm = __nccwpck_require__(6443); -let tunnel; -var HttpCodes; -(function (HttpCodes) { - HttpCodes[HttpCodes["OK"] = 200] = "OK"; - HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices"; - HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently"; - HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved"; - HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther"; - HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified"; - HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy"; - HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy"; - HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect"; - HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect"; - HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest"; - HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized"; - HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired"; - HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden"; - HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound"; - HttpCodes[HttpCodes["MethodNotAllowed"] = 405] 
= "MethodNotAllowed"; - HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable"; - HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired"; - HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout"; - HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict"; - HttpCodes[HttpCodes["Gone"] = 410] = "Gone"; - HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests"; - HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError"; - HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented"; - HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway"; - HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable"; - HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout"; -})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {})); -var Headers; -(function (Headers) { - Headers["Accept"] = "accept"; - Headers["ContentType"] = "content-type"; -})(Headers = exports.Headers || (exports.Headers = {})); -var MediaTypes; -(function (MediaTypes) { - MediaTypes["ApplicationJson"] = "application/json"; -})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {})); -/** - * Returns the proxy URL, depending upon the supplied url and proxy environment variables. - * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com - */ -function getProxyUrl(serverUrl) { - let proxyUrl = pm.getProxyUrl(new URL(serverUrl)); - return proxyUrl ? proxyUrl.href : ''; -} -exports.getProxyUrl = getProxyUrl; -const HttpRedirectCodes = [ - HttpCodes.MovedPermanently, - HttpCodes.ResourceMoved, - HttpCodes.SeeOther, - HttpCodes.TemporaryRedirect, - HttpCodes.PermanentRedirect -]; -const HttpResponseRetryCodes = [ - HttpCodes.BadGateway, - HttpCodes.ServiceUnavailable, - HttpCodes.GatewayTimeout -]; -const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD']; -const ExponentialBackoffCeiling = 10; -const ExponentialBackoffTimeSlice = 5; -class HttpClientError extends Error { - constructor(message, statusCode) { - super(message); - this.name = 'HttpClientError'; - this.statusCode = statusCode; - Object.setPrototypeOf(this, HttpClientError.prototype); - } -} -exports.HttpClientError = HttpClientError; -class HttpClientResponse { - constructor(message) { - this.message = message; - } - readBody() { - return new Promise(async (resolve, reject) => { - let output = Buffer.alloc(0); - this.message.on('data', (chunk) => { - output = Buffer.concat([output, chunk]); - }); - this.message.on('end', () => { - resolve(output.toString()); - }); - }); - } -} -exports.HttpClientResponse = HttpClientResponse; -function isHttps(requestUrl) { - let parsedUrl = new URL(requestUrl); - return parsedUrl.protocol === 'https:'; -} -exports.isHttps = isHttps; -class HttpClient { - constructor(userAgent, handlers, requestOptions) { - this._ignoreSslError = false; - this._allowRedirects = true; - this._allowRedirectDowngrade = false; - this._maxRedirects = 50; - this._allowRetries = false; - this._maxRetries = 1; - this._keepAlive = false; - this._disposed = false; - this.userAgent = userAgent; - this.handlers = handlers || []; - this.requestOptions = requestOptions; - if (requestOptions) { - if (requestOptions.ignoreSslError != null) { - this._ignoreSslError = requestOptions.ignoreSslError; - } - this._socketTimeout = requestOptions.socketTimeout; - if (requestOptions.allowRedirects != null) { - this._allowRedirects = requestOptions.allowRedirects; - } - if (requestOptions.allowRedirectDowngrade != null) { - 
this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade; - } - if (requestOptions.maxRedirects != null) { - this._maxRedirects = Math.max(requestOptions.maxRedirects, 0); - } - if (requestOptions.keepAlive != null) { - this._keepAlive = requestOptions.keepAlive; - } - if (requestOptions.allowRetries != null) { - this._allowRetries = requestOptions.allowRetries; - } - if (requestOptions.maxRetries != null) { - this._maxRetries = requestOptions.maxRetries; - } - } - } - options(requestUrl, additionalHeaders) { - return this.request('OPTIONS', requestUrl, null, additionalHeaders || {}); - } - get(requestUrl, additionalHeaders) { - return this.request('GET', requestUrl, null, additionalHeaders || {}); - } - del(requestUrl, additionalHeaders) { - return this.request('DELETE', requestUrl, null, additionalHeaders || {}); - } - post(requestUrl, data, additionalHeaders) { - return this.request('POST', requestUrl, data, additionalHeaders || {}); - } - patch(requestUrl, data, additionalHeaders) { - return this.request('PATCH', requestUrl, data, additionalHeaders || {}); - } - put(requestUrl, data, additionalHeaders) { - return this.request('PUT', requestUrl, data, additionalHeaders || {}); - } - head(requestUrl, additionalHeaders) { - return this.request('HEAD', requestUrl, null, additionalHeaders || {}); - } - sendStream(verb, requestUrl, stream, additionalHeaders) { - return this.request(verb, requestUrl, stream, additionalHeaders); - } - /** - * Gets a typed object from an endpoint - * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise - */ - async getJson(requestUrl, additionalHeaders = {}) { - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - let res = await this.get(requestUrl, additionalHeaders); - return this._processResponse(res, this.requestOptions); - } - async postJson(requestUrl, obj, additionalHeaders = {}) { - let data = JSON.stringify(obj, null, 2); - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); - let res = await this.post(requestUrl, data, additionalHeaders); - return this._processResponse(res, this.requestOptions); - } - async putJson(requestUrl, obj, additionalHeaders = {}) { - let data = JSON.stringify(obj, null, 2); - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); - let res = await this.put(requestUrl, data, additionalHeaders); - return this._processResponse(res, this.requestOptions); - } - async patchJson(requestUrl, obj, additionalHeaders = {}) { - let data = JSON.stringify(obj, null, 2); - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); - let res = await this.patch(requestUrl, data, additionalHeaders); - return this._processResponse(res, this.requestOptions); - } - /** - * Makes a raw http request. 
- * All other methods such as get, post, patch, and request ultimately call this. - * Prefer get, del, post and patch - */ - async request(verb, requestUrl, data, headers) { - if (this._disposed) { - throw new Error('Client has already been disposed.'); - } - let parsedUrl = new URL(requestUrl); - let info = this._prepareRequest(verb, parsedUrl, headers); - // Only perform retries on reads since writes may not be idempotent. - let maxTries = this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1 - ? this._maxRetries + 1 - : 1; - let numTries = 0; - let response; - while (numTries < maxTries) { - response = await this.requestRaw(info, data); - // Check if it's an authentication challenge - if (response && - response.message && - response.message.statusCode === HttpCodes.Unauthorized) { - let authenticationHandler; - for (let i = 0; i < this.handlers.length; i++) { - if (this.handlers[i].canHandleAuthentication(response)) { - authenticationHandler = this.handlers[i]; - break; - } - } - if (authenticationHandler) { - return authenticationHandler.handleAuthentication(this, info, data); - } - else { - // We have received an unauthorized response but have no handlers to handle it. - // Let the response return to the caller. - return response; - } - } - let redirectsRemaining = this._maxRedirects; - while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1 && - this._allowRedirects && - redirectsRemaining > 0) { - const redirectUrl = response.message.headers['location']; - if (!redirectUrl) { - // if there's no location to redirect to, we won't - break; - } - let parsedRedirectUrl = new URL(redirectUrl); - if (parsedUrl.protocol == 'https:' && - parsedUrl.protocol != parsedRedirectUrl.protocol && - !this._allowRedirectDowngrade) { - throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.'); - } - // we need to finish reading the response before reassigning response - // which will leak the open socket. - await response.readBody(); - // strip authorization header if redirected to a different hostname - if (parsedRedirectUrl.hostname !== parsedUrl.hostname) { - for (let header in headers) { - // header names are case insensitive - if (header.toLowerCase() === 'authorization') { - delete headers[header]; - } - } - } - // let's make the request with the new redirectUrl - info = this._prepareRequest(verb, parsedRedirectUrl, headers); - response = await this.requestRaw(info, data); - redirectsRemaining--; - } - if (HttpResponseRetryCodes.indexOf(response.message.statusCode) == -1) { - // If not a retry code, return immediately instead of retrying - return response; - } - numTries += 1; - if (numTries < maxTries) { - await response.readBody(); - await this._performExponentialBackoff(numTries); - } - } - return response; - } - /** - * Needs to be called if keepAlive is set to true in request options. - */ - dispose() { - if (this._agent) { - this._agent.destroy(); - } - this._disposed = true; - } - /** - * Raw request. - * @param info - * @param data - */ - requestRaw(info, data) { - return new Promise((resolve, reject) => { - let callbackForResult = function (err, res) { - if (err) { - reject(err); - } - resolve(res); - }; - this.requestRawWithCallback(info, data, callbackForResult); - }); - } - /** - * Raw request with callback. 
- * @param info - * @param data - * @param onResult - */ - requestRawWithCallback(info, data, onResult) { - let socket; - if (typeof data === 'string') { - info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8'); - } - let callbackCalled = false; - let handleResult = (err, res) => { - if (!callbackCalled) { - callbackCalled = true; - onResult(err, res); - } - }; - let req = info.httpModule.request(info.options, (msg) => { - let res = new HttpClientResponse(msg); - handleResult(null, res); - }); - req.on('socket', sock => { - socket = sock; - }); - // If we ever get disconnected, we want the socket to timeout eventually - req.setTimeout(this._socketTimeout || 3 * 60000, () => { - if (socket) { - socket.end(); - } - handleResult(new Error('Request timeout: ' + info.options.path), null); - }); - req.on('error', function (err) { - // err has statusCode property - // res should have headers - handleResult(err, null); - }); - if (data && typeof data === 'string') { - req.write(data, 'utf8'); - } - if (data && typeof data !== 'string') { - data.on('close', function () { - req.end(); - }); - data.pipe(req); - } - else { - req.end(); - } - } - /** - * Gets an http agent. This function is useful when you need an http agent that handles - * routing through a proxy server - depending upon the url and proxy environment variables. - * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com - */ - getAgent(serverUrl) { - let parsedUrl = new URL(serverUrl); - return this._getAgent(parsedUrl); - } - _prepareRequest(method, requestUrl, headers) { - const info = {}; - info.parsedUrl = requestUrl; - const usingSsl = info.parsedUrl.protocol === 'https:'; - info.httpModule = usingSsl ? https : http; - const defaultPort = usingSsl ? 443 : 80; - info.options = {}; - info.options.host = info.parsedUrl.hostname; - info.options.port = info.parsedUrl.port - ? parseInt(info.parsedUrl.port) - : defaultPort; - info.options.path = - (info.parsedUrl.pathname || '') + (info.parsedUrl.search || ''); - info.options.method = method; - info.options.headers = this._mergeHeaders(headers); - if (this.userAgent != null) { - info.options.headers['user-agent'] = this.userAgent; - } - info.options.agent = this._getAgent(info.parsedUrl); - // gives handlers an opportunity to participate - if (this.handlers) { - this.handlers.forEach(handler => { - handler.prepareRequest(info.options); - }); - } - return info; - } - _mergeHeaders(headers) { - const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); - if (this.requestOptions && this.requestOptions.headers) { - return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers)); - } - return lowercaseKeys(headers || {}); - } - _getExistingOrDefaultHeader(additionalHeaders, header, _default) { - const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); - let clientHeader; - if (this.requestOptions && this.requestOptions.headers) { - clientHeader = lowercaseKeys(this.requestOptions.headers)[header]; - } - return additionalHeaders[header] || clientHeader || _default; - } - _getAgent(parsedUrl) { - let agent; - let proxyUrl = pm.getProxyUrl(parsedUrl); - let useProxy = proxyUrl && proxyUrl.hostname; - if (this._keepAlive && useProxy) { - agent = this._proxyAgent; - } - if (this._keepAlive && !useProxy) { - agent = this._agent; - } - // if agent is already assigned use that agent. 
- if (!!agent) { - return agent; - } - const usingSsl = parsedUrl.protocol === 'https:'; - let maxSockets = 100; - if (!!this.requestOptions) { - maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets; - } - if (useProxy) { - // If using proxy, need tunnel - if (!tunnel) { - tunnel = __nccwpck_require__(4294); - } - const agentOptions = { - maxSockets: maxSockets, - keepAlive: this._keepAlive, - proxy: { - ...((proxyUrl.username || proxyUrl.password) && { - proxyAuth: `${proxyUrl.username}:${proxyUrl.password}` - }), - host: proxyUrl.hostname, - port: proxyUrl.port - } - }; - let tunnelAgent; - const overHttps = proxyUrl.protocol === 'https:'; - if (usingSsl) { - tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp; - } - else { - tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp; - } - agent = tunnelAgent(agentOptions); - this._proxyAgent = agent; - } - // if reusing agent across request and tunneling agent isn't assigned create a new agent - if (this._keepAlive && !agent) { - const options = { keepAlive: this._keepAlive, maxSockets: maxSockets }; - agent = usingSsl ? new https.Agent(options) : new http.Agent(options); - this._agent = agent; - } - // if not using private agent and tunnel agent isn't setup then use global agent - if (!agent) { - agent = usingSsl ? https.globalAgent : http.globalAgent; - } - if (usingSsl && this._ignoreSslError) { - // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process - // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options - // we have to cast it to any and change it directly - agent.options = Object.assign(agent.options || {}, { - rejectUnauthorized: false - }); - } - return agent; - } - _performExponentialBackoff(retryNumber) { - retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber); - const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber); - return new Promise(resolve => setTimeout(() => resolve(), ms)); - } - static dateTimeDeserializer(key, value) { - if (typeof value === 'string') { - let a = new Date(value); - if (!isNaN(a.valueOf())) { - return a; - } - } - return value; - } - async _processResponse(res, options) { - return new Promise(async (resolve, reject) => { - const statusCode = res.message.statusCode; - const response = { - statusCode: statusCode, - result: null, - headers: {} - }; - // not found leads to null obj returned - if (statusCode == HttpCodes.NotFound) { - resolve(response); - } - let obj; - let contents; - // get the result from the body - try { - contents = await res.readBody(); - if (contents && contents.length > 0) { - if (options && options.deserializeDates) { - obj = JSON.parse(contents, HttpClient.dateTimeDeserializer); - } - else { - obj = JSON.parse(contents); - } - response.result = obj; - } - response.headers = res.message.headers; - } - catch (err) { - // Invalid resource (contents not json); leaving result obj null - } - // note that 3xx redirects are handled by the http layer. 
- if (statusCode > 299) { - let msg; - // if exception/error in body, attempt to get better error - if (obj && obj.message) { - msg = obj.message; - } - else if (contents && contents.length > 0) { - // it may be the case that the exception is in the body message as string - msg = contents; - } - else { - msg = 'Failed request: (' + statusCode + ')'; - } - let err = new HttpClientError(msg, statusCode); - err.result = response.result; - reject(err); - } - else { - resolve(response); - } - }); - } -} -exports.HttpClient = HttpClient; - - -/***/ }), - -/***/ 6443: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -function getProxyUrl(reqUrl) { - let usingSsl = reqUrl.protocol === 'https:'; - let proxyUrl; - if (checkBypass(reqUrl)) { - return proxyUrl; - } - let proxyVar; - if (usingSsl) { - proxyVar = process.env['https_proxy'] || process.env['HTTPS_PROXY']; - } - else { - proxyVar = process.env['http_proxy'] || process.env['HTTP_PROXY']; - } - if (proxyVar) { - proxyUrl = new URL(proxyVar); - } - return proxyUrl; -} -exports.getProxyUrl = getProxyUrl; -function checkBypass(reqUrl) { - if (!reqUrl.hostname) { - return false; - } - let noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || ''; - if (!noProxy) { - return false; - } - // Determine the request port - let reqPort; - if (reqUrl.port) { - reqPort = Number(reqUrl.port); - } - else if (reqUrl.protocol === 'http:') { - reqPort = 80; - } - else if (reqUrl.protocol === 'https:') { - reqPort = 443; - } - // Format the request hostname and hostname with port - let upperReqHosts = [reqUrl.hostname.toUpperCase()]; - if (typeof reqPort === 'number') { - upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`); - } - // Compare request host against noproxy - for (let upperNoProxyItem of noProxy - .split(',') - .map(x => x.trim().toUpperCase()) - .filter(x => x)) { - if (upperReqHosts.some(x => x === upperNoProxyItem)) { - return true; - } - } - return false; -} -exports.checkBypass = checkBypass; - - /***/ }), /***/ 1962: diff --git a/dist/setup/index.js b/dist/setup/index.js index 90f833a8..899b0e2e 100644 --- a/dist/setup/index.js +++ b/dist/setup/index.js @@ -3477,6 +3477,7 @@ const file_command_1 = __nccwpck_require__(717); const utils_1 = __nccwpck_require__(5278); const os = __importStar(__nccwpck_require__(2037)); const path = __importStar(__nccwpck_require__(1017)); +const uuid_1 = __nccwpck_require__(8974); const oidc_utils_1 = __nccwpck_require__(8041); /** * The code to exit an action @@ -3506,7 +3507,14 @@ function exportVariable(name, val) { process.env[name] = convertedVal; const filePath = process.env['GITHUB_ENV'] || ''; if (filePath) { - const delimiter = '_GitHubActionsFileCommandDelimeter_'; + const delimiter = `ghadelimiter_${uuid_1.v4()}`; + // These should realistically never happen, but just in case someone finds a way to exploit uuid generation let's not allow keys or values that contain the delimiter. 
+ if (name.includes(delimiter)) { + throw new Error(`Unexpected input: name should not contain the delimiter "${delimiter}"`); + } + if (convertedVal.includes(delimiter)) { + throw new Error(`Unexpected input: value should not contain the delimiter "${delimiter}"`); + } const commandValue = `${name}<<${delimiter}${os.EOL}${convertedVal}${os.EOL}${delimiter}`; file_command_1.issueCommand('ENV', commandValue); } @@ -3752,6 +3760,23 @@ function getIDToken(aud) { }); } exports.getIDToken = getIDToken; +/** + * Summary exports + */ +var summary_1 = __nccwpck_require__(1327); +Object.defineProperty(exports, "summary", ({ enumerable: true, get: function () { return summary_1.summary; } })); +/** + * @deprecated use core.summary + */ +var summary_2 = __nccwpck_require__(1327); +Object.defineProperty(exports, "markdownSummary", ({ enumerable: true, get: function () { return summary_2.markdownSummary; } })); +/** + * Path exports + */ +var path_utils_1 = __nccwpck_require__(2981); +Object.defineProperty(exports, "toPosixPath", ({ enumerable: true, get: function () { return path_utils_1.toPosixPath; } })); +Object.defineProperty(exports, "toWin32Path", ({ enumerable: true, get: function () { return path_utils_1.toWin32Path; } })); +Object.defineProperty(exports, "toPlatformPath", ({ enumerable: true, get: function () { return path_utils_1.toPlatformPath; } })); //# sourceMappingURL=core.js.map /***/ }), @@ -3821,8 +3846,8 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.OidcClient = void 0; -const http_client_1 = __nccwpck_require__(9925); -const auth_1 = __nccwpck_require__(3702); +const http_client_1 = __nccwpck_require__(1404); +const auth_1 = __nccwpck_require__(6758); const core_1 = __nccwpck_require__(2186); class OidcClient { static createHttpClient(allowRetry = true, maxRetry = 10) { @@ -3889,6 +3914,361 @@ exports.OidcClient = OidcClient; /***/ }), +/***/ 2981: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.toPlatformPath = exports.toWin32Path = exports.toPosixPath = void 0; +const path = __importStar(__nccwpck_require__(1017)); +/** + * toPosixPath converts the given path to the posix form. On Windows, \\ will be + * replaced with /. + * + * @param pth. Path to transform. + * @return string Posix path. + */ +function toPosixPath(pth) { + return pth.replace(/[\\]/g, '/'); +} +exports.toPosixPath = toPosixPath; +/** + * toWin32Path converts the given path to the win32 form. On Linux, / will be + * replaced with \\. + * + * @param pth. 
Path to transform. + * @return string Win32 path. + */ +function toWin32Path(pth) { + return pth.replace(/[/]/g, '\\'); +} +exports.toWin32Path = toWin32Path; +/** + * toPlatformPath converts the given path to a platform-specific path. It does + * this by replacing instances of / and \ with the platform-specific path + * separator. + * + * @param pth The path to platformize. + * @return string The platform-specific path. + */ +function toPlatformPath(pth) { + return pth.replace(/[/\\]/g, path.sep); +} +exports.toPlatformPath = toPlatformPath; +//# sourceMappingURL=path-utils.js.map + +/***/ }), + +/***/ 1327: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.summary = exports.markdownSummary = exports.SUMMARY_DOCS_URL = exports.SUMMARY_ENV_VAR = void 0; +const os_1 = __nccwpck_require__(2037); +const fs_1 = __nccwpck_require__(7147); +const { access, appendFile, writeFile } = fs_1.promises; +exports.SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY'; +exports.SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary'; +class Summary { + constructor() { + this._buffer = ''; + } + /** + * Finds the summary file path from the environment, rejects if env var is not found or file does not exist + * Also checks r/w permissions. + * + * @returns step summary file path + */ + filePath() { + return __awaiter(this, void 0, void 0, function* () { + if (this._filePath) { + return this._filePath; + } + const pathFromEnv = process.env[exports.SUMMARY_ENV_VAR]; + if (!pathFromEnv) { + throw new Error(`Unable to find environment variable for $${exports.SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`); + } + try { + yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK); + } + catch (_a) { + throw new Error(`Unable to access summary file: '${pathFromEnv}'. Check if the file has correct read/write permissions.`); + } + this._filePath = pathFromEnv; + return this._filePath; + }); + } + /** + * Wraps content in an HTML tag, adding any HTML attributes + * + * @param {string} tag HTML tag to wrap + * @param {string | null} content content within the tag + * @param {[attribute: string]: string} attrs key-value list of HTML attributes to add + * + * @returns {string} content wrapped in HTML element + */ + wrap(tag, content, attrs = {}) { + const htmlAttrs = Object.entries(attrs) + .map(([key, value]) => ` ${key}="${value}"`) + .join(''); + if (!content) { + return `<${tag}${htmlAttrs}>`; + } + return `<${tag}${htmlAttrs}>${content}`; + } + /** + * Writes text in the buffer to the summary buffer file and empties buffer. Will append by default. 
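
The Summary class added in this hunk buffers HTML fragments in memory and only touches the file named by GITHUB_STEP_SUMMARY when write() is called, appending unless overwrite is set. A small usage sketch (hypothetical helper name) against the core.summary export registered earlier in this file:

// Sketch only: building a job summary with the new core.summary API.
const core = require('@actions/core');

async function reportInstalledNode(version, cacheHit) {
  await core.summary
    .addHeading('setup-node result', 2)
    .addTable([
      [{ data: 'Node version', header: true }, { data: 'Cache hit', header: true }],
      [version, String(cacheHit)]
    ])
    .addLink('Job summary docs', 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary')
    .write(); // appends to $GITHUB_STEP_SUMMARY; pass { overwrite: true } to replace it
}
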
+ * + * @param {SummaryWriteOptions} [options] (optional) options for write operation + * + * @returns {Promise} summary instance + */ + write(options) { + return __awaiter(this, void 0, void 0, function* () { + const overwrite = !!(options === null || options === void 0 ? void 0 : options.overwrite); + const filePath = yield this.filePath(); + const writeFunc = overwrite ? writeFile : appendFile; + yield writeFunc(filePath, this._buffer, { encoding: 'utf8' }); + return this.emptyBuffer(); + }); + } + /** + * Clears the summary buffer and wipes the summary file + * + * @returns {Summary} summary instance + */ + clear() { + return __awaiter(this, void 0, void 0, function* () { + return this.emptyBuffer().write({ overwrite: true }); + }); + } + /** + * Returns the current summary buffer as a string + * + * @returns {string} string of summary buffer + */ + stringify() { + return this._buffer; + } + /** + * If the summary buffer is empty + * + * @returns {boolen} true if the buffer is empty + */ + isEmptyBuffer() { + return this._buffer.length === 0; + } + /** + * Resets the summary buffer without writing to summary file + * + * @returns {Summary} summary instance + */ + emptyBuffer() { + this._buffer = ''; + return this; + } + /** + * Adds raw text to the summary buffer + * + * @param {string} text content to add + * @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false) + * + * @returns {Summary} summary instance + */ + addRaw(text, addEOL = false) { + this._buffer += text; + return addEOL ? this.addEOL() : this; + } + /** + * Adds the operating system-specific end-of-line marker to the buffer + * + * @returns {Summary} summary instance + */ + addEOL() { + return this.addRaw(os_1.EOL); + } + /** + * Adds an HTML codeblock to the summary buffer + * + * @param {string} code content to render within fenced code block + * @param {string} lang (optional) language to syntax highlight code + * + * @returns {Summary} summary instance + */ + addCodeBlock(code, lang) { + const attrs = Object.assign({}, (lang && { lang })); + const element = this.wrap('pre', this.wrap('code', code), attrs); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML list to the summary buffer + * + * @param {string[]} items list of items to render + * @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false) + * + * @returns {Summary} summary instance + */ + addList(items, ordered = false) { + const tag = ordered ? 'ol' : 'ul'; + const listItems = items.map(item => this.wrap('li', item)).join(''); + const element = this.wrap(tag, listItems); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML table to the summary buffer + * + * @param {SummaryTableCell[]} rows table rows + * + * @returns {Summary} summary instance + */ + addTable(rows) { + const tableBody = rows + .map(row => { + const cells = row + .map(cell => { + if (typeof cell === 'string') { + return this.wrap('td', cell); + } + const { header, data, colspan, rowspan } = cell; + const tag = header ? 
'th' : 'td'; + const attrs = Object.assign(Object.assign({}, (colspan && { colspan })), (rowspan && { rowspan })); + return this.wrap(tag, data, attrs); + }) + .join(''); + return this.wrap('tr', cells); + }) + .join(''); + const element = this.wrap('table', tableBody); + return this.addRaw(element).addEOL(); + } + /** + * Adds a collapsable HTML details element to the summary buffer + * + * @param {string} label text for the closed state + * @param {string} content collapsable content + * + * @returns {Summary} summary instance + */ + addDetails(label, content) { + const element = this.wrap('details', this.wrap('summary', label) + content); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML image tag to the summary buffer + * + * @param {string} src path to the image you to embed + * @param {string} alt text description of the image + * @param {SummaryImageOptions} options (optional) addition image attributes + * + * @returns {Summary} summary instance + */ + addImage(src, alt, options) { + const { width, height } = options || {}; + const attrs = Object.assign(Object.assign({}, (width && { width })), (height && { height })); + const element = this.wrap('img', null, Object.assign({ src, alt }, attrs)); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML section heading element + * + * @param {string} text heading text + * @param {number | string} [level=1] (optional) the heading level, default: 1 + * + * @returns {Summary} summary instance + */ + addHeading(text, level) { + const tag = `h${level}`; + const allowedTag = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'].includes(tag) + ? tag + : 'h1'; + const element = this.wrap(allowedTag, text); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML thematic break (
) to the summary buffer + * + * @returns {Summary} summary instance + */ + addSeparator() { + const element = this.wrap('hr', null); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML line break (
) to the summary buffer + * + * @returns {Summary} summary instance + */ + addBreak() { + const element = this.wrap('br', null); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML blockquote to the summary buffer + * + * @param {string} text quote text + * @param {string} cite (optional) citation url + * + * @returns {Summary} summary instance + */ + addQuote(text, cite) { + const attrs = Object.assign({}, (cite && { cite })); + const element = this.wrap('blockquote', text, attrs); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML anchor tag to the summary buffer + * + * @param {string} text link text/content + * @param {string} href hyperlink + * + * @returns {Summary} summary instance + */ + addLink(text, href) { + const element = this.wrap('a', text, { href }); + return this.addRaw(element).addEOL(); + } +} +const _summary = new Summary(); +/** + * @deprecated use `core.summary` + */ +exports.markdownSummary = _summary; +exports.summary = _summary; +//# sourceMappingURL=summary.js.map + +/***/ }), + /***/ 5278: /***/ ((__unused_webpack_module, exports) => { @@ -3936,6 +4316,1420 @@ exports.toCommandProperties = toCommandProperties; /***/ }), +/***/ 6758: +/***/ (function(__unused_webpack_module, exports) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.PersonalAccessTokenCredentialHandler = exports.BearerCredentialHandler = exports.BasicCredentialHandler = void 0; +class BasicCredentialHandler { + constructor(username, password) { + this.username = username; + this.password = password; + } + prepareRequest(options) { + if (!options.headers) { + throw Error('The request has no headers'); + } + options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`; + } + // This handler cannot handle 401 + canHandleAuthentication() { + return false; + } + handleAuthentication() { + return __awaiter(this, void 0, void 0, function* () { + throw new Error('not implemented'); + }); + } +} +exports.BasicCredentialHandler = BasicCredentialHandler; +class BearerCredentialHandler { + constructor(token) { + this.token = token; + } + // currently implements pre-authorization + // TODO: support preAuth = false where it hooks on 401 + prepareRequest(options) { + if (!options.headers) { + throw Error('The request has no headers'); + } + options.headers['Authorization'] = `Bearer ${this.token}`; + } + // This handler cannot handle 401 + canHandleAuthentication() { + return false; + } + handleAuthentication() { + return __awaiter(this, void 0, void 0, function* () { + throw new Error('not implemented'); + }); + } +} +exports.BearerCredentialHandler = BearerCredentialHandler; +class PersonalAccessTokenCredentialHandler { + constructor(token) { + this.token = token; + } + // currently implements pre-authorization + // TODO: support preAuth = 
false where it hooks on 401 + prepareRequest(options) { + if (!options.headers) { + throw Error('The request has no headers'); + } + options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`; + } + // This handler cannot handle 401 + canHandleAuthentication() { + return false; + } + handleAuthentication() { + return __awaiter(this, void 0, void 0, function* () { + throw new Error('not implemented'); + }); + } +} +exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler; +//# sourceMappingURL=auth.js.map + +/***/ }), + +/***/ 1404: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +/* eslint-disable @typescript-eslint/no-explicit-any */ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0; +const http = __importStar(__nccwpck_require__(3685)); +const https = __importStar(__nccwpck_require__(5687)); +const pm = __importStar(__nccwpck_require__(2843)); +const tunnel = __importStar(__nccwpck_require__(4294)); +var HttpCodes; +(function (HttpCodes) { + HttpCodes[HttpCodes["OK"] = 200] = "OK"; + HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices"; + HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently"; + HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved"; + HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther"; + HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified"; + HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy"; + HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy"; + HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect"; + HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect"; + HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest"; + HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized"; + HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired"; + HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden"; + HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound"; + HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed"; + HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable"; + HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired"; + HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout"; + HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict"; + HttpCodes[HttpCodes["Gone"] = 410] = "Gone"; + HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests"; + HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError"; + HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented"; + HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway"; + HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable"; + HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout"; +})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {})); +var Headers; +(function (Headers) { + Headers["Accept"] = "accept"; + Headers["ContentType"] = "content-type"; +})(Headers = exports.Headers || (exports.Headers = {})); +var MediaTypes; +(function (MediaTypes) { + MediaTypes["ApplicationJson"] = "application/json"; +})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {})); +/** + * Returns the proxy URL, depending upon the supplied url and proxy environment variables. + * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com + */ +function getProxyUrl(serverUrl) { + const proxyUrl = pm.getProxyUrl(new URL(serverUrl)); + return proxyUrl ? 
proxyUrl.href : ''; +} +exports.getProxyUrl = getProxyUrl; +const HttpRedirectCodes = [ + HttpCodes.MovedPermanently, + HttpCodes.ResourceMoved, + HttpCodes.SeeOther, + HttpCodes.TemporaryRedirect, + HttpCodes.PermanentRedirect +]; +const HttpResponseRetryCodes = [ + HttpCodes.BadGateway, + HttpCodes.ServiceUnavailable, + HttpCodes.GatewayTimeout +]; +const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD']; +const ExponentialBackoffCeiling = 10; +const ExponentialBackoffTimeSlice = 5; +class HttpClientError extends Error { + constructor(message, statusCode) { + super(message); + this.name = 'HttpClientError'; + this.statusCode = statusCode; + Object.setPrototypeOf(this, HttpClientError.prototype); + } +} +exports.HttpClientError = HttpClientError; +class HttpClientResponse { + constructor(message) { + this.message = message; + } + readBody() { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () { + let output = Buffer.alloc(0); + this.message.on('data', (chunk) => { + output = Buffer.concat([output, chunk]); + }); + this.message.on('end', () => { + resolve(output.toString()); + }); + })); + }); + } +} +exports.HttpClientResponse = HttpClientResponse; +function isHttps(requestUrl) { + const parsedUrl = new URL(requestUrl); + return parsedUrl.protocol === 'https:'; +} +exports.isHttps = isHttps; +class HttpClient { + constructor(userAgent, handlers, requestOptions) { + this._ignoreSslError = false; + this._allowRedirects = true; + this._allowRedirectDowngrade = false; + this._maxRedirects = 50; + this._allowRetries = false; + this._maxRetries = 1; + this._keepAlive = false; + this._disposed = false; + this.userAgent = userAgent; + this.handlers = handlers || []; + this.requestOptions = requestOptions; + if (requestOptions) { + if (requestOptions.ignoreSslError != null) { + this._ignoreSslError = requestOptions.ignoreSslError; + } + this._socketTimeout = requestOptions.socketTimeout; + if (requestOptions.allowRedirects != null) { + this._allowRedirects = requestOptions.allowRedirects; + } + if (requestOptions.allowRedirectDowngrade != null) { + this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade; + } + if (requestOptions.maxRedirects != null) { + this._maxRedirects = Math.max(requestOptions.maxRedirects, 0); + } + if (requestOptions.keepAlive != null) { + this._keepAlive = requestOptions.keepAlive; + } + if (requestOptions.allowRetries != null) { + this._allowRetries = requestOptions.allowRetries; + } + if (requestOptions.maxRetries != null) { + this._maxRetries = requestOptions.maxRetries; + } + } + } + options(requestUrl, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('OPTIONS', requestUrl, null, additionalHeaders || {}); + }); + } + get(requestUrl, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('GET', requestUrl, null, additionalHeaders || {}); + }); + } + del(requestUrl, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('DELETE', requestUrl, null, additionalHeaders || {}); + }); + } + post(requestUrl, data, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('POST', requestUrl, data, additionalHeaders || {}); + }); + } + patch(requestUrl, data, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('PATCH', requestUrl, data, additionalHeaders 
|| {}); + }); + } + put(requestUrl, data, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('PUT', requestUrl, data, additionalHeaders || {}); + }); + } + head(requestUrl, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('HEAD', requestUrl, null, additionalHeaders || {}); + }); + } + sendStream(verb, requestUrl, stream, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request(verb, requestUrl, stream, additionalHeaders); + }); + } + /** + * Gets a typed object from an endpoint + * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise + */ + getJson(requestUrl, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + const res = yield this.get(requestUrl, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); + } + postJson(requestUrl, obj, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + const data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + const res = yield this.post(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); + } + putJson(requestUrl, obj, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + const data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + const res = yield this.put(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); + } + patchJson(requestUrl, obj, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + const data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + const res = yield this.patch(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); + } + /** + * Makes a raw http request. + * All other methods such as get, post, patch, and request ultimately call this. + * Prefer get, del, post and patch + */ + request(verb, requestUrl, data, headers) { + return __awaiter(this, void 0, void 0, function* () { + if (this._disposed) { + throw new Error('Client has already been disposed.'); + } + const parsedUrl = new URL(requestUrl); + let info = this._prepareRequest(verb, parsedUrl, headers); + // Only perform retries on reads since writes may not be idempotent. + const maxTries = this._allowRetries && RetryableHttpVerbs.includes(verb) + ? 
this._maxRetries + 1 + : 1; + let numTries = 0; + let response; + do { + response = yield this.requestRaw(info, data); + // Check if it's an authentication challenge + if (response && + response.message && + response.message.statusCode === HttpCodes.Unauthorized) { + let authenticationHandler; + for (const handler of this.handlers) { + if (handler.canHandleAuthentication(response)) { + authenticationHandler = handler; + break; + } + } + if (authenticationHandler) { + return authenticationHandler.handleAuthentication(this, info, data); + } + else { + // We have received an unauthorized response but have no handlers to handle it. + // Let the response return to the caller. + return response; + } + } + let redirectsRemaining = this._maxRedirects; + while (response.message.statusCode && + HttpRedirectCodes.includes(response.message.statusCode) && + this._allowRedirects && + redirectsRemaining > 0) { + const redirectUrl = response.message.headers['location']; + if (!redirectUrl) { + // if there's no location to redirect to, we won't + break; + } + const parsedRedirectUrl = new URL(redirectUrl); + if (parsedUrl.protocol === 'https:' && + parsedUrl.protocol !== parsedRedirectUrl.protocol && + !this._allowRedirectDowngrade) { + throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.'); + } + // we need to finish reading the response before reassigning response + // which will leak the open socket. + yield response.readBody(); + // strip authorization header if redirected to a different hostname + if (parsedRedirectUrl.hostname !== parsedUrl.hostname) { + for (const header in headers) { + // header names are case insensitive + if (header.toLowerCase() === 'authorization') { + delete headers[header]; + } + } + } + // let's make the request with the new redirectUrl + info = this._prepareRequest(verb, parsedRedirectUrl, headers); + response = yield this.requestRaw(info, data); + redirectsRemaining--; + } + if (!response.message.statusCode || + !HttpResponseRetryCodes.includes(response.message.statusCode)) { + // If not a retry code, return immediately instead of retrying + return response; + } + numTries += 1; + if (numTries < maxTries) { + yield response.readBody(); + yield this._performExponentialBackoff(numTries); + } + } while (numTries < maxTries); + return response; + }); + } + /** + * Needs to be called if keepAlive is set to true in request options. + */ + dispose() { + if (this._agent) { + this._agent.destroy(); + } + this._disposed = true; + } + /** + * Raw request. + * @param info + * @param data + */ + requestRaw(info, data) { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve, reject) => { + function callbackForResult(err, res) { + if (err) { + reject(err); + } + else if (!res) { + // If `err` is not passed, then `res` must be passed. + reject(new Error('Unknown error')); + } + else { + resolve(res); + } + } + this.requestRawWithCallback(info, data, callbackForResult); + }); + }); + } + /** + * Raw request with callback. 
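
The request() loop above only retries idempotent verbs (OPTIONS, GET, DELETE, HEAD) when allowRetries is set, and _performExponentialBackoff sleeps ExponentialBackoffTimeSlice * 2^n milliseconds with n capped at ExponentialBackoffCeiling. A standalone sketch of those two decisions, with constant names copied from the bundle:

// Sketch only: mirrors the retry policy and backoff schedule in HttpClient above.
const RETRYABLE_VERBS = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];
const BACKOFF_CEILING = 10;  // ExponentialBackoffCeiling
const BACKOFF_SLICE_MS = 5;  // ExponentialBackoffTimeSlice

function maxTries(verb, allowRetries, maxRetries) {
  // writes are never retried because they may not be idempotent
  return allowRetries && RETRYABLE_VERBS.includes(verb) ? maxRetries + 1 : 1;
}

function backoffDelayMs(retryNumber) {
  return BACKOFF_SLICE_MS * Math.pow(2, Math.min(BACKOFF_CEILING, retryNumber));
}

// maxTries('GET', true, 3) === 4, while maxTries('POST', true, 3) === 1
// backoffDelayMs(1) === 10, backoffDelayMs(4) === 80, capped at 5 * 2^10 === 5120 ms
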
+ * @param info + * @param data + * @param onResult + */ + requestRawWithCallback(info, data, onResult) { + if (typeof data === 'string') { + if (!info.options.headers) { + info.options.headers = {}; + } + info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8'); + } + let callbackCalled = false; + function handleResult(err, res) { + if (!callbackCalled) { + callbackCalled = true; + onResult(err, res); + } + } + const req = info.httpModule.request(info.options, (msg) => { + const res = new HttpClientResponse(msg); + handleResult(undefined, res); + }); + let socket; + req.on('socket', sock => { + socket = sock; + }); + // If we ever get disconnected, we want the socket to timeout eventually + req.setTimeout(this._socketTimeout || 3 * 60000, () => { + if (socket) { + socket.end(); + } + handleResult(new Error(`Request timeout: ${info.options.path}`)); + }); + req.on('error', function (err) { + // err has statusCode property + // res should have headers + handleResult(err); + }); + if (data && typeof data === 'string') { + req.write(data, 'utf8'); + } + if (data && typeof data !== 'string') { + data.on('close', function () { + req.end(); + }); + data.pipe(req); + } + else { + req.end(); + } + } + /** + * Gets an http agent. This function is useful when you need an http agent that handles + * routing through a proxy server - depending upon the url and proxy environment variables. + * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com + */ + getAgent(serverUrl) { + const parsedUrl = new URL(serverUrl); + return this._getAgent(parsedUrl); + } + _prepareRequest(method, requestUrl, headers) { + const info = {}; + info.parsedUrl = requestUrl; + const usingSsl = info.parsedUrl.protocol === 'https:'; + info.httpModule = usingSsl ? https : http; + const defaultPort = usingSsl ? 443 : 80; + info.options = {}; + info.options.host = info.parsedUrl.hostname; + info.options.port = info.parsedUrl.port + ? parseInt(info.parsedUrl.port) + : defaultPort; + info.options.path = + (info.parsedUrl.pathname || '') + (info.parsedUrl.search || ''); + info.options.method = method; + info.options.headers = this._mergeHeaders(headers); + if (this.userAgent != null) { + info.options.headers['user-agent'] = this.userAgent; + } + info.options.agent = this._getAgent(info.parsedUrl); + // gives handlers an opportunity to participate + if (this.handlers) { + for (const handler of this.handlers) { + handler.prepareRequest(info.options); + } + } + return info; + } + _mergeHeaders(headers) { + if (this.requestOptions && this.requestOptions.headers) { + return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers || {})); + } + return lowercaseKeys(headers || {}); + } + _getExistingOrDefaultHeader(additionalHeaders, header, _default) { + let clientHeader; + if (this.requestOptions && this.requestOptions.headers) { + clientHeader = lowercaseKeys(this.requestOptions.headers)[header]; + } + return additionalHeaders[header] || clientHeader || _default; + } + _getAgent(parsedUrl) { + let agent; + const proxyUrl = pm.getProxyUrl(parsedUrl); + const useProxy = proxyUrl && proxyUrl.hostname; + if (this._keepAlive && useProxy) { + agent = this._proxyAgent; + } + if (this._keepAlive && !useProxy) { + agent = this._agent; + } + // if agent is already assigned use that agent. 
+ if (agent) { + return agent; + } + const usingSsl = parsedUrl.protocol === 'https:'; + let maxSockets = 100; + if (this.requestOptions) { + maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets; + } + // This is `useProxy` again, but we need to check `proxyURl` directly for TypeScripts's flow analysis. + if (proxyUrl && proxyUrl.hostname) { + const agentOptions = { + maxSockets, + keepAlive: this._keepAlive, + proxy: Object.assign(Object.assign({}, ((proxyUrl.username || proxyUrl.password) && { + proxyAuth: `${proxyUrl.username}:${proxyUrl.password}` + })), { host: proxyUrl.hostname, port: proxyUrl.port }) + }; + let tunnelAgent; + const overHttps = proxyUrl.protocol === 'https:'; + if (usingSsl) { + tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp; + } + else { + tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp; + } + agent = tunnelAgent(agentOptions); + this._proxyAgent = agent; + } + // if reusing agent across request and tunneling agent isn't assigned create a new agent + if (this._keepAlive && !agent) { + const options = { keepAlive: this._keepAlive, maxSockets }; + agent = usingSsl ? new https.Agent(options) : new http.Agent(options); + this._agent = agent; + } + // if not using private agent and tunnel agent isn't setup then use global agent + if (!agent) { + agent = usingSsl ? https.globalAgent : http.globalAgent; + } + if (usingSsl && this._ignoreSslError) { + // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process + // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options + // we have to cast it to any and change it directly + agent.options = Object.assign(agent.options || {}, { + rejectUnauthorized: false + }); + } + return agent; + } + _performExponentialBackoff(retryNumber) { + return __awaiter(this, void 0, void 0, function* () { + retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber); + const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber); + return new Promise(resolve => setTimeout(() => resolve(), ms)); + }); + } + _processResponse(res, options) { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () { + const statusCode = res.message.statusCode || 0; + const response = { + statusCode, + result: null, + headers: {} + }; + // not found leads to null obj returned + if (statusCode === HttpCodes.NotFound) { + resolve(response); + } + // get the result from the body + function dateTimeDeserializer(key, value) { + if (typeof value === 'string') { + const a = new Date(value); + if (!isNaN(a.valueOf())) { + return a; + } + } + return value; + } + let obj; + let contents; + try { + contents = yield res.readBody(); + if (contents && contents.length > 0) { + if (options && options.deserializeDates) { + obj = JSON.parse(contents, dateTimeDeserializer); + } + else { + obj = JSON.parse(contents); + } + response.result = obj; + } + response.headers = res.message.headers; + } + catch (err) { + // Invalid resource (contents not json); leaving result obj null + } + // note that 3xx redirects are handled by the http layer. 
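
_getAgent above picks between a tunnelling proxy agent and a plain http(s) agent based on pm.getProxyUrl, which consults https_proxy/http_proxy and skips hosts listed in no_proxy (see the checkBypass logic in the proxy module further down). A simplified, self-contained sketch of that environment-driven decision (the bundled checkBypass also matches host:port entries):

// Sketch only: mirrors the proxy lookup consulted by _getAgent above.
function proxyFor(requestUrl, env = process.env) {
  const url = new URL(requestUrl);
  const noProxy = env.no_proxy || env.NO_PROXY || '';
  const bypassed = noProxy
    .split(',')
    .map(entry => entry.trim().toUpperCase())
    .filter(Boolean)
    .includes(url.hostname.toUpperCase());
  if (bypassed) {
    return undefined; // the global http/https agent is used, no tunnel
  }
  const proxyVar = url.protocol === 'https:'
    ? env.https_proxy || env.HTTPS_PROXY
    : env.http_proxy || env.HTTP_PROXY;
  return proxyVar ? new URL(proxyVar) : undefined;
}

// With HTTPS_PROXY=http://proxy:8080 and NO_PROXY=registry.npmjs.org:
//   proxyFor('https://nodejs.org/dist/')      -> URL for http://proxy:8080 (tunnel agent)
//   proxyFor('https://registry.npmjs.org/')   -> undefined (direct connection)
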
+ if (statusCode > 299) { + let msg; + // if exception/error in body, attempt to get better error + if (obj && obj.message) { + msg = obj.message; + } + else if (contents && contents.length > 0) { + // it may be the case that the exception is in the body message as string + msg = contents; + } + else { + msg = `Failed request: (${statusCode})`; + } + const err = new HttpClientError(msg, statusCode); + err.result = response.result; + reject(err); + } + else { + resolve(response); + } + })); + }); + } +} +exports.HttpClient = HttpClient; +const lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 2843: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.checkBypass = exports.getProxyUrl = void 0; +function getProxyUrl(reqUrl) { + const usingSsl = reqUrl.protocol === 'https:'; + if (checkBypass(reqUrl)) { + return undefined; + } + const proxyVar = (() => { + if (usingSsl) { + return process.env['https_proxy'] || process.env['HTTPS_PROXY']; + } + else { + return process.env['http_proxy'] || process.env['HTTP_PROXY']; + } + })(); + if (proxyVar) { + return new URL(proxyVar); + } + else { + return undefined; + } +} +exports.getProxyUrl = getProxyUrl; +function checkBypass(reqUrl) { + if (!reqUrl.hostname) { + return false; + } + const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || ''; + if (!noProxy) { + return false; + } + // Determine the request port + let reqPort; + if (reqUrl.port) { + reqPort = Number(reqUrl.port); + } + else if (reqUrl.protocol === 'http:') { + reqPort = 80; + } + else if (reqUrl.protocol === 'https:') { + reqPort = 443; + } + // Format the request hostname and hostname with port + const upperReqHosts = [reqUrl.hostname.toUpperCase()]; + if (typeof reqPort === 'number') { + upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`); + } + // Compare request host against noproxy + for (const upperNoProxyItem of noProxy + .split(',') + .map(x => x.trim().toUpperCase()) + .filter(x => x)) { + if (upperReqHosts.some(x => x === upperNoProxyItem)) { + return true; + } + } + return false; +} +exports.checkBypass = checkBypass; +//# sourceMappingURL=proxy.js.map + +/***/ }), + +/***/ 8974: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +Object.defineProperty(exports, "v1", ({ + enumerable: true, + get: function () { + return _v.default; + } +})); +Object.defineProperty(exports, "v3", ({ + enumerable: true, + get: function () { + return _v2.default; + } +})); +Object.defineProperty(exports, "v4", ({ + enumerable: true, + get: function () { + return _v3.default; + } +})); +Object.defineProperty(exports, "v5", ({ + enumerable: true, + get: function () { + return _v4.default; + } +})); +Object.defineProperty(exports, "NIL", ({ + enumerable: true, + get: function () { + return _nil.default; + } +})); +Object.defineProperty(exports, "version", ({ + enumerable: true, + get: function () { + return _version.default; + } +})); +Object.defineProperty(exports, "validate", ({ + enumerable: true, + get: function () { + return _validate.default; + } +})); +Object.defineProperty(exports, "stringify", ({ + enumerable: true, + get: function () { + return _stringify.default; + } +})); +Object.defineProperty(exports, "parse", ({ + enumerable: true, + get: function () { + return 
_parse.default; + } +})); + +var _v = _interopRequireDefault(__nccwpck_require__(1595)); + +var _v2 = _interopRequireDefault(__nccwpck_require__(6993)); + +var _v3 = _interopRequireDefault(__nccwpck_require__(1472)); + +var _v4 = _interopRequireDefault(__nccwpck_require__(6217)); + +var _nil = _interopRequireDefault(__nccwpck_require__(2381)); + +var _version = _interopRequireDefault(__nccwpck_require__(427)); + +var _validate = _interopRequireDefault(__nccwpck_require__(2609)); + +var _stringify = _interopRequireDefault(__nccwpck_require__(1458)); + +var _parse = _interopRequireDefault(__nccwpck_require__(6385)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/***/ }), + +/***/ 5842: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function md5(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('md5').update(bytes).digest(); +} + +var _default = md5; +exports["default"] = _default; + +/***/ }), + +/***/ 2381: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +var _default = '00000000-0000-0000-0000-000000000000'; +exports["default"] = _default; + +/***/ }), + +/***/ 6385: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(2609)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function parse(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + let v; + const arr = new Uint8Array(16); // Parse ########-....-....-....-............ + + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ + + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ + + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ 
+ + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; +} + +var _default = parse; +exports["default"] = _default; + +/***/ }), + +/***/ 6230: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; +exports["default"] = _default; + +/***/ }), + +/***/ 9784: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = rng; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate + +let poolPtr = rnds8Pool.length; + +function rng() { + if (poolPtr > rnds8Pool.length - 16) { + _crypto.default.randomFillSync(rnds8Pool); + + poolPtr = 0; + } + + return rnds8Pool.slice(poolPtr, poolPtr += 16); +} + +/***/ }), + +/***/ 8844: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function sha1(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('sha1').update(bytes).digest(); +} + +var _default = sha1; +exports["default"] = _default; + +/***/ }), + +/***/ 1458: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(2609)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ +const byteToHex = []; + +for (let i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).substr(1)); +} + +function stringify(arr, offset = 0) { + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. 
See https://github.com/uuidjs/uuid/pull/434 + const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields + + if (!(0, _validate.default)(uuid)) { + throw TypeError('Stringified UUID is invalid'); + } + + return uuid; +} + +var _default = stringify; +exports["default"] = _default; + +/***/ }), + +/***/ 1595: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _rng = _interopRequireDefault(__nccwpck_require__(9784)); + +var _stringify = _interopRequireDefault(__nccwpck_require__(1458)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +// **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html +let _nodeId; + +let _clockseq; // Previous uuid creation time + + +let _lastMSecs = 0; +let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + +function v1(options, buf, offset) { + let i = buf && offset || 0; + const b = buf || new Array(16); + options = options || {}; + let node = options.node || _nodeId; + let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. See #189 + + if (node == null || clockseq == null) { + const seedBytes = options.random || (options.rng || _rng.default)(); + + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; + } + + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + + + let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + + let nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + + const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression + + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + + + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested + + + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + + msecs += 12219292800000; // `time_low` + + const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` + + const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` + + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + + b[i++] = clockseq & 0xff; // `node` + + for (let n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf || (0, _stringify.default)(b); +} + +var _default = v1; +exports["default"] = _default; + +/***/ }), + +/***/ 6993: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _v = _interopRequireDefault(__nccwpck_require__(5920)); + +var _md = _interopRequireDefault(__nccwpck_require__(5842)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const v3 = (0, _v.default)('v3', 0x30, _md.default); +var _default = v3; +exports["default"] = _default; + +/***/ }), + +/***/ 5920: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = _default; +exports.URL = exports.DNS = void 0; + +var _stringify = _interopRequireDefault(__nccwpck_require__(1458)); + +var _parse = _interopRequireDefault(__nccwpck_require__(6385)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape + + const bytes = []; + + for (let i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); + } + + return bytes; +} + +const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +exports.DNS = DNS; +const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +exports.URL = URL; + +function _default(name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + if (typeof value === 'string') { + value = stringToBytes(value); + } + + if (typeof namespace === 'string') { + namespace = (0, _parse.default)(namespace); + } + + if (namespace.length !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. `bytes = + // hashfunc([...namespace, ... 
value])` + + + let bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; + } + + return buf; + } + + return (0, _stringify.default)(bytes); + } // Function#name is not settable on some platforms (#270) + + + try { + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support + + + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; +} + +/***/ }), + +/***/ 1472: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _rng = _interopRequireDefault(__nccwpck_require__(9784)); + +var _stringify = _interopRequireDefault(__nccwpck_require__(1458)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function v4(options, buf, offset) { + options = options || {}; + + const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + + + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; + } + + return buf; + } + + return (0, _stringify.default)(rnds); +} + +var _default = v4; +exports["default"] = _default; + +/***/ }), + +/***/ 6217: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _v = _interopRequireDefault(__nccwpck_require__(5920)); + +var _sha = _interopRequireDefault(__nccwpck_require__(8844)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const v5 = (0, _v.default)('v5', 0x50, _sha.default); +var _default = v5; +exports["default"] = _default; + +/***/ }), + +/***/ 2609: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _regex = _interopRequireDefault(__nccwpck_require__(6230)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function validate(uuid) { + return typeof uuid === 'string' && _regex.default.test(uuid); +} + +var _default = validate; +exports["default"] = _default; + +/***/ }), + +/***/ 427: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(2609)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function version(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + return parseInt(uuid.substr(14, 1), 16); +} + +var _default = version; +exports["default"] = _default; + +/***/ }), + /***/ 1514: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { @@ -5944,72 +7738,6 @@ class SearchState { exports.SearchState = SearchState; //# sourceMappingURL=internal-search-state.js.map -/***/ }), - -/***/ 3702: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -class BasicCredentialHandler { - constructor(username, password) { - this.username = username; - this.password = password; - } - prepareRequest(options) { - options.headers['Authorization'] = - 'Basic ' + - Buffer.from(this.username + ':' + this.password).toString('base64'); - } - // This handler cannot handle 401 - canHandleAuthentication(response) { - return false; - } - handleAuthentication(httpClient, requestInfo, objs) { - return null; - } -} -exports.BasicCredentialHandler = BasicCredentialHandler; -class BearerCredentialHandler { - constructor(token) { - this.token = token; - } - // currently implements pre-authorization - // TODO: support preAuth = false where it hooks on 401 - prepareRequest(options) { - options.headers['Authorization'] = 'Bearer ' + this.token; - } - // This handler cannot handle 401 - canHandleAuthentication(response) { - return false; - } - handleAuthentication(httpClient, requestInfo, objs) { - return null; - } -} -exports.BearerCredentialHandler = BearerCredentialHandler; -class PersonalAccessTokenCredentialHandler { - constructor(token) { - this.token = token; - } - // currently implements pre-authorization - // TODO: support preAuth = false where it hooks on 401 - prepareRequest(options) { - options.headers['Authorization'] = - 'Basic ' + Buffer.from('PAT:' + this.token).toString('base64'); - } - // This handler cannot handle 401 - canHandleAuthentication(response) { - return false; - } - handleAuthentication(httpClient, requestInfo, objs) { - return null; - } -} -exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler; - - /***/ }), /***/ 9925: diff --git a/package-lock.json b/package-lock.json index 2409f72e..7c9bcca7 100644 --- a/package-lock.json +++ b/package-lock.json @@ -74,11 +74,28 @@ } }, "node_modules/@actions/core": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.6.0.tgz", - "integrity": "sha512-NB1UAZomZlCV/LmJqkLhNTqtKfFXJZAUPcfl/zqG7EfsQdeUJtaWO98SGbuQ3pydJ3fHl2CvI/51OKYlCYYcaw==", + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.9.1.tgz", + "integrity": "sha512-5ad+U2YGrmmiw6du20AQW5XuWo7UKN2052FjSV7MX+Wfjf8sCqcsZe62NfgHys4QI4/Y+vQvLKYL8jWtA1ZBTA==", "dependencies": { - "@actions/http-client": "^1.0.11" + "@actions/http-client": "^2.0.1", + "uuid": "^8.3.2" + } + }, + "node_modules/@actions/core/node_modules/@actions/http-client": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.0.1.tgz", + "integrity": "sha512-PIXiMVtz6VvyaRsGY268qvj57hXQEpsYogYOu2nrQhlf+XCGmZstmuZBbAybUl1nQGnvS1k1eEsQ69ZoD7xlSw==", + "dependencies": { + "tunnel": "^0.0.6" + } + }, + "node_modules/@actions/core/node_modules/uuid": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": 
"sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", + "bin": { + "uuid": "dist/bin/uuid" } }, "node_modules/@actions/exec": { @@ -5127,11 +5144,27 @@ } }, "@actions/core": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.6.0.tgz", - "integrity": "sha512-NB1UAZomZlCV/LmJqkLhNTqtKfFXJZAUPcfl/zqG7EfsQdeUJtaWO98SGbuQ3pydJ3fHl2CvI/51OKYlCYYcaw==", + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.9.1.tgz", + "integrity": "sha512-5ad+U2YGrmmiw6du20AQW5XuWo7UKN2052FjSV7MX+Wfjf8sCqcsZe62NfgHys4QI4/Y+vQvLKYL8jWtA1ZBTA==", "requires": { - "@actions/http-client": "^1.0.11" + "@actions/http-client": "^2.0.1", + "uuid": "^8.3.2" + }, + "dependencies": { + "@actions/http-client": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.0.1.tgz", + "integrity": "sha512-PIXiMVtz6VvyaRsGY268qvj57hXQEpsYogYOu2nrQhlf+XCGmZstmuZBbAybUl1nQGnvS1k1eEsQ69ZoD7xlSw==", + "requires": { + "tunnel": "^0.0.6" + } + }, + "uuid": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==" + } } }, "@actions/exec": { From 1e6f2cd3121764f4065a4f231bbc7222d7ffc0a4 Mon Sep 17 00:00:00 2001 From: Jef LeCompte Date: Thu, 8 Sep 2022 13:12:30 -0700 Subject: [PATCH 30/32] test: add volta e2e test --- .github/workflows/versions.yml | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/.github/workflows/versions.yml b/.github/workflows/versions.yml index 9422a111..f8b46b6e 100644 --- a/.github/workflows/versions.yml +++ b/.github/workflows/versions.yml @@ -102,6 +102,25 @@ jobs: - name: Verify node run: __tests__/verify-node.sh 14 + version-file-volta: + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + os: [ ubuntu-latest, windows-latest, macos-latest ] + steps: + - uses: actions/checkout@v3 + - name: Remove engines from package.json + shell: bash + run: | + jq 'del(.engines)' ./__tests__/data/package.json | sponge ./__tests__/data/package.json + - name: Setup node from node version file + uses: ./ + with: + node-version-file: '__tests__/data/package.json' + - name: Verify node + run: __tests__/verify-node.sh 14 + node-dist: runs-on: ${{ matrix.os }} strategy: From 4cffe5c52b4c7a58d230a4f51f534a7bf0f1dfce Mon Sep 17 00:00:00 2001 From: Jef LeCompte Date: Thu, 8 Sep 2022 13:18:29 -0700 Subject: [PATCH 31/32] chore: remove sponge usage --- .github/workflows/versions.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/versions.yml b/.github/workflows/versions.yml index f8b46b6e..3ef717c1 100644 --- a/.github/workflows/versions.yml +++ b/.github/workflows/versions.yml @@ -112,8 +112,7 @@ jobs: - uses: actions/checkout@v3 - name: Remove engines from package.json shell: bash - run: | - jq 'del(.engines)' ./__tests__/data/package.json | sponge ./__tests__/data/package.json + run: cat <<< "$(jq 'del(.engines)' ./__tests__/data/package.json)" > ./__tests__/data/package.json - name: Setup node from node version file uses: ./ with: From 969bd2663942d722d85b6a8626225850c2f7be4b Mon Sep 17 00:00:00 2001 From: Evgenii Korolevskii <102794661+e-korolevskii@users.noreply.github.com> Date: Mon, 26 Sep 2022 13:58:23 +0200 Subject: [PATCH 32/32] Fix test volta priority (#577) --- .github/workflows/versions.yml | 8 +- __tests__/data/package.json | 2 +- 
dist/cache-save/index.js | 376 ++++---- dist/setup/index.js | 1508 ++++++++++++++++---------------- src/installer.ts | 2 +- 5 files changed, 948 insertions(+), 948 deletions(-) diff --git a/.github/workflows/versions.yml b/.github/workflows/versions.yml index 3ef717c1..9a138ebb 100644 --- a/.github/workflows/versions.yml +++ b/.github/workflows/versions.yml @@ -95,6 +95,9 @@ jobs: node-version-file: [.nvmrc, .tool-versions, package.json] steps: - uses: actions/checkout@v3 + - name: Remove volta from package.json + shell: bash + run: cat <<< "$(jq 'del(.volta)' ./__tests__/data/package.json)" > ./__tests__/data/package.json - name: Setup node from node version file uses: ./ with: @@ -110,15 +113,12 @@ jobs: os: [ ubuntu-latest, windows-latest, macos-latest ] steps: - uses: actions/checkout@v3 - - name: Remove engines from package.json - shell: bash - run: cat <<< "$(jq 'del(.engines)' ./__tests__/data/package.json)" > ./__tests__/data/package.json - name: Setup node from node version file uses: ./ with: node-version-file: '__tests__/data/package.json' - name: Verify node - run: __tests__/verify-node.sh 14 + run: __tests__/verify-node.sh 16 node-dist: runs-on: ${{ matrix.os }} diff --git a/__tests__/data/package.json b/__tests__/data/package.json index 915f287a..ebee7dfc 100644 --- a/__tests__/data/package.json +++ b/__tests__/data/package.json @@ -3,6 +3,6 @@ "node": "^14.0.0" }, "volta": { - "node": "14.0.0" + "node": "16.0.0" } } diff --git a/dist/cache-save/index.js b/dist/cache-save/index.js index 4bac2864..ec3131c2 100644 --- a/dist/cache-save/index.js +++ b/dist/cache-save/index.js @@ -60979,74 +60979,74 @@ exports.fromPromise = function (fn) { /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; - -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const core = __importStar(__nccwpck_require__(2186)); -const cache = __importStar(__nccwpck_require__(7799)); -const fs_1 = __importDefault(__nccwpck_require__(7147)); -const constants_1 = __nccwpck_require__(9042); -const cache_utils_1 = __nccwpck_require__(1678); -// Catch and log any unhandled exceptions. These exceptions can leak out of the uploadChunk method in -// @actions/toolkit when a failed upload closes the file descriptor causing any in-process reads to -// throw an uncaught exception. Instead of failing this action, just warn. 
-process.on('uncaughtException', e => { - const warningPrefix = '[warning]'; - core.info(`${warningPrefix}${e.message}`); -}); -function run() { - return __awaiter(this, void 0, void 0, function* () { - try { - const cacheLock = core.getInput('cache'); - yield cachePackages(cacheLock); - } - catch (error) { - core.setFailed(error.message); - } - }); -} -exports.run = run; -const cachePackages = (packageManager) => __awaiter(void 0, void 0, void 0, function* () { - const state = core.getState(constants_1.State.CacheMatchedKey); - const primaryKey = core.getState(constants_1.State.CachePrimaryKey); - const packageManagerInfo = yield cache_utils_1.getPackageManagerInfo(packageManager); - if (!packageManagerInfo) { - core.debug(`Caching for '${packageManager}' is not supported`); - return; - } - const cachePath = yield cache_utils_1.getCacheDirectoryPath(packageManagerInfo, packageManager); - if (!fs_1.default.existsSync(cachePath)) { - throw new Error(`Cache folder path is retrieved for ${packageManager} but doesn't exist on disk: ${cachePath}`); - } - if (primaryKey === state) { - core.info(`Cache hit occurred on the primary key ${primaryKey}, not saving cache.`); - return; - } - const cacheId = yield cache.saveCache([cachePath], primaryKey); - if (cacheId == -1) { - return; - } - core.info(`Cache saved with the key: ${primaryKey}`); -}); -run(); + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const core = __importStar(__nccwpck_require__(2186)); +const cache = __importStar(__nccwpck_require__(7799)); +const fs_1 = __importDefault(__nccwpck_require__(7147)); +const constants_1 = __nccwpck_require__(9042); +const cache_utils_1 = __nccwpck_require__(1678); +// Catch and log any unhandled exceptions. These exceptions can leak out of the uploadChunk method in +// @actions/toolkit when a failed upload closes the file descriptor causing any in-process reads to +// throw an uncaught exception. Instead of failing this action, just warn. 
+process.on('uncaughtException', e => { + const warningPrefix = '[warning]'; + core.info(`${warningPrefix}${e.message}`); +}); +function run() { + return __awaiter(this, void 0, void 0, function* () { + try { + const cacheLock = core.getInput('cache'); + yield cachePackages(cacheLock); + } + catch (error) { + core.setFailed(error.message); + } + }); +} +exports.run = run; +const cachePackages = (packageManager) => __awaiter(void 0, void 0, void 0, function* () { + const state = core.getState(constants_1.State.CacheMatchedKey); + const primaryKey = core.getState(constants_1.State.CachePrimaryKey); + const packageManagerInfo = yield cache_utils_1.getPackageManagerInfo(packageManager); + if (!packageManagerInfo) { + core.debug(`Caching for '${packageManager}' is not supported`); + return; + } + const cachePath = yield cache_utils_1.getCacheDirectoryPath(packageManagerInfo, packageManager); + if (!fs_1.default.existsSync(cachePath)) { + throw new Error(`Cache folder path is retrieved for ${packageManager} but doesn't exist on disk: ${cachePath}`); + } + if (primaryKey === state) { + core.info(`Cache hit occurred on the primary key ${primaryKey}, not saving cache.`); + return; + } + const cacheId = yield cache.saveCache([cachePath], primaryKey); + if (cacheId == -1) { + return; + } + core.info(`Cache saved with the key: ${primaryKey}`); +}); +run(); /***/ }), @@ -61055,109 +61055,109 @@ run(); /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; - -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const core = __importStar(__nccwpck_require__(2186)); -const exec = __importStar(__nccwpck_require__(1514)); -const cache = __importStar(__nccwpck_require__(7799)); -exports.supportedPackageManagers = { - npm: { - lockFilePatterns: ['package-lock.json', 'npm-shrinkwrap.json', 'yarn.lock'], - getCacheFolderCommand: 'npm config get cache' - }, - pnpm: { - lockFilePatterns: ['pnpm-lock.yaml'], - getCacheFolderCommand: 'pnpm store path --silent' - }, - yarn1: { - lockFilePatterns: ['yarn.lock'], - getCacheFolderCommand: 'yarn cache dir' - }, - yarn2: { - lockFilePatterns: ['yarn.lock'], - getCacheFolderCommand: 'yarn config get cacheFolder' - } -}; -exports.getCommandOutput = (toolCommand) => __awaiter(void 0, void 0, void 0, function* () { - let { stdout, stderr, exitCode } = yield exec.getExecOutput(toolCommand, undefined, { ignoreReturnCode: true }); - if (exitCode) { - stderr = !stderr.trim() - ? 
`The '${toolCommand}' command failed with exit code: ${exitCode}` - : stderr; - throw new Error(stderr); - } - return stdout.trim(); -}); -const getPackageManagerVersion = (packageManager, command) => __awaiter(void 0, void 0, void 0, function* () { - const stdOut = yield exports.getCommandOutput(`${packageManager} ${command}`); - if (!stdOut) { - throw new Error(`Could not retrieve version of ${packageManager}`); - } - return stdOut; -}); -exports.getPackageManagerInfo = (packageManager) => __awaiter(void 0, void 0, void 0, function* () { - if (packageManager === 'npm') { - return exports.supportedPackageManagers.npm; - } - else if (packageManager === 'pnpm') { - return exports.supportedPackageManagers.pnpm; - } - else if (packageManager === 'yarn') { - const yarnVersion = yield getPackageManagerVersion('yarn', '--version'); - core.debug(`Consumed yarn version is ${yarnVersion}`); - if (yarnVersion.startsWith('1.')) { - return exports.supportedPackageManagers.yarn1; - } - else { - return exports.supportedPackageManagers.yarn2; - } - } - else { - return null; - } -}); -exports.getCacheDirectoryPath = (packageManagerInfo, packageManager) => __awaiter(void 0, void 0, void 0, function* () { - const stdOut = yield exports.getCommandOutput(packageManagerInfo.getCacheFolderCommand); - if (!stdOut) { - throw new Error(`Could not get cache folder path for ${packageManager}`); - } - core.debug(`${packageManager} path is ${stdOut}`); - return stdOut.trim(); -}); -function isGhes() { - const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com'); - return ghUrl.hostname.toUpperCase() !== 'GITHUB.COM'; -} -exports.isGhes = isGhes; -function isCacheFeatureAvailable() { - if (!cache.isFeatureAvailable()) { - if (isGhes()) { - throw new Error('Cache action is only supported on GHES version >= 3.5. If you are on version >=3.5 Please check with GHES admin if Actions cache service is enabled or not.'); - } - else { - core.warning('The runner was not able to contact the cache service. Caching will be skipped'); - } - return false; - } - return true; -} -exports.isCacheFeatureAvailable = isCacheFeatureAvailable; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const core = __importStar(__nccwpck_require__(2186)); +const exec = __importStar(__nccwpck_require__(1514)); +const cache = __importStar(__nccwpck_require__(7799)); +exports.supportedPackageManagers = { + npm: { + lockFilePatterns: ['package-lock.json', 'npm-shrinkwrap.json', 'yarn.lock'], + getCacheFolderCommand: 'npm config get cache' + }, + pnpm: { + lockFilePatterns: ['pnpm-lock.yaml'], + getCacheFolderCommand: 'pnpm store path --silent' + }, + yarn1: { + lockFilePatterns: ['yarn.lock'], + getCacheFolderCommand: 'yarn cache dir' + }, + yarn2: { + lockFilePatterns: ['yarn.lock'], + getCacheFolderCommand: 'yarn config get cacheFolder' + } +}; +exports.getCommandOutput = (toolCommand) => __awaiter(void 0, void 0, void 0, function* () { + let { stdout, stderr, exitCode } = yield exec.getExecOutput(toolCommand, undefined, { ignoreReturnCode: true }); + if (exitCode) { + stderr = !stderr.trim() + ? `The '${toolCommand}' command failed with exit code: ${exitCode}` + : stderr; + throw new Error(stderr); + } + return stdout.trim(); +}); +const getPackageManagerVersion = (packageManager, command) => __awaiter(void 0, void 0, void 0, function* () { + const stdOut = yield exports.getCommandOutput(`${packageManager} ${command}`); + if (!stdOut) { + throw new Error(`Could not retrieve version of ${packageManager}`); + } + return stdOut; +}); +exports.getPackageManagerInfo = (packageManager) => __awaiter(void 0, void 0, void 0, function* () { + if (packageManager === 'npm') { + return exports.supportedPackageManagers.npm; + } + else if (packageManager === 'pnpm') { + return exports.supportedPackageManagers.pnpm; + } + else if (packageManager === 'yarn') { + const yarnVersion = yield getPackageManagerVersion('yarn', '--version'); + core.debug(`Consumed yarn version is ${yarnVersion}`); + if (yarnVersion.startsWith('1.')) { + return exports.supportedPackageManagers.yarn1; + } + else { + return exports.supportedPackageManagers.yarn2; + } + } + else { + return null; + } +}); +exports.getCacheDirectoryPath = (packageManagerInfo, packageManager) => __awaiter(void 0, void 0, void 0, function* () { + const stdOut = yield exports.getCommandOutput(packageManagerInfo.getCacheFolderCommand); + if (!stdOut) { + throw new Error(`Could not get cache folder path for ${packageManager}`); + } + core.debug(`${packageManager} path is ${stdOut}`); + return stdOut.trim(); +}); +function isGhes() { + const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com'); + return ghUrl.hostname.toUpperCase() !== 'GITHUB.COM'; +} +exports.isGhes = isGhes; +function isCacheFeatureAvailable() { + if (!cache.isFeatureAvailable()) { + if (isGhes()) { + throw new Error('Cache action is only supported on GHES version >= 3.5. If you are on version >=3.5 Please check with GHES admin if Actions cache service is enabled or not.'); + } + else { + core.warning('The runner was not able to contact the cache service. 
Caching will be skipped'); + } + return false; + } + return true; +} +exports.isCacheFeatureAvailable = isCacheFeatureAvailable; /***/ }), @@ -61166,23 +61166,23 @@ exports.isCacheFeatureAvailable = isCacheFeatureAvailable; /***/ ((__unused_webpack_module, exports) => { "use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -var LockType; -(function (LockType) { - LockType["Npm"] = "npm"; - LockType["Pnpm"] = "pnpm"; - LockType["Yarn"] = "yarn"; -})(LockType = exports.LockType || (exports.LockType = {})); -var State; -(function (State) { - State["CachePrimaryKey"] = "CACHE_KEY"; - State["CacheMatchedKey"] = "CACHE_RESULT"; -})(State = exports.State || (exports.State = {})); -var Outputs; -(function (Outputs) { - Outputs["CacheHit"] = "cache-hit"; -})(Outputs = exports.Outputs || (exports.Outputs = {})); + +Object.defineProperty(exports, "__esModule", ({ value: true })); +var LockType; +(function (LockType) { + LockType["Npm"] = "npm"; + LockType["Pnpm"] = "pnpm"; + LockType["Yarn"] = "yarn"; +})(LockType = exports.LockType || (exports.LockType = {})); +var State; +(function (State) { + State["CachePrimaryKey"] = "CACHE_KEY"; + State["CacheMatchedKey"] = "CACHE_RESULT"; +})(State = exports.State || (exports.State = {})); +var Outputs; +(function (Outputs) { + Outputs["CacheHit"] = "cache-hit"; +})(Outputs = exports.Outputs || (exports.Outputs = {})); /***/ }), diff --git a/dist/setup/index.js b/dist/setup/index.js index 5b744d72..8ff49441 100644 --- a/dist/setup/index.js +++ b/dist/setup/index.js @@ -72875,62 +72875,62 @@ function wrappy (fn, cb) { /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; - -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const fs = __importStar(__nccwpck_require__(7147)); -const os = __importStar(__nccwpck_require__(2037)); -const path = __importStar(__nccwpck_require__(1017)); -const core = __importStar(__nccwpck_require__(2186)); -const github = __importStar(__nccwpck_require__(5438)); -function configAuthentication(registryUrl, alwaysAuth) { - const npmrc = path.resolve(process.env['RUNNER_TEMP'] || process.cwd(), '.npmrc'); - if (!registryUrl.endsWith('/')) { - registryUrl += '/'; - } - writeRegistryToFile(registryUrl, npmrc, alwaysAuth); -} -exports.configAuthentication = configAuthentication; -function writeRegistryToFile(registryUrl, fileLocation, alwaysAuth) { - let scope = core.getInput('scope'); - if (!scope && registryUrl.indexOf('npm.pkg.github.com') > -1) { - scope = github.context.repo.owner; - } - if (scope && scope[0] != '@') { - scope = '@' + scope; - } - if (scope) { - scope = scope.toLowerCase(); - } - core.debug(`Setting auth in ${fileLocation}`); - let newContents = ''; - if (fs.existsSync(fileLocation)) { - const curContents = fs.readFileSync(fileLocation, 'utf8'); - curContents.split(os.EOL).forEach((line) => { - // Add current contents unless they are setting the registry - if (!line.toLowerCase().startsWith('registry')) { - newContents += line + os.EOL; - } - }); - } - // Remove http: or https: from front of registry. - const authString = registryUrl.replace(/(^\w+:|^)/, '') + ':_authToken=${NODE_AUTH_TOKEN}'; - const registryString = scope - ? 
`${scope}:registry=${registryUrl}` - : `registry=${registryUrl}`; - const alwaysAuthString = `always-auth=${alwaysAuth}`; - newContents += `${authString}${os.EOL}${registryString}${os.EOL}${alwaysAuthString}`; - fs.writeFileSync(fileLocation, newContents); - core.exportVariable('NPM_CONFIG_USERCONFIG', fileLocation); - // Export empty node_auth_token if didn't exist so npm doesn't complain about not being able to find it - core.exportVariable('NODE_AUTH_TOKEN', process.env.NODE_AUTH_TOKEN || 'XXXXX-XXXXX-XXXXX-XXXXX'); -} + +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const fs = __importStar(__nccwpck_require__(7147)); +const os = __importStar(__nccwpck_require__(2037)); +const path = __importStar(__nccwpck_require__(1017)); +const core = __importStar(__nccwpck_require__(2186)); +const github = __importStar(__nccwpck_require__(5438)); +function configAuthentication(registryUrl, alwaysAuth) { + const npmrc = path.resolve(process.env['RUNNER_TEMP'] || process.cwd(), '.npmrc'); + if (!registryUrl.endsWith('/')) { + registryUrl += '/'; + } + writeRegistryToFile(registryUrl, npmrc, alwaysAuth); +} +exports.configAuthentication = configAuthentication; +function writeRegistryToFile(registryUrl, fileLocation, alwaysAuth) { + let scope = core.getInput('scope'); + if (!scope && registryUrl.indexOf('npm.pkg.github.com') > -1) { + scope = github.context.repo.owner; + } + if (scope && scope[0] != '@') { + scope = '@' + scope; + } + if (scope) { + scope = scope.toLowerCase(); + } + core.debug(`Setting auth in ${fileLocation}`); + let newContents = ''; + if (fs.existsSync(fileLocation)) { + const curContents = fs.readFileSync(fileLocation, 'utf8'); + curContents.split(os.EOL).forEach((line) => { + // Add current contents unless they are setting the registry + if (!line.toLowerCase().startsWith('registry')) { + newContents += line + os.EOL; + } + }); + } + // Remove http: or https: from front of registry. + const authString = registryUrl.replace(/(^\w+:|^)/, '') + ':_authToken=${NODE_AUTH_TOKEN}'; + const registryString = scope + ? `${scope}:registry=${registryUrl}` + : `registry=${registryUrl}`; + const alwaysAuthString = `always-auth=${alwaysAuth}`; + newContents += `${authString}${os.EOL}${registryString}${os.EOL}${alwaysAuthString}`; + fs.writeFileSync(fileLocation, newContents); + core.exportVariable('NPM_CONFIG_USERCONFIG', fileLocation); + // Export empty node_auth_token if didn't exist so npm doesn't complain about not being able to find it + core.exportVariable('NODE_AUTH_TOKEN', process.env.NODE_AUTH_TOKEN || 'XXXXX-XXXXX-XXXXX-XXXXX'); +} /***/ }), @@ -72939,70 +72939,70 @@ function writeRegistryToFile(registryUrl, fileLocation, alwaysAuth) { /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; - -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const cache = __importStar(__nccwpck_require__(7799)); -const core = __importStar(__nccwpck_require__(2186)); -const glob = __importStar(__nccwpck_require__(8090)); -const path_1 = __importDefault(__nccwpck_require__(1017)); -const fs_1 = __importDefault(__nccwpck_require__(7147)); -const constants_1 = __nccwpck_require__(9042); -const cache_utils_1 = __nccwpck_require__(1678); -exports.restoreCache = (packageManager, cacheDependencyPath) => __awaiter(void 0, void 0, void 0, function* () { - const packageManagerInfo = yield cache_utils_1.getPackageManagerInfo(packageManager); - if (!packageManagerInfo) { - throw new Error(`Caching for '${packageManager}' is not supported`); - } - const platform = process.env.RUNNER_OS; - const cachePath = yield cache_utils_1.getCacheDirectoryPath(packageManagerInfo, packageManager); - const lockFilePath = cacheDependencyPath - ? cacheDependencyPath - : findLockFile(packageManagerInfo); - const fileHash = yield glob.hashFiles(lockFilePath); - if (!fileHash) { - throw new Error('Some specified paths were not resolved, unable to cache dependencies.'); - } - const primaryKey = `node-cache-${platform}-${packageManager}-${fileHash}`; - core.debug(`primary key is ${primaryKey}`); - core.saveState(constants_1.State.CachePrimaryKey, primaryKey); - const cacheKey = yield cache.restoreCache([cachePath], primaryKey); - core.setOutput('cache-hit', Boolean(cacheKey)); - if (!cacheKey) { - core.info(`${packageManager} cache is not found`); - return; - } - core.saveState(constants_1.State.CacheMatchedKey, cacheKey); - core.info(`Cache restored from key: ${cacheKey}`); -}); -const findLockFile = (packageManager) => { - let lockFiles = packageManager.lockFilePatterns; - const workspace = process.env.GITHUB_WORKSPACE; - const rootContent = fs_1.default.readdirSync(workspace); - const lockFile = lockFiles.find(item => rootContent.includes(item)); - if (!lockFile) { - throw new Error(`Dependencies lock file is not found in ${workspace}. Supported file patterns: ${lockFiles.toString()}`); - } - return path_1.default.join(workspace, lockFile); -}; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const cache = __importStar(__nccwpck_require__(7799)); +const core = __importStar(__nccwpck_require__(2186)); +const glob = __importStar(__nccwpck_require__(8090)); +const path_1 = __importDefault(__nccwpck_require__(1017)); +const fs_1 = __importDefault(__nccwpck_require__(7147)); +const constants_1 = __nccwpck_require__(9042); +const cache_utils_1 = __nccwpck_require__(1678); +exports.restoreCache = (packageManager, cacheDependencyPath) => __awaiter(void 0, void 0, void 0, function* () { + const packageManagerInfo = yield cache_utils_1.getPackageManagerInfo(packageManager); + if (!packageManagerInfo) { + throw new Error(`Caching for '${packageManager}' is not supported`); + } + const platform = process.env.RUNNER_OS; + const cachePath = yield cache_utils_1.getCacheDirectoryPath(packageManagerInfo, packageManager); + const lockFilePath = cacheDependencyPath + ? cacheDependencyPath + : findLockFile(packageManagerInfo); + const fileHash = yield glob.hashFiles(lockFilePath); + if (!fileHash) { + throw new Error('Some specified paths were not resolved, unable to cache dependencies.'); + } + const primaryKey = `node-cache-${platform}-${packageManager}-${fileHash}`; + core.debug(`primary key is ${primaryKey}`); + core.saveState(constants_1.State.CachePrimaryKey, primaryKey); + const cacheKey = yield cache.restoreCache([cachePath], primaryKey); + core.setOutput('cache-hit', Boolean(cacheKey)); + if (!cacheKey) { + core.info(`${packageManager} cache is not found`); + return; + } + core.saveState(constants_1.State.CacheMatchedKey, cacheKey); + core.info(`Cache restored from key: ${cacheKey}`); +}); +const findLockFile = (packageManager) => { + let lockFiles = packageManager.lockFilePatterns; + const workspace = process.env.GITHUB_WORKSPACE; + const rootContent = fs_1.default.readdirSync(workspace); + const lockFile = lockFiles.find(item => rootContent.includes(item)); + if (!lockFile) { + throw new Error(`Dependencies lock file is not found in ${workspace}. Supported file patterns: ${lockFiles.toString()}`); + } + return path_1.default.join(workspace, lockFile); +}; /***/ }), @@ -73011,109 +73011,109 @@ const findLockFile = (packageManager) => { /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; - -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const core = __importStar(__nccwpck_require__(2186)); -const exec = __importStar(__nccwpck_require__(1514)); -const cache = __importStar(__nccwpck_require__(7799)); -exports.supportedPackageManagers = { - npm: { - lockFilePatterns: ['package-lock.json', 'npm-shrinkwrap.json', 'yarn.lock'], - getCacheFolderCommand: 'npm config get cache' - }, - pnpm: { - lockFilePatterns: ['pnpm-lock.yaml'], - getCacheFolderCommand: 'pnpm store path --silent' - }, - yarn1: { - lockFilePatterns: ['yarn.lock'], - getCacheFolderCommand: 'yarn cache dir' - }, - yarn2: { - lockFilePatterns: ['yarn.lock'], - getCacheFolderCommand: 'yarn config get cacheFolder' - } -}; -exports.getCommandOutput = (toolCommand) => __awaiter(void 0, void 0, void 0, function* () { - let { stdout, stderr, exitCode } = yield exec.getExecOutput(toolCommand, undefined, { ignoreReturnCode: true }); - if (exitCode) { - stderr = !stderr.trim() - ? `The '${toolCommand}' command failed with exit code: ${exitCode}` - : stderr; - throw new Error(stderr); - } - return stdout.trim(); -}); -const getPackageManagerVersion = (packageManager, command) => __awaiter(void 0, void 0, void 0, function* () { - const stdOut = yield exports.getCommandOutput(`${packageManager} ${command}`); - if (!stdOut) { - throw new Error(`Could not retrieve version of ${packageManager}`); - } - return stdOut; -}); -exports.getPackageManagerInfo = (packageManager) => __awaiter(void 0, void 0, void 0, function* () { - if (packageManager === 'npm') { - return exports.supportedPackageManagers.npm; - } - else if (packageManager === 'pnpm') { - return exports.supportedPackageManagers.pnpm; - } - else if (packageManager === 'yarn') { - const yarnVersion = yield getPackageManagerVersion('yarn', '--version'); - core.debug(`Consumed yarn version is ${yarnVersion}`); - if (yarnVersion.startsWith('1.')) { - return exports.supportedPackageManagers.yarn1; - } - else { - return exports.supportedPackageManagers.yarn2; - } - } - else { - return null; - } -}); -exports.getCacheDirectoryPath = (packageManagerInfo, packageManager) => __awaiter(void 0, void 0, void 0, function* () { - const stdOut = yield exports.getCommandOutput(packageManagerInfo.getCacheFolderCommand); - if (!stdOut) { - throw new Error(`Could not get cache folder path for ${packageManager}`); - } - core.debug(`${packageManager} path is ${stdOut}`); - return stdOut.trim(); -}); -function isGhes() { - const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com'); - return ghUrl.hostname.toUpperCase() !== 'GITHUB.COM'; -} -exports.isGhes = isGhes; -function isCacheFeatureAvailable() { - if (!cache.isFeatureAvailable()) { - if (isGhes()) { - throw new Error('Cache action is 
only supported on GHES version >= 3.5. If you are on version >=3.5 Please check with GHES admin if Actions cache service is enabled or not.'); - } - else { - core.warning('The runner was not able to contact the cache service. Caching will be skipped'); - } - return false; - } - return true; -} -exports.isCacheFeatureAvailable = isCacheFeatureAvailable; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const core = __importStar(__nccwpck_require__(2186)); +const exec = __importStar(__nccwpck_require__(1514)); +const cache = __importStar(__nccwpck_require__(7799)); +exports.supportedPackageManagers = { + npm: { + lockFilePatterns: ['package-lock.json', 'npm-shrinkwrap.json', 'yarn.lock'], + getCacheFolderCommand: 'npm config get cache' + }, + pnpm: { + lockFilePatterns: ['pnpm-lock.yaml'], + getCacheFolderCommand: 'pnpm store path --silent' + }, + yarn1: { + lockFilePatterns: ['yarn.lock'], + getCacheFolderCommand: 'yarn cache dir' + }, + yarn2: { + lockFilePatterns: ['yarn.lock'], + getCacheFolderCommand: 'yarn config get cacheFolder' + } +}; +exports.getCommandOutput = (toolCommand) => __awaiter(void 0, void 0, void 0, function* () { + let { stdout, stderr, exitCode } = yield exec.getExecOutput(toolCommand, undefined, { ignoreReturnCode: true }); + if (exitCode) { + stderr = !stderr.trim() + ? 
`The '${toolCommand}' command failed with exit code: ${exitCode}` + : stderr; + throw new Error(stderr); + } + return stdout.trim(); +}); +const getPackageManagerVersion = (packageManager, command) => __awaiter(void 0, void 0, void 0, function* () { + const stdOut = yield exports.getCommandOutput(`${packageManager} ${command}`); + if (!stdOut) { + throw new Error(`Could not retrieve version of ${packageManager}`); + } + return stdOut; +}); +exports.getPackageManagerInfo = (packageManager) => __awaiter(void 0, void 0, void 0, function* () { + if (packageManager === 'npm') { + return exports.supportedPackageManagers.npm; + } + else if (packageManager === 'pnpm') { + return exports.supportedPackageManagers.pnpm; + } + else if (packageManager === 'yarn') { + const yarnVersion = yield getPackageManagerVersion('yarn', '--version'); + core.debug(`Consumed yarn version is ${yarnVersion}`); + if (yarnVersion.startsWith('1.')) { + return exports.supportedPackageManagers.yarn1; + } + else { + return exports.supportedPackageManagers.yarn2; + } + } + else { + return null; + } +}); +exports.getCacheDirectoryPath = (packageManagerInfo, packageManager) => __awaiter(void 0, void 0, void 0, function* () { + const stdOut = yield exports.getCommandOutput(packageManagerInfo.getCacheFolderCommand); + if (!stdOut) { + throw new Error(`Could not get cache folder path for ${packageManager}`); + } + core.debug(`${packageManager} path is ${stdOut}`); + return stdOut.trim(); +}); +function isGhes() { + const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com'); + return ghUrl.hostname.toUpperCase() !== 'GITHUB.COM'; +} +exports.isGhes = isGhes; +function isCacheFeatureAvailable() { + if (!cache.isFeatureAvailable()) { + if (isGhes()) { + throw new Error('Cache action is only supported on GHES version >= 3.5. If you are on version >=3.5 Please check with GHES admin if Actions cache service is enabled or not.'); + } + else { + core.warning('The runner was not able to contact the cache service. 
Caching will be skipped'); + } + return false; + } + return true; +} +exports.isCacheFeatureAvailable = isCacheFeatureAvailable; /***/ }), @@ -73122,23 +73122,23 @@ exports.isCacheFeatureAvailable = isCacheFeatureAvailable; /***/ ((__unused_webpack_module, exports) => { "use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -var LockType; -(function (LockType) { - LockType["Npm"] = "npm"; - LockType["Pnpm"] = "pnpm"; - LockType["Yarn"] = "yarn"; -})(LockType = exports.LockType || (exports.LockType = {})); -var State; -(function (State) { - State["CachePrimaryKey"] = "CACHE_KEY"; - State["CacheMatchedKey"] = "CACHE_RESULT"; -})(State = exports.State || (exports.State = {})); -var Outputs; -(function (Outputs) { - Outputs["CacheHit"] = "cache-hit"; -})(Outputs = exports.Outputs || (exports.Outputs = {})); + +Object.defineProperty(exports, "__esModule", ({ value: true })); +var LockType; +(function (LockType) { + LockType["Npm"] = "npm"; + LockType["Pnpm"] = "pnpm"; + LockType["Yarn"] = "yarn"; +})(LockType = exports.LockType || (exports.LockType = {})); +var State; +(function (State) { + State["CachePrimaryKey"] = "CACHE_KEY"; + State["CacheMatchedKey"] = "CACHE_RESULT"; +})(State = exports.State || (exports.State = {})); +var Outputs; +(function (Outputs) { + Outputs["CacheHit"] = "cache-hit"; +})(Outputs = exports.Outputs || (exports.Outputs = {})); /***/ }), @@ -73147,414 +73147,414 @@ var Outputs; /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; - -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const os = __nccwpck_require__(2037); -const assert = __importStar(__nccwpck_require__(9491)); -const core = __importStar(__nccwpck_require__(2186)); -const hc = __importStar(__nccwpck_require__(9925)); -const io = __importStar(__nccwpck_require__(7436)); -const tc = __importStar(__nccwpck_require__(7784)); -const path = __importStar(__nccwpck_require__(1017)); -const semver = __importStar(__nccwpck_require__(5911)); -const fs = __nccwpck_require__(7147); -function getNode(versionSpec, stable, checkLatest, auth, arch = os.arch()) { - return __awaiter(this, void 0, void 0, function* () { - // Store manifest data to avoid multiple calls - let manifest; - let nodeVersions; - let osPlat = os.platform(); - let osArch = translateArchToDistUrl(arch); - if (isLtsAlias(versionSpec)) { - core.info('Attempt to resolve LTS alias from manifest...'); - // No try-catch since it's not possible to resolve LTS alias without manifest - manifest = yield getManifest(auth); - versionSpec = resolveLtsAliasFromManifest(versionSpec, stable, manifest); - } - if (isLatestSyntax(versionSpec)) { - nodeVersions = yield getVersionsFromDist(); - versionSpec = yield queryDistForMatch(versionSpec, arch, nodeVersions); - core.info(`getting latest node version...`); - } - if (checkLatest) { - core.info('Attempt to resolve the latest version from manifest...'); - const resolvedVersion = yield resolveVersionFromManifest(versionSpec, stable, auth, osArch, manifest); - if (resolvedVersion) { - versionSpec = resolvedVersion; - core.info(`Resolved as '${versionSpec}'`); - } - else { - core.info(`Failed to resolve version ${versionSpec} from manifest`); - } - } - // check cache - let toolPath; - toolPath = tc.find('node', versionSpec, osArch); - // If not found in cache, download - if (toolPath) { - core.info(`Found in cache @ ${toolPath}`); - } - else { - core.info(`Attempting to download ${versionSpec}...`); - let downloadPath = ''; - let info = null; - // - // Try download from internal distribution (popular versions only) - // - try { - info = yield getInfoFromManifest(versionSpec, stable, auth, osArch, manifest); - if (info) { - core.info(`Acquiring ${info.resolvedVersion} - ${info.arch} from ${info.downloadUrl}`); - downloadPath = yield tc.downloadTool(info.downloadUrl, undefined, auth); - } - else { - core.info('Not found in manifest. Falling back to download directly from Node'); - } - } - catch (err) { - // Rate limit? - if (err instanceof tc.HTTPError && - (err.httpStatusCode === 403 || err.httpStatusCode === 429)) { - core.info(`Received HTTP status code ${err.httpStatusCode}. 
This usually indicates the rate limit has been exceeded`); - } - else { - core.info(err.message); - } - core.debug(err.stack); - core.info('Falling back to download directly from Node'); - } - // - // Download from nodejs.org - // - if (!downloadPath) { - info = yield getInfoFromDist(versionSpec, arch, nodeVersions); - if (!info) { - throw new Error(`Unable to find Node version '${versionSpec}' for platform ${osPlat} and architecture ${osArch}.`); - } - core.info(`Acquiring ${info.resolvedVersion} - ${info.arch} from ${info.downloadUrl}`); - try { - downloadPath = yield tc.downloadTool(info.downloadUrl); - } - catch (err) { - if (err instanceof tc.HTTPError && err.httpStatusCode == 404) { - return yield acquireNodeFromFallbackLocation(info.resolvedVersion, info.arch); - } - throw err; - } - } - // - // Extract - // - core.info('Extracting ...'); - let extPath; - info = info || {}; // satisfy compiler, never null when reaches here - if (osPlat == 'win32') { - let _7zPath = path.join(__dirname, '../..', 'externals', '7zr.exe'); - extPath = yield tc.extract7z(downloadPath, undefined, _7zPath); - // 7z extracts to folder matching file name - let nestedPath = path.join(extPath, path.basename(info.fileName, '.7z')); - if (fs.existsSync(nestedPath)) { - extPath = nestedPath; - } - } - else { - extPath = yield tc.extractTar(downloadPath, undefined, [ - 'xz', - '--strip', - '1' - ]); - } - // - // Install into the local tool cache - node extracts with a root folder that matches the fileName downloaded - // - core.info('Adding to the cache ...'); - toolPath = yield tc.cacheDir(extPath, 'node', info.resolvedVersion, info.arch); - core.info('Done'); - } - // - // a tool installer initimately knows details about the layout of that tool - // for example, node binary is in the bin folder after the extract on Mac/Linux. - // layouts could change by version, by platform etc... but that's the tool installers job - // - if (osPlat != 'win32') { - toolPath = path.join(toolPath, 'bin'); - } - // - // prepend the tools path. instructs the agent to prepend for future tasks - core.addPath(toolPath); - }); -} -exports.getNode = getNode; -function isLtsAlias(versionSpec) { - return versionSpec.startsWith('lts/'); -} -function getManifest(auth) { - core.debug('Getting manifest from actions/node-versions@main'); - return tc.getManifestFromRepo('actions', 'node-versions', auth, 'main'); -} -function resolveLtsAliasFromManifest(versionSpec, stable, manifest) { - var _a; - const alias = (_a = versionSpec.split('lts/')[1]) === null || _a === void 0 ? void 0 : _a.toLowerCase(); - if (!alias) { - throw new Error(`Unable to parse LTS alias for Node version '${versionSpec}'`); - } - core.debug(`LTS alias '${alias}' for Node version '${versionSpec}'`); - // Supported formats are `lts/`, `lts/*`, and `lts/-n`. Where asterisk means highest possible LTS and -n means the nth-highest. - const n = Number(alias); - const aliases = Object.fromEntries(manifest - .filter(x => x.lts && x.stable === stable) - .map(x => [x.lts.toLowerCase(), x]) - .reverse()); - const numbered = Object.values(aliases); - const release = alias === '*' - ? numbered[numbered.length - 1] - : n < 0 - ? 
numbered[numbered.length - 1 + n] - : aliases[alias]; - if (!release) { - throw new Error(`Unable to find LTS release '${alias}' for Node version '${versionSpec}'.`); - } - core.debug(`Found LTS release '${release.version}' for Node version '${versionSpec}'`); - return release.version.split('.')[0]; -} -function getInfoFromManifest(versionSpec, stable, auth, osArch = translateArchToDistUrl(os.arch()), manifest) { - return __awaiter(this, void 0, void 0, function* () { - let info = null; - if (!manifest) { - core.debug('No manifest cached'); - manifest = yield getManifest(auth); - } - const rel = yield tc.findFromManifest(versionSpec, stable, manifest, osArch); - if (rel && rel.files.length > 0) { - info = {}; - info.resolvedVersion = rel.version; - info.arch = rel.files[0].arch; - info.downloadUrl = rel.files[0].download_url; - info.fileName = rel.files[0].filename; - } - return info; - }); -} -function getInfoFromDist(versionSpec, arch = os.arch(), nodeVersions) { - return __awaiter(this, void 0, void 0, function* () { - let osPlat = os.platform(); - let osArch = translateArchToDistUrl(arch); - let version = yield queryDistForMatch(versionSpec, arch, nodeVersions); - if (!version) { - return null; - } - // - // Download - a tool installer intimately knows how to get the tool (and construct urls) - // - version = semver.clean(version) || ''; - let fileName = osPlat == 'win32' - ? `node-v${version}-win-${osArch}` - : `node-v${version}-${osPlat}-${osArch}`; - let urlFileName = osPlat == 'win32' ? `${fileName}.7z` : `${fileName}.tar.gz`; - let url = `https://nodejs.org/dist/v${version}/${urlFileName}`; - return { - downloadUrl: url, - resolvedVersion: version, - arch: arch, - fileName: fileName - }; - }); -} -function resolveVersionFromManifest(versionSpec, stable, auth, osArch = translateArchToDistUrl(os.arch()), manifest) { - return __awaiter(this, void 0, void 0, function* () { - try { - const info = yield getInfoFromManifest(versionSpec, stable, auth, osArch, manifest); - return info === null || info === void 0 ? void 0 : info.resolvedVersion; - } - catch (err) { - core.info('Unable to resolve version from manifest...'); - core.debug(err.message); - } - }); -} -// TODO - should we just export this from @actions/tool-cache? 
Lifted directly from there -function evaluateVersions(versions, versionSpec) { - let version = ''; - core.debug(`evaluating ${versions.length} versions`); - versions = versions.sort((a, b) => { - if (semver.gt(a, b)) { - return 1; - } - return -1; - }); - for (let i = versions.length - 1; i >= 0; i--) { - const potential = versions[i]; - const satisfied = semver.satisfies(potential, versionSpec); - if (satisfied) { - version = potential; - break; - } - } - if (version) { - core.debug(`matched: ${version}`); - } - else { - core.debug('match not found'); - } - return version; -} -function queryDistForMatch(versionSpec, arch = os.arch(), nodeVersions) { - return __awaiter(this, void 0, void 0, function* () { - let osPlat = os.platform(); - let osArch = translateArchToDistUrl(arch); - // node offers a json list of versions - let dataFileName; - switch (osPlat) { - case 'linux': - dataFileName = `linux-${osArch}`; - break; - case 'darwin': - dataFileName = `osx-${osArch}-tar`; - break; - case 'win32': - dataFileName = `win-${osArch}-exe`; - break; - default: - throw new Error(`Unexpected OS '${osPlat}'`); - } - if (!nodeVersions) { - core.debug('No dist manifest cached'); - nodeVersions = yield getVersionsFromDist(); - } - let versions = []; - if (isLatestSyntax(versionSpec)) { - core.info(`getting latest node version...`); - return nodeVersions[0].version; - } - nodeVersions.forEach((nodeVersion) => { - // ensure this version supports your os and platform - if (nodeVersion.files.indexOf(dataFileName) >= 0) { - versions.push(nodeVersion.version); - } - }); - // get the latest version that matches the version spec - let version = evaluateVersions(versions, versionSpec); - return version; - }); -} -function getVersionsFromDist() { - return __awaiter(this, void 0, void 0, function* () { - let dataUrl = 'https://nodejs.org/dist/index.json'; - let httpClient = new hc.HttpClient('setup-node', [], { - allowRetries: true, - maxRetries: 3 - }); - let response = yield httpClient.getJson(dataUrl); - return response.result || []; - }); -} -exports.getVersionsFromDist = getVersionsFromDist; -// For non LTS versions of Node, the files we need (for Windows) are sometimes located -// in a different folder than they normally are for other versions. -// Normally the format is similar to: https://nodejs.org/dist/v5.10.1/node-v5.10.1-win-x64.7z -// In this case, there will be two files located at: -// /dist/v5.10.1/win-x64/node.exe -// /dist/v5.10.1/win-x64/node.lib -// If this is not the structure, there may also be two files located at: -// /dist/v0.12.18/node.exe -// /dist/v0.12.18/node.lib -// This method attempts to download and cache the resources from these alternative locations. -// Note also that the files are normally zipped but in this case they are just an exe -// and lib file in a folder, not zipped. 
-function acquireNodeFromFallbackLocation(version, arch = os.arch()) { - return __awaiter(this, void 0, void 0, function* () { - let osPlat = os.platform(); - let osArch = translateArchToDistUrl(arch); - // Create temporary folder to download in to - const tempDownloadFolder = 'temp_' + Math.floor(Math.random() * 2000000000); - const tempDirectory = process.env['RUNNER_TEMP'] || ''; - assert.ok(tempDirectory, 'Expected RUNNER_TEMP to be defined'); - const tempDir = path.join(tempDirectory, tempDownloadFolder); - yield io.mkdirP(tempDir); - let exeUrl; - let libUrl; - try { - exeUrl = `https://nodejs.org/dist/v${version}/win-${osArch}/node.exe`; - libUrl = `https://nodejs.org/dist/v${version}/win-${osArch}/node.lib`; - core.info(`Downloading only node binary from ${exeUrl}`); - const exePath = yield tc.downloadTool(exeUrl); - yield io.cp(exePath, path.join(tempDir, 'node.exe')); - const libPath = yield tc.downloadTool(libUrl); - yield io.cp(libPath, path.join(tempDir, 'node.lib')); - } - catch (err) { - if (err instanceof tc.HTTPError && err.httpStatusCode == 404) { - exeUrl = `https://nodejs.org/dist/v${version}/node.exe`; - libUrl = `https://nodejs.org/dist/v${version}/node.lib`; - const exePath = yield tc.downloadTool(exeUrl); - yield io.cp(exePath, path.join(tempDir, 'node.exe')); - const libPath = yield tc.downloadTool(libUrl); - yield io.cp(libPath, path.join(tempDir, 'node.lib')); - } - else { - throw err; - } - } - let toolPath = yield tc.cacheDir(tempDir, 'node', version, arch); - core.addPath(toolPath); - return toolPath; - }); -} -// os.arch does not always match the relative download url, e.g. -// os.arch == 'arm' != node-v12.13.1-linux-armv7l.tar.gz -// All other currently supported architectures match, e.g.: -// os.arch = arm64 => https://nodejs.org/dist/v{VERSION}/node-v{VERSION}-{OS}-arm64.tar.gz -// os.arch = x64 => https://nodejs.org/dist/v{VERSION}/node-v{VERSION}-{OS}-x64.tar.gz -function translateArchToDistUrl(arch) { - switch (arch) { - case 'arm': - return 'armv7l'; - default: - return arch; - } -} -function parseNodeVersionFile(contents) { - var _a, _b, _c; - let nodeVersion; - // Try parsing the file as an NPM `package.json` file. - try { - nodeVersion = (_a = JSON.parse(contents).volta) === null || _a === void 0 ? void 0 : _a.node; - if (!nodeVersion) - nodeVersion = (_b = JSON.parse(contents).engines) === null || _b === void 0 ? void 0 : _b.node; - } - catch (_d) { - core.warning('Node version file is not JSON file'); - } - if (!nodeVersion) { - const found = contents.match(/^(?:nodejs\s+)?v?(?[^\s]+)$/m); - nodeVersion = (_c = found === null || found === void 0 ? void 0 : found.groups) === null || _c === void 0 ? void 0 : _c.version; - } - // In the case of an unknown format, - // return as is and evaluate the version separately. - if (!nodeVersion) - nodeVersion = contents.trim(); - return nodeVersion; -} -exports.parseNodeVersionFile = parseNodeVersionFile; -function isLatestSyntax(versionSpec) { - return ['current', 'latest', 'node'].includes(versionSpec); -} + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const os = __nccwpck_require__(2037); +const assert = __importStar(__nccwpck_require__(9491)); +const core = __importStar(__nccwpck_require__(2186)); +const hc = __importStar(__nccwpck_require__(9925)); +const io = __importStar(__nccwpck_require__(7436)); +const tc = __importStar(__nccwpck_require__(7784)); +const path = __importStar(__nccwpck_require__(1017)); +const semver = __importStar(__nccwpck_require__(5911)); +const fs = __nccwpck_require__(7147); +function getNode(versionSpec, stable, checkLatest, auth, arch = os.arch()) { + return __awaiter(this, void 0, void 0, function* () { + // Store manifest data to avoid multiple calls + let manifest; + let nodeVersions; + let osPlat = os.platform(); + let osArch = translateArchToDistUrl(arch); + if (isLtsAlias(versionSpec)) { + core.info('Attempt to resolve LTS alias from manifest...'); + // No try-catch since it's not possible to resolve LTS alias without manifest + manifest = yield getManifest(auth); + versionSpec = resolveLtsAliasFromManifest(versionSpec, stable, manifest); + } + if (isLatestSyntax(versionSpec)) { + nodeVersions = yield getVersionsFromDist(); + versionSpec = yield queryDistForMatch(versionSpec, arch, nodeVersions); + core.info(`getting latest node version...`); + } + if (checkLatest) { + core.info('Attempt to resolve the latest version from manifest...'); + const resolvedVersion = yield resolveVersionFromManifest(versionSpec, stable, auth, osArch, manifest); + if (resolvedVersion) { + versionSpec = resolvedVersion; + core.info(`Resolved as '${versionSpec}'`); + } + else { + core.info(`Failed to resolve version ${versionSpec} from manifest`); + } + } + // check cache + let toolPath; + toolPath = tc.find('node', versionSpec, osArch); + // If not found in cache, download + if (toolPath) { + core.info(`Found in cache @ ${toolPath}`); + } + else { + core.info(`Attempting to download ${versionSpec}...`); + let downloadPath = ''; + let info = null; + // + // Try download from internal distribution (popular versions only) + // + try { + info = yield getInfoFromManifest(versionSpec, stable, auth, osArch, manifest); + if (info) { + core.info(`Acquiring ${info.resolvedVersion} - ${info.arch} from ${info.downloadUrl}`); + downloadPath = yield tc.downloadTool(info.downloadUrl, undefined, auth); + } + else { + core.info('Not found in manifest. Falling back to download directly from Node'); + } + } + catch (err) { + // Rate limit? + if (err instanceof tc.HTTPError && + (err.httpStatusCode === 403 || err.httpStatusCode === 429)) { + core.info(`Received HTTP status code ${err.httpStatusCode}. 
This usually indicates the rate limit has been exceeded`); + } + else { + core.info(err.message); + } + core.debug(err.stack); + core.info('Falling back to download directly from Node'); + } + // + // Download from nodejs.org + // + if (!downloadPath) { + info = yield getInfoFromDist(versionSpec, arch, nodeVersions); + if (!info) { + throw new Error(`Unable to find Node version '${versionSpec}' for platform ${osPlat} and architecture ${osArch}.`); + } + core.info(`Acquiring ${info.resolvedVersion} - ${info.arch} from ${info.downloadUrl}`); + try { + downloadPath = yield tc.downloadTool(info.downloadUrl); + } + catch (err) { + if (err instanceof tc.HTTPError && err.httpStatusCode == 404) { + return yield acquireNodeFromFallbackLocation(info.resolvedVersion, info.arch); + } + throw err; + } + } + // + // Extract + // + core.info('Extracting ...'); + let extPath; + info = info || {}; // satisfy compiler, never null when reaches here + if (osPlat == 'win32') { + let _7zPath = path.join(__dirname, '../..', 'externals', '7zr.exe'); + extPath = yield tc.extract7z(downloadPath, undefined, _7zPath); + // 7z extracts to folder matching file name + let nestedPath = path.join(extPath, path.basename(info.fileName, '.7z')); + if (fs.existsSync(nestedPath)) { + extPath = nestedPath; + } + } + else { + extPath = yield tc.extractTar(downloadPath, undefined, [ + 'xz', + '--strip', + '1' + ]); + } + // + // Install into the local tool cache - node extracts with a root folder that matches the fileName downloaded + // + core.info('Adding to the cache ...'); + toolPath = yield tc.cacheDir(extPath, 'node', info.resolvedVersion, info.arch); + core.info('Done'); + } + // + // a tool installer initimately knows details about the layout of that tool + // for example, node binary is in the bin folder after the extract on Mac/Linux. + // layouts could change by version, by platform etc... but that's the tool installers job + // + if (osPlat != 'win32') { + toolPath = path.join(toolPath, 'bin'); + } + // + // prepend the tools path. instructs the agent to prepend for future tasks + core.addPath(toolPath); + }); +} +exports.getNode = getNode; +function isLtsAlias(versionSpec) { + return versionSpec.startsWith('lts/'); +} +function getManifest(auth) { + core.debug('Getting manifest from actions/node-versions@main'); + return tc.getManifestFromRepo('actions', 'node-versions', auth, 'main'); +} +function resolveLtsAliasFromManifest(versionSpec, stable, manifest) { + var _a; + const alias = (_a = versionSpec.split('lts/')[1]) === null || _a === void 0 ? void 0 : _a.toLowerCase(); + if (!alias) { + throw new Error(`Unable to parse LTS alias for Node version '${versionSpec}'`); + } + core.debug(`LTS alias '${alias}' for Node version '${versionSpec}'`); + // Supported formats are `lts/`, `lts/*`, and `lts/-n`. Where asterisk means highest possible LTS and -n means the nth-highest. + const n = Number(alias); + const aliases = Object.fromEntries(manifest + .filter(x => x.lts && x.stable === stable) + .map(x => [x.lts.toLowerCase(), x]) + .reverse()); + const numbered = Object.values(aliases); + const release = alias === '*' + ? numbered[numbered.length - 1] + : n < 0 + ? 
numbered[numbered.length - 1 + n] + : aliases[alias]; + if (!release) { + throw new Error(`Unable to find LTS release '${alias}' for Node version '${versionSpec}'.`); + } + core.debug(`Found LTS release '${release.version}' for Node version '${versionSpec}'`); + return release.version.split('.')[0]; +} +function getInfoFromManifest(versionSpec, stable, auth, osArch = translateArchToDistUrl(os.arch()), manifest) { + return __awaiter(this, void 0, void 0, function* () { + let info = null; + if (!manifest) { + core.debug('No manifest cached'); + manifest = yield getManifest(auth); + } + const rel = yield tc.findFromManifest(versionSpec, stable, manifest, osArch); + if (rel && rel.files.length > 0) { + info = {}; + info.resolvedVersion = rel.version; + info.arch = rel.files[0].arch; + info.downloadUrl = rel.files[0].download_url; + info.fileName = rel.files[0].filename; + } + return info; + }); +} +function getInfoFromDist(versionSpec, arch = os.arch(), nodeVersions) { + return __awaiter(this, void 0, void 0, function* () { + let osPlat = os.platform(); + let osArch = translateArchToDistUrl(arch); + let version = yield queryDistForMatch(versionSpec, arch, nodeVersions); + if (!version) { + return null; + } + // + // Download - a tool installer intimately knows how to get the tool (and construct urls) + // + version = semver.clean(version) || ''; + let fileName = osPlat == 'win32' + ? `node-v${version}-win-${osArch}` + : `node-v${version}-${osPlat}-${osArch}`; + let urlFileName = osPlat == 'win32' ? `${fileName}.7z` : `${fileName}.tar.gz`; + let url = `https://nodejs.org/dist/v${version}/${urlFileName}`; + return { + downloadUrl: url, + resolvedVersion: version, + arch: arch, + fileName: fileName + }; + }); +} +function resolveVersionFromManifest(versionSpec, stable, auth, osArch = translateArchToDistUrl(os.arch()), manifest) { + return __awaiter(this, void 0, void 0, function* () { + try { + const info = yield getInfoFromManifest(versionSpec, stable, auth, osArch, manifest); + return info === null || info === void 0 ? void 0 : info.resolvedVersion; + } + catch (err) { + core.info('Unable to resolve version from manifest...'); + core.debug(err.message); + } + }); +} +// TODO - should we just export this from @actions/tool-cache? 
Lifted directly from there +function evaluateVersions(versions, versionSpec) { + let version = ''; + core.debug(`evaluating ${versions.length} versions`); + versions = versions.sort((a, b) => { + if (semver.gt(a, b)) { + return 1; + } + return -1; + }); + for (let i = versions.length - 1; i >= 0; i--) { + const potential = versions[i]; + const satisfied = semver.satisfies(potential, versionSpec); + if (satisfied) { + version = potential; + break; + } + } + if (version) { + core.debug(`matched: ${version}`); + } + else { + core.debug('match not found'); + } + return version; +} +function queryDistForMatch(versionSpec, arch = os.arch(), nodeVersions) { + return __awaiter(this, void 0, void 0, function* () { + let osPlat = os.platform(); + let osArch = translateArchToDistUrl(arch); + // node offers a json list of versions + let dataFileName; + switch (osPlat) { + case 'linux': + dataFileName = `linux-${osArch}`; + break; + case 'darwin': + dataFileName = `osx-${osArch}-tar`; + break; + case 'win32': + dataFileName = `win-${osArch}-exe`; + break; + default: + throw new Error(`Unexpected OS '${osPlat}'`); + } + if (!nodeVersions) { + core.debug('No dist manifest cached'); + nodeVersions = yield getVersionsFromDist(); + } + let versions = []; + if (isLatestSyntax(versionSpec)) { + core.info(`getting latest node version...`); + return nodeVersions[0].version; + } + nodeVersions.forEach((nodeVersion) => { + // ensure this version supports your os and platform + if (nodeVersion.files.indexOf(dataFileName) >= 0) { + versions.push(nodeVersion.version); + } + }); + // get the latest version that matches the version spec + let version = evaluateVersions(versions, versionSpec); + return version; + }); +} +function getVersionsFromDist() { + return __awaiter(this, void 0, void 0, function* () { + let dataUrl = 'https://nodejs.org/dist/index.json'; + let httpClient = new hc.HttpClient('setup-node', [], { + allowRetries: true, + maxRetries: 3 + }); + let response = yield httpClient.getJson(dataUrl); + return response.result || []; + }); +} +exports.getVersionsFromDist = getVersionsFromDist; +// For non LTS versions of Node, the files we need (for Windows) are sometimes located +// in a different folder than they normally are for other versions. +// Normally the format is similar to: https://nodejs.org/dist/v5.10.1/node-v5.10.1-win-x64.7z +// In this case, there will be two files located at: +// /dist/v5.10.1/win-x64/node.exe +// /dist/v5.10.1/win-x64/node.lib +// If this is not the structure, there may also be two files located at: +// /dist/v0.12.18/node.exe +// /dist/v0.12.18/node.lib +// This method attempts to download and cache the resources from these alternative locations. +// Note also that the files are normally zipped but in this case they are just an exe +// and lib file in a folder, not zipped. 
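A minimal TypeScript sketch of the two Windows-only download layouts the comment above describes and that the fallback routine below walks through; the version '5.10.1' and arch 'x64' are illustrative values only.

// Illustrative only: build both candidate locations for the node.exe / node.lib
// download described in the comment above.
function fallbackUrls(version: string, osArch: string) {
  return {
    // Usual layout: /dist/v<version>/win-<arch>/node.exe|node.lib
    primary: [
      `https://nodejs.org/dist/v${version}/win-${osArch}/node.exe`,
      `https://nodejs.org/dist/v${version}/win-${osArch}/node.lib`
    ],
    // Alternative layout used by some older releases: /dist/v<version>/node.exe|node.lib
    legacy: [
      `https://nodejs.org/dist/v${version}/node.exe`,
      `https://nodejs.org/dist/v${version}/node.lib`
    ]
  };
}

// e.g. fallbackUrls('5.10.1', 'x64').primary[0]
//   -> 'https://nodejs.org/dist/v5.10.1/win-x64/node.exe'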
+function acquireNodeFromFallbackLocation(version, arch = os.arch()) { + return __awaiter(this, void 0, void 0, function* () { + let osPlat = os.platform(); + let osArch = translateArchToDistUrl(arch); + // Create temporary folder to download in to + const tempDownloadFolder = 'temp_' + Math.floor(Math.random() * 2000000000); + const tempDirectory = process.env['RUNNER_TEMP'] || ''; + assert.ok(tempDirectory, 'Expected RUNNER_TEMP to be defined'); + const tempDir = path.join(tempDirectory, tempDownloadFolder); + yield io.mkdirP(tempDir); + let exeUrl; + let libUrl; + try { + exeUrl = `https://nodejs.org/dist/v${version}/win-${osArch}/node.exe`; + libUrl = `https://nodejs.org/dist/v${version}/win-${osArch}/node.lib`; + core.info(`Downloading only node binary from ${exeUrl}`); + const exePath = yield tc.downloadTool(exeUrl); + yield io.cp(exePath, path.join(tempDir, 'node.exe')); + const libPath = yield tc.downloadTool(libUrl); + yield io.cp(libPath, path.join(tempDir, 'node.lib')); + } + catch (err) { + if (err instanceof tc.HTTPError && err.httpStatusCode == 404) { + exeUrl = `https://nodejs.org/dist/v${version}/node.exe`; + libUrl = `https://nodejs.org/dist/v${version}/node.lib`; + const exePath = yield tc.downloadTool(exeUrl); + yield io.cp(exePath, path.join(tempDir, 'node.exe')); + const libPath = yield tc.downloadTool(libUrl); + yield io.cp(libPath, path.join(tempDir, 'node.lib')); + } + else { + throw err; + } + } + let toolPath = yield tc.cacheDir(tempDir, 'node', version, arch); + core.addPath(toolPath); + return toolPath; + }); +} +// os.arch does not always match the relative download url, e.g. +// os.arch == 'arm' != node-v12.13.1-linux-armv7l.tar.gz +// All other currently supported architectures match, e.g.: +// os.arch = arm64 => https://nodejs.org/dist/v{VERSION}/node-v{VERSION}-{OS}-arm64.tar.gz +// os.arch = x64 => https://nodejs.org/dist/v{VERSION}/node-v{VERSION}-{OS}-x64.tar.gz +function translateArchToDistUrl(arch) { + switch (arch) { + case 'arm': + return 'armv7l'; + default: + return arch; + } +} +function parseNodeVersionFile(contents) { + var _a, _b, _c; + let nodeVersion; + // Try parsing the file as an NPM `package.json` file. + try { + nodeVersion = (_a = JSON.parse(contents).volta) === null || _a === void 0 ? void 0 : _a.node; + if (!nodeVersion) + nodeVersion = (_b = JSON.parse(contents).engines) === null || _b === void 0 ? void 0 : _b.node; + } + catch (_d) { + core.info('Node version file is not JSON file'); + } + if (!nodeVersion) { + const found = contents.match(/^(?:nodejs\s+)?v?(?[^\s]+)$/m); + nodeVersion = (_c = found === null || found === void 0 ? void 0 : found.groups) === null || _c === void 0 ? void 0 : _c.version; + } + // In the case of an unknown format, + // return as is and evaluate the version separately. + if (!nodeVersion) + nodeVersion = contents.trim(); + return nodeVersion; +} +exports.parseNodeVersionFile = parseNodeVersionFile; +function isLatestSyntax(versionSpec) { + return ['current', 'latest', 'node'].includes(versionSpec); +} /***/ }), @@ -73563,108 +73563,108 @@ function isLatestSyntax(versionSpec) { /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; - -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const core = __importStar(__nccwpck_require__(2186)); -const exec = __importStar(__nccwpck_require__(1514)); -const installer = __importStar(__nccwpck_require__(2574)); -const fs_1 = __importDefault(__nccwpck_require__(7147)); -const auth = __importStar(__nccwpck_require__(7573)); -const path = __importStar(__nccwpck_require__(1017)); -const cache_restore_1 = __nccwpck_require__(9517); -const cache_utils_1 = __nccwpck_require__(1678); -const os = __nccwpck_require__(2037); -function run() { - return __awaiter(this, void 0, void 0, function* () { - try { - // - // Version is optional. If supplied, install / use from the tool cache - // If not supplied then task is still used to setup proxy, auth, etc... - // - let version = resolveVersionInput(); - let arch = core.getInput('architecture'); - const cache = core.getInput('cache'); - // if architecture supplied but node-version is not - // if we don't throw a warning, the already installed x64 node will be used which is not probably what user meant. - if (arch && !version) { - core.warning('`architecture` is provided but `node-version` is missing. In this configuration, the version/architecture of Node will not be changed. To fix this, provide `architecture` in combination with `node-version`'); - } - if (!arch) { - arch = os.arch(); - } - if (version) { - let token = core.getInput('token'); - let auth = !token || cache_utils_1.isGhes() ? 
undefined : `token ${token}`; - let stable = (core.getInput('stable') || 'true').toUpperCase() === 'TRUE'; - const checkLatest = (core.getInput('check-latest') || 'false').toUpperCase() === 'TRUE'; - yield installer.getNode(version, stable, checkLatest, auth, arch); - } - // Output version of node is being used - try { - const { stdout: installedVersion } = yield exec.getExecOutput('node', ['--version'], { ignoreReturnCode: true, silent: true }); - core.setOutput('node-version', installedVersion.trim()); - } - catch (err) { - core.setOutput('node-version', ''); - } - const registryUrl = core.getInput('registry-url'); - const alwaysAuth = core.getInput('always-auth'); - if (registryUrl) { - auth.configAuthentication(registryUrl, alwaysAuth); - } - if (cache && cache_utils_1.isCacheFeatureAvailable()) { - const cacheDependencyPath = core.getInput('cache-dependency-path'); - yield cache_restore_1.restoreCache(cache, cacheDependencyPath); - } - const matchersPath = path.join(__dirname, '../..', '.github'); - core.info(`##[add-matcher]${path.join(matchersPath, 'tsc.json')}`); - core.info(`##[add-matcher]${path.join(matchersPath, 'eslint-stylish.json')}`); - core.info(`##[add-matcher]${path.join(matchersPath, 'eslint-compact.json')}`); - } - catch (err) { - core.setFailed(err.message); - } - }); -} -exports.run = run; -function resolveVersionInput() { - let version = core.getInput('node-version'); - const versionFileInput = core.getInput('node-version-file'); - if (version && versionFileInput) { - core.warning('Both node-version and node-version-file inputs are specified, only node-version will be used'); - } - if (version) { - return version; - } - if (versionFileInput) { - const versionFilePath = path.join(process.env.GITHUB_WORKSPACE, versionFileInput); - if (!fs_1.default.existsSync(versionFilePath)) { - throw new Error(`The specified node version file at: ${versionFilePath} does not exist`); - } - version = installer.parseNodeVersionFile(fs_1.default.readFileSync(versionFilePath, 'utf8')); - core.info(`Resolved ${versionFileInput} as ${version}`); - } - return version; -} + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const core = __importStar(__nccwpck_require__(2186)); +const exec = __importStar(__nccwpck_require__(1514)); +const installer = __importStar(__nccwpck_require__(2574)); +const fs_1 = __importDefault(__nccwpck_require__(7147)); +const auth = __importStar(__nccwpck_require__(7573)); +const path = __importStar(__nccwpck_require__(1017)); +const cache_restore_1 = __nccwpck_require__(9517); +const cache_utils_1 = __nccwpck_require__(1678); +const os = __nccwpck_require__(2037); +function run() { + return __awaiter(this, void 0, void 0, function* () { + try { + // + // Version is optional. If supplied, install / use from the tool cache + // If not supplied then task is still used to setup proxy, auth, etc... + // + let version = resolveVersionInput(); + let arch = core.getInput('architecture'); + const cache = core.getInput('cache'); + // if architecture supplied but node-version is not + // if we don't throw a warning, the already installed x64 node will be used which is not probably what user meant. + if (arch && !version) { + core.warning('`architecture` is provided but `node-version` is missing. In this configuration, the version/architecture of Node will not be changed. To fix this, provide `architecture` in combination with `node-version`'); + } + if (!arch) { + arch = os.arch(); + } + if (version) { + let token = core.getInput('token'); + let auth = !token || cache_utils_1.isGhes() ? undefined : `token ${token}`; + let stable = (core.getInput('stable') || 'true').toUpperCase() === 'TRUE'; + const checkLatest = (core.getInput('check-latest') || 'false').toUpperCase() === 'TRUE'; + yield installer.getNode(version, stable, checkLatest, auth, arch); + } + // Output version of node is being used + try { + const { stdout: installedVersion } = yield exec.getExecOutput('node', ['--version'], { ignoreReturnCode: true, silent: true }); + core.setOutput('node-version', installedVersion.trim()); + } + catch (err) { + core.setOutput('node-version', ''); + } + const registryUrl = core.getInput('registry-url'); + const alwaysAuth = core.getInput('always-auth'); + if (registryUrl) { + auth.configAuthentication(registryUrl, alwaysAuth); + } + if (cache && cache_utils_1.isCacheFeatureAvailable()) { + const cacheDependencyPath = core.getInput('cache-dependency-path'); + yield cache_restore_1.restoreCache(cache, cacheDependencyPath); + } + const matchersPath = path.join(__dirname, '../..', '.github'); + core.info(`##[add-matcher]${path.join(matchersPath, 'tsc.json')}`); + core.info(`##[add-matcher]${path.join(matchersPath, 'eslint-stylish.json')}`); + core.info(`##[add-matcher]${path.join(matchersPath, 'eslint-compact.json')}`); + } + catch (err) { + core.setFailed(err.message); + } + }); +} +exports.run = run; +function resolveVersionInput() { + let version = core.getInput('node-version'); + const versionFileInput = core.getInput('node-version-file'); + if (version && versionFileInput) { + core.warning('Both node-version and node-version-file inputs are specified, only node-version will be used'); + } + if (version) { + return version; + } + if (versionFileInput) { + const versionFilePath = path.join(process.env.GITHUB_WORKSPACE, versionFileInput); + if (!fs_1.default.existsSync(versionFilePath)) { + throw new Error(`The specified node version file at: ${versionFilePath} does not exist`); + } + version = installer.parseNodeVersionFile(fs_1.default.readFileSync(versionFilePath, 'utf8')); + core.info(`Resolved 
${versionFileInput} as ${version}`); + } + return version; +} /***/ }), @@ -73920,10 +73920,10 @@ var __webpack_exports__ = {}; (() => { "use strict"; var exports = __webpack_exports__; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const main_1 = __nccwpck_require__(399); -main_1.run(); + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const main_1 = __nccwpck_require__(399); +main_1.run(); })(); diff --git a/src/installer.ts b/src/installer.ts index bd9c8423..83a43d85 100644 --- a/src/installer.ts +++ b/src/installer.ts @@ -502,7 +502,7 @@ export function parseNodeVersionFile(contents: string): string { nodeVersion = JSON.parse(contents).volta?.node; if (!nodeVersion) nodeVersion = JSON.parse(contents).engines?.node; } catch { - core.warning('Node version file is not JSON file'); + core.info('Node version file is not JSON file'); } if (!nodeVersion) {