From af6aca3df9633235cbb345551dcbf5e4a2237d30 Mon Sep 17 00:00:00 2001 From: Aidan Lee Date: Sat, 22 Jun 2024 21:01:02 +0100 Subject: [PATCH] set fallback lib search path for latest haxe nightlies --- dist/cache-save/index.js | 215 ++++--- dist/index.js | 1151 ++++++++++++++++++++++---------------- src/setup.ts | 2 + 3 files changed, 802 insertions(+), 566 deletions(-) diff --git a/dist/cache-save/index.js b/dist/cache-save/index.js index be7b6ac..e044d00 100644 --- a/dist/cache-save/index.js +++ b/dist/cache-save/index.js @@ -2647,8 +2647,11 @@ var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || // Max safe segment length for coercion. var MAX_SAFE_COMPONENT_LENGTH = 16 +var MAX_SAFE_BUILD_LENGTH = MAX_LENGTH - 6 + // The actual regexps go on exports.re var re = exports.re = [] +var safeRe = exports.safeRe = [] var src = exports.src = [] var t = exports.tokens = {} var R = 0 @@ -2657,6 +2660,31 @@ function tok (n) { t[n] = R++ } +var LETTERDASHNUMBER = '[a-zA-Z0-9-]' + +// Replace some greedy regex tokens to prevent regex dos issues. These regex are +// used internally via the safeRe object since all inputs in this library get +// normalized first to trim and collapse all extra whitespace. The original +// regexes are exported for userland consumption and lower level usage. A +// future breaking change could export the safer regex only with a note that +// all input should have extra whitespace removed. +var safeRegexReplacements = [ + ['\\s', 1], + ['\\d', MAX_LENGTH], + [LETTERDASHNUMBER, MAX_SAFE_BUILD_LENGTH], +] + +function makeSafeRe (value) { + for (var i = 0; i < safeRegexReplacements.length; i++) { + var token = safeRegexReplacements[i][0] + var max = safeRegexReplacements[i][1] + value = value + .split(token + '*').join(token + '{0,' + max + '}') + .split(token + '+').join(token + '{1,' + max + '}') + } + return value +} + // The following Regular Expressions can be used for tokenizing, // validating, and parsing SemVer version strings. @@ -2666,14 +2694,14 @@ function tok (n) { tok('NUMERICIDENTIFIER') src[t.NUMERICIDENTIFIER] = '0|[1-9]\\d*' tok('NUMERICIDENTIFIERLOOSE') -src[t.NUMERICIDENTIFIERLOOSE] = '[0-9]+' +src[t.NUMERICIDENTIFIERLOOSE] = '\\d+' // ## Non-numeric Identifier // Zero or more digits, followed by a letter or hyphen, and then zero or // more letters, digits, or hyphens. tok('NONNUMERICIDENTIFIER') -src[t.NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*' +src[t.NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-]' + LETTERDASHNUMBER + '*' // ## Main Version // Three dot-separated numeric identifiers. @@ -2715,7 +2743,7 @@ src[t.PRERELEASELOOSE] = '(?:-?(' + src[t.PRERELEASEIDENTIFIERLOOSE] + // Any combination of digits, letters, or hyphens. tok('BUILDIDENTIFIER') -src[t.BUILDIDENTIFIER] = '[0-9A-Za-z-]+' +src[t.BUILDIDENTIFIER] = LETTERDASHNUMBER + '+' // ## Build Metadata // Plus sign, followed by one or more period-separated build metadata @@ -2795,6 +2823,7 @@ src[t.COERCE] = '(^|[^\\d])' + '(?:$|[^\\d])' tok('COERCERTL') re[t.COERCERTL] = new RegExp(src[t.COERCE], 'g') +safeRe[t.COERCERTL] = new RegExp(makeSafeRe(src[t.COERCE]), 'g') // Tilde ranges. 
// Meaning is "reasonably at or greater than" @@ -2804,6 +2833,7 @@ src[t.LONETILDE] = '(?:~>?)' tok('TILDETRIM') src[t.TILDETRIM] = '(\\s*)' + src[t.LONETILDE] + '\\s+' re[t.TILDETRIM] = new RegExp(src[t.TILDETRIM], 'g') +safeRe[t.TILDETRIM] = new RegExp(makeSafeRe(src[t.TILDETRIM]), 'g') var tildeTrimReplace = '$1~' tok('TILDE') @@ -2819,6 +2849,7 @@ src[t.LONECARET] = '(?:\\^)' tok('CARETTRIM') src[t.CARETTRIM] = '(\\s*)' + src[t.LONECARET] + '\\s+' re[t.CARETTRIM] = new RegExp(src[t.CARETTRIM], 'g') +safeRe[t.CARETTRIM] = new RegExp(makeSafeRe(src[t.CARETTRIM]), 'g') var caretTrimReplace = '$1^' tok('CARET') @@ -2840,6 +2871,7 @@ src[t.COMPARATORTRIM] = '(\\s*)' + src[t.GTLT] + // this one has to use the /g flag re[t.COMPARATORTRIM] = new RegExp(src[t.COMPARATORTRIM], 'g') +safeRe[t.COMPARATORTRIM] = new RegExp(makeSafeRe(src[t.COMPARATORTRIM]), 'g') var comparatorTrimReplace = '$1$2$3' // Something like `1.2.3 - 1.2.4` @@ -2868,6 +2900,14 @@ for (var i = 0; i < R; i++) { debug(i, src[i]) if (!re[i]) { re[i] = new RegExp(src[i]) + + // Replace all greedy whitespace to prevent regex dos issues. These regex are + // used internally via the safeRe object since all inputs in this library get + // normalized first to trim and collapse all extra whitespace. The original + // regexes are exported for userland consumption and lower level usage. A + // future breaking change could export the safer regex only with a note that + // all input should have extra whitespace removed. + safeRe[i] = new RegExp(makeSafeRe(src[i])) } } @@ -2892,7 +2932,7 @@ function parse (version, options) { return null } - var r = options.loose ? re[t.LOOSE] : re[t.FULL] + var r = options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL] if (!r.test(version)) { return null } @@ -2947,7 +2987,7 @@ function SemVer (version, options) { this.options = options this.loose = !!options.loose - var m = version.trim().match(options.loose ? re[t.LOOSE] : re[t.FULL]) + var m = version.trim().match(options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL]) if (!m) { throw new TypeError('Invalid Version: ' + version) @@ -3392,6 +3432,7 @@ function Comparator (comp, options) { return new Comparator(comp, options) } + comp = comp.trim().split(/\s+/).join(' ') debug('comparator', comp, options) this.options = options this.loose = !!options.loose @@ -3408,7 +3449,7 @@ function Comparator (comp, options) { var ANY = {} Comparator.prototype.parse = function (comp) { - var r = this.options.loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR] + var r = this.options.loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR] var m = comp.match(r) if (!m) { @@ -3532,9 +3573,16 @@ function Range (range, options) { this.loose = !!options.loose this.includePrerelease = !!options.includePrerelease - // First, split based on boolean or || + // First reduce all whitespace as much as possible so we do not have to rely + // on potentially slow regexes like \s*. This is then stored and used for + // future error messages as well. 
this.raw = range - this.set = range.split(/\s*\|\|\s*/).map(function (range) { + .trim() + .split(/\s+/) + .join(' ') + + // First, split based on boolean or || + this.set = this.raw.split('||').map(function (range) { return this.parseRange(range.trim()) }, this).filter(function (c) { // throw out any that are not relevant for whatever reason @@ -3542,7 +3590,7 @@ function Range (range, options) { }) if (!this.set.length) { - throw new TypeError('Invalid SemVer Range: ' + range) + throw new TypeError('Invalid SemVer Range: ' + this.raw) } this.format() @@ -3561,20 +3609,19 @@ Range.prototype.toString = function () { Range.prototype.parseRange = function (range) { var loose = this.options.loose - range = range.trim() // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` - var hr = loose ? re[t.HYPHENRANGELOOSE] : re[t.HYPHENRANGE] + var hr = loose ? safeRe[t.HYPHENRANGELOOSE] : safeRe[t.HYPHENRANGE] range = range.replace(hr, hyphenReplace) debug('hyphen replace', range) // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` - range = range.replace(re[t.COMPARATORTRIM], comparatorTrimReplace) - debug('comparator trim', range, re[t.COMPARATORTRIM]) + range = range.replace(safeRe[t.COMPARATORTRIM], comparatorTrimReplace) + debug('comparator trim', range, safeRe[t.COMPARATORTRIM]) // `~ 1.2.3` => `~1.2.3` - range = range.replace(re[t.TILDETRIM], tildeTrimReplace) + range = range.replace(safeRe[t.TILDETRIM], tildeTrimReplace) // `^ 1.2.3` => `^1.2.3` - range = range.replace(re[t.CARETTRIM], caretTrimReplace) + range = range.replace(safeRe[t.CARETTRIM], caretTrimReplace) // normalize spaces range = range.split(/\s+/).join(' ') @@ -3582,7 +3629,7 @@ Range.prototype.parseRange = function (range) { // At this point, the range is completely trimmed and // ready to be split into comparators. - var compRe = loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR] + var compRe = loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR] var set = range.split(' ').map(function (comp) { return parseComparator(comp, this.options) }, this).join(' ').split(/\s+/) @@ -3682,7 +3729,7 @@ function replaceTildes (comp, options) { } function replaceTilde (comp, options) { - var r = options.loose ? re[t.TILDELOOSE] : re[t.TILDE] + var r = options.loose ? safeRe[t.TILDELOOSE] : safeRe[t.TILDE] return comp.replace(r, function (_, M, m, p, pr) { debug('tilde', comp, _, M, m, p, pr) var ret @@ -3723,7 +3770,7 @@ function replaceCarets (comp, options) { function replaceCaret (comp, options) { debug('caret', comp, options) - var r = options.loose ? re[t.CARETLOOSE] : re[t.CARET] + var r = options.loose ? safeRe[t.CARETLOOSE] : safeRe[t.CARET] return comp.replace(r, function (_, M, m, p, pr) { debug('caret', comp, _, M, m, p, pr) var ret @@ -3782,7 +3829,7 @@ function replaceXRanges (comp, options) { function replaceXRange (comp, options) { comp = comp.trim() - var r = options.loose ? re[t.XRANGELOOSE] : re[t.XRANGE] + var r = options.loose ? safeRe[t.XRANGELOOSE] : safeRe[t.XRANGE] return comp.replace(r, function (ret, gtlt, M, m, p, pr) { debug('xRange', comp, ret, gtlt, M, m, p, pr) var xM = isX(M) @@ -3857,7 +3904,7 @@ function replaceXRange (comp, options) { function replaceStars (comp, options) { debug('replaceStars', comp, options) // Looseness is ignored here. star is always as loose as it gets! 
- return comp.trim().replace(re[t.STAR], '') + return comp.trim().replace(safeRe[t.STAR], '') } // This function is passed to string.replace(re[t.HYPHENRANGE]) @@ -4183,7 +4230,7 @@ function coerce (version, options) { var match = null if (!options.rtl) { - match = version.match(re[t.COERCE]) + match = version.match(safeRe[t.COERCE]) } else { // Find the right-most coercible string that does not share // a terminus with a more left-ward coercible string. @@ -4194,17 +4241,17 @@ function coerce (version, options) { // Stop when we get a match that ends at the string end, since no // coercible string can be more right-ward without the same terminus. var next - while ((next = re[t.COERCERTL].exec(version)) && + while ((next = safeRe[t.COERCERTL].exec(version)) && (!match || match.index + match[0].length !== version.length) ) { if (!match || next.index + next[0].length !== match.index + match[0].length) { match = next } - re[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length + safeRe[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length } // leave it in a clean state - re[t.COERCERTL].lastIndex = -1 + safeRe[t.COERCERTL].lastIndex = -1 } if (match === null) { @@ -59008,20 +59055,20 @@ __nccwpck_require__.r(__webpack_exports__); /* harmony import */ var _actions_core__WEBPACK_IMPORTED_MODULE_0__ = __nccwpck_require__(2186); /* harmony import */ var _actions_core__WEBPACK_IMPORTED_MODULE_0___default = /*#__PURE__*/__nccwpck_require__.n(_actions_core__WEBPACK_IMPORTED_MODULE_0__); /* harmony import */ var _haxelib__WEBPACK_IMPORTED_MODULE_1__ = __nccwpck_require__(6191); - - -async function run() { - try { - const cacheDependencyPath = _actions_core__WEBPACK_IMPORTED_MODULE_0__.getInput('cache-dependency-path'); - if (cacheDependencyPath.length > 0) { - await (0,_haxelib__WEBPACK_IMPORTED_MODULE_1__/* .saveHaxelib */ .X)(); - } - } - catch (error) { // eslint-disable-line @typescript-eslint/no-implicit-any-catch - _actions_core__WEBPACK_IMPORTED_MODULE_0__.setFailed(error.message); - } -} -await run(); + + +async function run() { + try { + const cacheDependencyPath = _actions_core__WEBPACK_IMPORTED_MODULE_0__.getInput('cache-dependency-path'); + if (cacheDependencyPath.length > 0) { + await (0,_haxelib__WEBPACK_IMPORTED_MODULE_1__/* .saveHaxelib */ .X)(); + } + } + catch (error) { // eslint-disable-line @typescript-eslint/no-implicit-any-catch + _actions_core__WEBPACK_IMPORTED_MODULE_0__.setFailed(error.message); + } +} +await run(); __webpack_async_result__(); } catch(e) { __webpack_async_result__(e); } }, 1); @@ -59049,52 +59096,52 @@ var lib_cache = __nccwpck_require__(7799); // EXTERNAL MODULE: ./node_modules/@actions/glob/lib/glob.js var lib_glob = __nccwpck_require__(8090); ;// CONCATENATED MODULE: ./src/haxelib.ts - - - - -var State; -(function (State) { - State["CachePrimaryKey"] = "PRIMARY_KEY"; - State["CacheRestoreResult"] = "RESTORE_RESULT"; - State["CacheHaxelibPath"] = "HAXELIB_PATH"; -})(State || (State = {})); -async function createHaxelibKey(platform, version, cacheDependencyPath) { - const fileHash = await glob.hashFiles(cacheDependencyPath); - if (!fileHash) { - throw new Error('Some specified paths were not resolved, unable to cache dependencies.'); - } - return `haxelib-cache-${platform}-haxe${version}-${fileHash}`; -} -async function restoreHaxelib(primaryKey, haxelibPath) { - core.saveState(State.CachePrimaryKey, primaryKey); - core.saveState(State.CacheHaxelibPath, haxelibPath); - const restoreResult = await 
cache.restoreCache([haxelibPath], primaryKey); - core.setOutput('cache-hit', Boolean(restoreResult)); - if (!restoreResult) { - core.info('haxelib cache is not found'); - return; - } - core.saveState(State.CacheRestoreResult, restoreResult); - core.info(`Cache restored from key: ${restoreResult}`); -} -async function saveHaxelib() { - const restoreResult = lib_core.getState(State.CacheRestoreResult); - const primaryKey = lib_core.getState(State.CachePrimaryKey); - const haxelibPath = lib_core.getState(State.CacheHaxelibPath); - if (!external_node_fs_namespaceObject.existsSync(haxelibPath)) { - throw new Error(`Cache folder path is retrieved but doesn't exist on disk: ${haxelibPath}`); - } - if (primaryKey === restoreResult) { - lib_core.info(`Cache hit occurred on the primary key ${primaryKey}, not saving cache.`); - return; - } - const cacheId = await lib_cache.saveCache([haxelibPath], primaryKey); - if (cacheId === -1) { - return; - } - lib_core.info(`Cache saved with the key: ${primaryKey}`); -} + + + + +var State; +(function (State) { + State["CachePrimaryKey"] = "PRIMARY_KEY"; + State["CacheRestoreResult"] = "RESTORE_RESULT"; + State["CacheHaxelibPath"] = "HAXELIB_PATH"; +})(State || (State = {})); +async function createHaxelibKey(platform, version, cacheDependencyPath) { + const fileHash = await glob.hashFiles(cacheDependencyPath); + if (!fileHash) { + throw new Error('Some specified paths were not resolved, unable to cache dependencies.'); + } + return `haxelib-cache-${platform}-haxe${version}-${fileHash}`; +} +async function restoreHaxelib(primaryKey, haxelibPath) { + core.saveState(State.CachePrimaryKey, primaryKey); + core.saveState(State.CacheHaxelibPath, haxelibPath); + const restoreResult = await cache.restoreCache([haxelibPath], primaryKey); + core.setOutput('cache-hit', Boolean(restoreResult)); + if (!restoreResult) { + core.info('haxelib cache is not found'); + return; + } + core.saveState(State.CacheRestoreResult, restoreResult); + core.info(`Cache restored from key: ${restoreResult}`); +} +async function saveHaxelib() { + const restoreResult = lib_core.getState(State.CacheRestoreResult); + const primaryKey = lib_core.getState(State.CachePrimaryKey); + const haxelibPath = lib_core.getState(State.CacheHaxelibPath); + if (!external_node_fs_namespaceObject.existsSync(haxelibPath)) { + throw new Error(`Cache folder path is retrieved but doesn't exist on disk: ${haxelibPath}`); + } + if (primaryKey === restoreResult) { + lib_core.info(`Cache hit occurred on the primary key ${primaryKey}, not saving cache.`); + return; + } + const cacheId = await lib_cache.saveCache([haxelibPath], primaryKey); + if (cacheId === -1) { + return; + } + lib_core.info(`Cache saved with the key: ${primaryKey}`); +} /***/ }), diff --git a/dist/index.js b/dist/index.js index 632ce30..ac996e4 100644 --- a/dist/index.js +++ b/dist/index.js @@ -12,28 +12,28 @@ __nccwpck_require__.r(__webpack_exports__); /* harmony import */ var semver__WEBPACK_IMPORTED_MODULE_1__ = __nccwpck_require__(1383); /* harmony import */ var semver__WEBPACK_IMPORTED_MODULE_1___default = /*#__PURE__*/__nccwpck_require__.n(semver__WEBPACK_IMPORTED_MODULE_1__); /* harmony import */ var _setup__WEBPACK_IMPORTED_MODULE_2__ = __nccwpck_require__(3582); -// Copyright (c) 2020 Sho Kuroda -// -// This software is released under the MIT License. 
-// https://opensource.org/licenses/MIT - - - -async function main() { - try { - const inputVersion = _actions_core__WEBPACK_IMPORTED_MODULE_0__.getInput('haxe-version'); - const cacheDependencyPath = _actions_core__WEBPACK_IMPORTED_MODULE_0__.getInput('cache-dependency-path'); - const nightly = /^(\d{4}-\d{2}-\d{2}_[\w.-]+_\w+)|latest$/.test(inputVersion); - const version = nightly ? inputVersion : semver__WEBPACK_IMPORTED_MODULE_1__.valid(semver__WEBPACK_IMPORTED_MODULE_1__.clean(inputVersion)); - if (version) { - await (0,_setup__WEBPACK_IMPORTED_MODULE_2__/* .setup */ .c)(version, nightly, cacheDependencyPath); - } - } - catch (error) { // eslint-disable-line @typescript-eslint/no-implicit-any-catch - _actions_core__WEBPACK_IMPORTED_MODULE_0__.setFailed(error.message); - } -} -await main(); +// Copyright (c) 2020 Sho Kuroda +// +// This software is released under the MIT License. +// https://opensource.org/licenses/MIT + + + +async function main() { + try { + const inputVersion = _actions_core__WEBPACK_IMPORTED_MODULE_0__.getInput('haxe-version'); + const cacheDependencyPath = _actions_core__WEBPACK_IMPORTED_MODULE_0__.getInput('cache-dependency-path'); + const nightly = /^(\d{4}-\d{2}-\d{2}_[\w.-]+_\w+)|latest$/.test(inputVersion); + const version = nightly ? inputVersion : semver__WEBPACK_IMPORTED_MODULE_1__.valid(semver__WEBPACK_IMPORTED_MODULE_1__.clean(inputVersion)); + if (version) { + await (0,_setup__WEBPACK_IMPORTED_MODULE_2__/* .setup */ .c)(version, nightly, cacheDependencyPath); + } + } + catch (error) { // eslint-disable-line @typescript-eslint/no-implicit-any-catch + _actions_core__WEBPACK_IMPORTED_MODULE_0__.setFailed(error.message); + } +} +await main(); __webpack_async_result__(); } catch(e) { __webpack_async_result__(e); } }, 1); @@ -63,291 +63,292 @@ const external_node_os_namespaceObject = require("node:os"); // EXTERNAL MODULE: ./node_modules/@actions/tool-cache/lib/tool-cache.js var tool_cache = __nccwpck_require__(7784); ;// CONCATENATED MODULE: ./lib/asset.js -// Copyright (c) 2020 Sho Kuroda -// -// This software is released under the MIT License. 
-// https://opensource.org/licenses/MIT - - - - - - -class Asset { - name; - version; - env; - constructor(name, version, env) { - this.name = name; - this.version = version; - this.env = env; - } - async setup() { - const toolPath = tool_cache.find(this.name, this.version); - if (toolPath) { - return toolPath; - } - return tool_cache.cacheDir(await this.download(), this.name, this.version); - } - makeDownloadUrl(path) { - return `https://github.com/HaxeFoundation${path}`; - } - get fileExt() { - switch (this.env.platform) { - case 'win': { - return '.zip'; - } - default: { - return '.tar.gz'; - } - } - } - async download() { - const downloadPath = await tool_cache.downloadTool(this.downloadUrl); - const extractPath = await this.extract(downloadPath, this.fileNameWithoutExt, this.fileExt); - const toolRoot = await this.findToolRoot(extractPath, this.isDirectoryNested); - if (!toolRoot) { - throw new Error(`tool directory not found: ${extractPath}`); - } - lib_core.debug(`found toolRoot: ${toolRoot}`); - return toolRoot; - } - async extract(file, dest, ext) { - if (external_node_fs_namespaceObject.existsSync(dest)) { - external_node_fs_namespaceObject.rmdirSync(dest, { recursive: true }); - } - switch (ext) { - case '.tar.gz': { - return tool_cache.extractTar(file, dest); - } - case '.zip': { - return tool_cache.extractZip(file, dest); - } - default: { - throw new Error(`unknown ext: ${ext}`); // eslint-disable-line @typescript-eslint/restrict-template-expressions - } - } - } - // * NOTE: tar xz -C haxe-4.0.5-linux64 -f haxe-4.0.5-linux64.tar.gz --> haxe-4.0.5-linux64/haxe_20191217082701_67feacebc - async findToolRoot(extractPath, nested) { - if (!nested) { - return extractPath; - } - let found = false; - let toolRoot = ''; - await (0,exec.exec)('ls', ['-1', extractPath], { - listeners: { - stdout(data) { - const entry = data.toString().trim(); - if (entry.length > 0) { - toolRoot = external_node_path_namespaceObject.join(extractPath, entry); - found = true; - } - }, - }, - }); - return found ? toolRoot : null; - } -} -// * NOTE https://github.com/HaxeFoundation/neko/releases/download/v2-3-0/neko-2.3.0-linux64.tar.gz -// * NOTE https://github.com/HaxeFoundation/neko/releases/download/v2-3-0/neko-2.3.0-osx64.tar.gz -// * NOTE https://github.com/HaxeFoundation/neko/releases/download/v2-3-0/neko-2.3.0-win64.zip -class NekoAsset extends Asset { - static resolveFromHaxeVersion(version) { - const nekoVer = version.startsWith('3.') ? 
'2.1.0' : '2.3.0'; // Haxe 3 only supports neko 2.1 - return new NekoAsset(nekoVer); - } - constructor(version, env = new Env()) { - super('neko', version, env); - } - get downloadUrl() { - const tag = `v${this.version.replace(/\./g, '-')}`; - return super.makeDownloadUrl(`/neko/releases/download/${tag}/${this.fileNameWithoutExt}${this.fileExt}`); - } - get target() { - // No 64bit version of neko 2.1 available for windows - if (this.env.platform === 'win' && this.version.startsWith('2.1')) { - return this.env.platform; - } - return `${this.env.platform}${this.env.arch}`; - } - get fileNameWithoutExt() { - return `neko-${this.version}-${this.target}`; - } - get isDirectoryNested() { - return true; - } -} -// * NOTE https://github.com/HaxeFoundation/haxe/releases/download/4.0.5/haxe-4.0.5-linux64.tar.gz -// * NOTE https://github.com/HaxeFoundation/haxe/releases/download/3.4.7/haxe-3.4.7-win64.zip -class HaxeAsset extends Asset { - nightly = false; - constructor(version, nightly, env = new Env()) { - super('haxe', version, env); - this.nightly = nightly; - } - get downloadUrl() { - if (this.nightly) { - return `https://build.haxe.org/builds/haxe/${this.nightlyTarget}/${this.fileNameWithoutExt}${this.fileExt}`; - } - return super.makeDownloadUrl(`/haxe/releases/download/${this.version}/${this.fileNameWithoutExt}${this.fileExt}`); - } - get target() { - if (this.env.platform === 'osx') { - return this.env.platform; - } - // No 64bit version of neko 2.1 available for windows, thus we can also only use 32bit version of Haxe 3 - if (this.env.platform === 'win' && this.version.startsWith('3.')) { - return this.env.platform; - } - return `${this.env.platform}${this.env.arch}`; - } - get nightlyTarget() { - const plat = this.env.platform; - switch (plat) { - case 'osx': { - return 'mac'; - } - case 'linux': { - return 'linux64'; - } - case 'win': { - return 'windows64'; - } - default: { - throw new Error(`${plat} not supported`); // eslint-disable-line @typescript-eslint/restrict-template-expressions - } - } - } - get fileNameWithoutExt() { - if (this.nightly) { - return `haxe_${this.version}`; - } - return `haxe-${this.version}-${this.target}`; - } - get isDirectoryNested() { - return true; - } -} -class Env { - get platform() { - const plat = external_node_os_namespaceObject.platform(); - switch (plat) { - case 'linux': { - return 'linux'; - } - case 'win32': { - return 'win'; - } - case 'darwin': { - return 'osx'; - } - default: { - throw new Error(`${plat} not supported`); - } - } - } - get arch() { - const arch = external_node_os_namespaceObject.arch(); - switch (arch) { - case 'x64': { - return '64'; - } - default: { - throw new Error(`${arch} not supported`); - } - } - } -} +// Copyright (c) 2020 Sho Kuroda +// +// This software is released under the MIT License. 
+// https://opensource.org/licenses/MIT + + + + + + +class Asset { + name; + version; + env; + constructor(name, version, env) { + this.name = name; + this.version = version; + this.env = env; + } + async setup() { + const toolPath = tool_cache.find(this.name, this.version); + if (toolPath) { + return toolPath; + } + return tool_cache.cacheDir(await this.download(), this.name, this.version); + } + makeDownloadUrl(path) { + return `https://github.com/HaxeFoundation${path}`; + } + get fileExt() { + switch (this.env.platform) { + case 'win': { + return '.zip'; + } + default: { + return '.tar.gz'; + } + } + } + async download() { + const downloadPath = await tool_cache.downloadTool(this.downloadUrl); + const extractPath = await this.extract(downloadPath, this.fileNameWithoutExt, this.fileExt); + const toolRoot = await this.findToolRoot(extractPath, this.isDirectoryNested); + if (!toolRoot) { + throw new Error(`tool directory not found: ${extractPath}`); + } + lib_core.debug(`found toolRoot: ${toolRoot}`); + return toolRoot; + } + async extract(file, dest, ext) { + if (external_node_fs_namespaceObject.existsSync(dest)) { + external_node_fs_namespaceObject.rmdirSync(dest, { recursive: true }); + } + switch (ext) { + case '.tar.gz': { + return tool_cache.extractTar(file, dest); + } + case '.zip': { + return tool_cache.extractZip(file, dest); + } + default: { + throw new Error(`unknown ext: ${ext}`); // eslint-disable-line @typescript-eslint/restrict-template-expressions + } + } + } + // * NOTE: tar xz -C haxe-4.0.5-linux64 -f haxe-4.0.5-linux64.tar.gz --> haxe-4.0.5-linux64/haxe_20191217082701_67feacebc + async findToolRoot(extractPath, nested) { + if (!nested) { + return extractPath; + } + let found = false; + let toolRoot = ''; + await (0,exec.exec)('ls', ['-1', extractPath], { + listeners: { + stdout(data) { + const entry = data.toString().trim(); + if (entry.length > 0) { + toolRoot = external_node_path_namespaceObject.join(extractPath, entry); + found = true; + } + }, + }, + }); + return found ? toolRoot : null; + } +} +// * NOTE https://github.com/HaxeFoundation/neko/releases/download/v2-3-0/neko-2.3.0-linux64.tar.gz +// * NOTE https://github.com/HaxeFoundation/neko/releases/download/v2-3-0/neko-2.3.0-osx64.tar.gz +// * NOTE https://github.com/HaxeFoundation/neko/releases/download/v2-3-0/neko-2.3.0-win64.zip +class NekoAsset extends Asset { + static resolveFromHaxeVersion(version) { + const nekoVer = version.startsWith('3.') ? 
'2.1.0' : '2.3.0'; // Haxe 3 only supports neko 2.1 + return new NekoAsset(nekoVer); + } + constructor(version, env = new Env()) { + super('neko', version, env); + } + get downloadUrl() { + const tag = `v${this.version.replace(/\./g, '-')}`; + return super.makeDownloadUrl(`/neko/releases/download/${tag}/${this.fileNameWithoutExt}${this.fileExt}`); + } + get target() { + // No 64bit version of neko 2.1 available for windows + if (this.env.platform === 'win' && this.version.startsWith('2.1')) { + return this.env.platform; + } + return `${this.env.platform}${this.env.arch}`; + } + get fileNameWithoutExt() { + return `neko-${this.version}-${this.target}`; + } + get isDirectoryNested() { + return true; + } +} +// * NOTE https://github.com/HaxeFoundation/haxe/releases/download/4.0.5/haxe-4.0.5-linux64.tar.gz +// * NOTE https://github.com/HaxeFoundation/haxe/releases/download/3.4.7/haxe-3.4.7-win64.zip +class HaxeAsset extends Asset { + nightly = false; + constructor(version, nightly, env = new Env()) { + super('haxe', version, env); + this.nightly = nightly; + } + get downloadUrl() { + if (this.nightly) { + return `https://build.haxe.org/builds/haxe/${this.nightlyTarget}/${this.fileNameWithoutExt}${this.fileExt}`; + } + return super.makeDownloadUrl(`/haxe/releases/download/${this.version}/${this.fileNameWithoutExt}${this.fileExt}`); + } + get target() { + if (this.env.platform === 'osx') { + return this.env.platform; + } + // No 64bit version of neko 2.1 available for windows, thus we can also only use 32bit version of Haxe 3 + if (this.env.platform === 'win' && this.version.startsWith('3.')) { + return this.env.platform; + } + return `${this.env.platform}${this.env.arch}`; + } + get nightlyTarget() { + const plat = this.env.platform; + switch (plat) { + case 'osx': { + return 'mac'; + } + case 'linux': { + return 'linux64'; + } + case 'win': { + return 'windows64'; + } + default: { + throw new Error(`${plat} not supported`); // eslint-disable-line @typescript-eslint/restrict-template-expressions + } + } + } + get fileNameWithoutExt() { + if (this.nightly) { + return `haxe_${this.version}`; + } + return `haxe-${this.version}-${this.target}`; + } + get isDirectoryNested() { + return true; + } +} +class Env { + get platform() { + const plat = external_node_os_namespaceObject.platform(); + switch (plat) { + case 'linux': { + return 'linux'; + } + case 'win32': { + return 'win'; + } + case 'darwin': { + return 'osx'; + } + default: { + throw new Error(`${plat} not supported`); + } + } + } + get arch() { + const arch = external_node_os_namespaceObject.arch(); + switch (arch) { + case 'x64': { + return '64'; + } + default: { + throw new Error(`${arch} not supported`); + } + } + } +} // EXTERNAL MODULE: ./node_modules/@actions/cache/lib/cache.js var lib_cache = __nccwpck_require__(7799); // EXTERNAL MODULE: ./node_modules/@actions/glob/lib/glob.js var glob = __nccwpck_require__(8090); ;// CONCATENATED MODULE: ./lib/haxelib.js - - - - -var State; -(function (State) { - State["CachePrimaryKey"] = "PRIMARY_KEY"; - State["CacheRestoreResult"] = "RESTORE_RESULT"; - State["CacheHaxelibPath"] = "HAXELIB_PATH"; -})(State || (State = {})); -async function createHaxelibKey(platform, version, cacheDependencyPath) { - const fileHash = await glob.hashFiles(cacheDependencyPath); - if (!fileHash) { - throw new Error('Some specified paths were not resolved, unable to cache dependencies.'); - } - return `haxelib-cache-${platform}-haxe${version}-${fileHash}`; -} -async function restoreHaxelib(primaryKey, haxelibPath) { 
- lib_core.saveState(State.CachePrimaryKey, primaryKey); - lib_core.saveState(State.CacheHaxelibPath, haxelibPath); - const restoreResult = await lib_cache.restoreCache([haxelibPath], primaryKey); - lib_core.setOutput('cache-hit', Boolean(restoreResult)); - if (!restoreResult) { - lib_core.info('haxelib cache is not found'); - return; - } - lib_core.saveState(State.CacheRestoreResult, restoreResult); - lib_core.info(`Cache restored from key: ${restoreResult}`); -} -async function saveHaxelib() { - const restoreResult = core.getState(State.CacheRestoreResult); - const primaryKey = core.getState(State.CachePrimaryKey); - const haxelibPath = core.getState(State.CacheHaxelibPath); - if (!fs.existsSync(haxelibPath)) { - throw new Error(`Cache folder path is retrieved but doesn't exist on disk: ${haxelibPath}`); - } - if (primaryKey === restoreResult) { - core.info(`Cache hit occurred on the primary key ${primaryKey}, not saving cache.`); - return; - } - const cacheId = await cache.saveCache([haxelibPath], primaryKey); - if (cacheId === -1) { - return; - } - core.info(`Cache saved with the key: ${primaryKey}`); -} + + + + +var State; +(function (State) { + State["CachePrimaryKey"] = "PRIMARY_KEY"; + State["CacheRestoreResult"] = "RESTORE_RESULT"; + State["CacheHaxelibPath"] = "HAXELIB_PATH"; +})(State || (State = {})); +async function createHaxelibKey(platform, version, cacheDependencyPath) { + const fileHash = await glob.hashFiles(cacheDependencyPath); + if (!fileHash) { + throw new Error('Some specified paths were not resolved, unable to cache dependencies.'); + } + return `haxelib-cache-${platform}-haxe${version}-${fileHash}`; +} +async function restoreHaxelib(primaryKey, haxelibPath) { + lib_core.saveState(State.CachePrimaryKey, primaryKey); + lib_core.saveState(State.CacheHaxelibPath, haxelibPath); + const restoreResult = await lib_cache.restoreCache([haxelibPath], primaryKey); + lib_core.setOutput('cache-hit', Boolean(restoreResult)); + if (!restoreResult) { + lib_core.info('haxelib cache is not found'); + return; + } + lib_core.saveState(State.CacheRestoreResult, restoreResult); + lib_core.info(`Cache restored from key: ${restoreResult}`); +} +async function saveHaxelib() { + const restoreResult = core.getState(State.CacheRestoreResult); + const primaryKey = core.getState(State.CachePrimaryKey); + const haxelibPath = core.getState(State.CacheHaxelibPath); + if (!fs.existsSync(haxelibPath)) { + throw new Error(`Cache folder path is retrieved but doesn't exist on disk: ${haxelibPath}`); + } + if (primaryKey === restoreResult) { + core.info(`Cache hit occurred on the primary key ${primaryKey}, not saving cache.`); + return; + } + const cacheId = await cache.saveCache([haxelibPath], primaryKey); + if (cacheId === -1) { + return; + } + core.info(`Cache saved with the key: ${primaryKey}`); +} ;// CONCATENATED MODULE: ./lib/setup.js -// Copyright (c) 2020 Sho Kuroda -// -// This software is released under the MIT License. 
-// https://opensource.org/licenses/MIT - - - - - -const env = new Env(); -async function setup(version, nightly, cacheDependencyPath) { - const neko = NekoAsset.resolveFromHaxeVersion(version); // Haxelib requires Neko - const nekoPath = await neko.setup(); - lib_core.addPath(nekoPath); - lib_core.exportVariable('NEKOPATH', nekoPath); - lib_core.exportVariable('LD_LIBRARY_PATH', `${nekoPath}:$LD_LIBRARY_PATH`); - const haxe = new HaxeAsset(version, nightly); - const haxePath = await haxe.setup(); - lib_core.addPath(haxePath); - lib_core.exportVariable('HAXE_STD_PATH', external_node_path_namespaceObject.join(haxePath, 'std')); - if (env.platform === 'osx') { - // Ref: https://github.com/asdf-community/asdf-haxe/pull/7 - await (0,exec.exec)('ln', [ - '-sfv', - external_node_path_namespaceObject.join(nekoPath, 'libneko.2.dylib'), - external_node_path_namespaceObject.join(haxePath, 'libneko.2.dylib'), - ]); - } - const haxelibPath = external_node_path_namespaceObject.join(haxePath, 'lib'); - await (0,exec.exec)('haxelib', ['setup', haxelibPath]); - if (cacheDependencyPath.length > 0) { - const key = await createHaxelibKey(haxe.target, version, cacheDependencyPath); - await restoreHaxelib(key, haxelibPath); - } -} +// Copyright (c) 2020 Sho Kuroda +// +// This software is released under the MIT License. +// https://opensource.org/licenses/MIT + + + + + +const env = new Env(); +async function setup(version, nightly, cacheDependencyPath) { + const neko = NekoAsset.resolveFromHaxeVersion(version); // Haxelib requires Neko + const nekoPath = await neko.setup(); + lib_core.addPath(nekoPath); + lib_core.exportVariable('NEKOPATH', nekoPath); + lib_core.exportVariable('LD_LIBRARY_PATH', `${nekoPath}:$LD_LIBRARY_PATH`); + const haxe = new HaxeAsset(version, nightly); + const haxePath = await haxe.setup(); + lib_core.addPath(haxePath); + lib_core.exportVariable('HAXE_STD_PATH', external_node_path_namespaceObject.join(haxePath, 'std')); + if (env.platform === 'osx') { + lib_core.exportVariable('DYLD_FALLBACK_LIBRARY_PATH', `${nekoPath}:$DYLD_FALLBACK_LIBRARY_PATH`); + // Ref: https://github.com/asdf-community/asdf-haxe/pull/7 + await (0,exec.exec)('ln', [ + '-sfv', + external_node_path_namespaceObject.join(nekoPath, 'libneko.2.dylib'), + external_node_path_namespaceObject.join(haxePath, 'libneko.2.dylib'), + ]); + } + const haxelibPath = external_node_path_namespaceObject.join(haxePath, 'lib'); + await (0,exec.exec)('haxelib', ['setup', haxelibPath]); + if (cacheDependencyPath.length > 0) { + const key = await createHaxelibKey(haxe.target, version, cacheDependencyPath); + await restoreHaxelib(key, haxelibPath); + } +} /***/ }), @@ -2998,8 +2999,11 @@ var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || // Max safe segment length for coercion. var MAX_SAFE_COMPONENT_LENGTH = 16 +var MAX_SAFE_BUILD_LENGTH = MAX_LENGTH - 6 + // The actual regexps go on exports.re var re = exports.re = [] +var safeRe = exports.safeRe = [] var src = exports.src = [] var t = exports.tokens = {} var R = 0 @@ -3008,6 +3012,31 @@ function tok (n) { t[n] = R++ } +var LETTERDASHNUMBER = '[a-zA-Z0-9-]' + +// Replace some greedy regex tokens to prevent regex dos issues. These regex are +// used internally via the safeRe object since all inputs in this library get +// normalized first to trim and collapse all extra whitespace. The original +// regexes are exported for userland consumption and lower level usage. 
A +// future breaking change could export the safer regex only with a note that +// all input should have extra whitespace removed. +var safeRegexReplacements = [ + ['\\s', 1], + ['\\d', MAX_LENGTH], + [LETTERDASHNUMBER, MAX_SAFE_BUILD_LENGTH], +] + +function makeSafeRe (value) { + for (var i = 0; i < safeRegexReplacements.length; i++) { + var token = safeRegexReplacements[i][0] + var max = safeRegexReplacements[i][1] + value = value + .split(token + '*').join(token + '{0,' + max + '}') + .split(token + '+').join(token + '{1,' + max + '}') + } + return value +} + // The following Regular Expressions can be used for tokenizing, // validating, and parsing SemVer version strings. @@ -3017,14 +3046,14 @@ function tok (n) { tok('NUMERICIDENTIFIER') src[t.NUMERICIDENTIFIER] = '0|[1-9]\\d*' tok('NUMERICIDENTIFIERLOOSE') -src[t.NUMERICIDENTIFIERLOOSE] = '[0-9]+' +src[t.NUMERICIDENTIFIERLOOSE] = '\\d+' // ## Non-numeric Identifier // Zero or more digits, followed by a letter or hyphen, and then zero or // more letters, digits, or hyphens. tok('NONNUMERICIDENTIFIER') -src[t.NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*' +src[t.NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-]' + LETTERDASHNUMBER + '*' // ## Main Version // Three dot-separated numeric identifiers. @@ -3066,7 +3095,7 @@ src[t.PRERELEASELOOSE] = '(?:-?(' + src[t.PRERELEASEIDENTIFIERLOOSE] + // Any combination of digits, letters, or hyphens. tok('BUILDIDENTIFIER') -src[t.BUILDIDENTIFIER] = '[0-9A-Za-z-]+' +src[t.BUILDIDENTIFIER] = LETTERDASHNUMBER + '+' // ## Build Metadata // Plus sign, followed by one or more period-separated build metadata @@ -3146,6 +3175,7 @@ src[t.COERCE] = '(^|[^\\d])' + '(?:$|[^\\d])' tok('COERCERTL') re[t.COERCERTL] = new RegExp(src[t.COERCE], 'g') +safeRe[t.COERCERTL] = new RegExp(makeSafeRe(src[t.COERCE]), 'g') // Tilde ranges. // Meaning is "reasonably at or greater than" @@ -3155,6 +3185,7 @@ src[t.LONETILDE] = '(?:~>?)' tok('TILDETRIM') src[t.TILDETRIM] = '(\\s*)' + src[t.LONETILDE] + '\\s+' re[t.TILDETRIM] = new RegExp(src[t.TILDETRIM], 'g') +safeRe[t.TILDETRIM] = new RegExp(makeSafeRe(src[t.TILDETRIM]), 'g') var tildeTrimReplace = '$1~' tok('TILDE') @@ -3170,6 +3201,7 @@ src[t.LONECARET] = '(?:\\^)' tok('CARETTRIM') src[t.CARETTRIM] = '(\\s*)' + src[t.LONECARET] + '\\s+' re[t.CARETTRIM] = new RegExp(src[t.CARETTRIM], 'g') +safeRe[t.CARETTRIM] = new RegExp(makeSafeRe(src[t.CARETTRIM]), 'g') var caretTrimReplace = '$1^' tok('CARET') @@ -3191,6 +3223,7 @@ src[t.COMPARATORTRIM] = '(\\s*)' + src[t.GTLT] + // this one has to use the /g flag re[t.COMPARATORTRIM] = new RegExp(src[t.COMPARATORTRIM], 'g') +safeRe[t.COMPARATORTRIM] = new RegExp(makeSafeRe(src[t.COMPARATORTRIM]), 'g') var comparatorTrimReplace = '$1$2$3' // Something like `1.2.3 - 1.2.4` @@ -3219,6 +3252,14 @@ for (var i = 0; i < R; i++) { debug(i, src[i]) if (!re[i]) { re[i] = new RegExp(src[i]) + + // Replace all greedy whitespace to prevent regex dos issues. These regex are + // used internally via the safeRe object since all inputs in this library get + // normalized first to trim and collapse all extra whitespace. The original + // regexes are exported for userland consumption and lower level usage. A + // future breaking change could export the safer regex only with a note that + // all input should have extra whitespace removed. + safeRe[i] = new RegExp(makeSafeRe(src[i])) } } @@ -3243,7 +3284,7 @@ function parse (version, options) { return null } - var r = options.loose ? re[t.LOOSE] : re[t.FULL] + var r = options.loose ? 
safeRe[t.LOOSE] : safeRe[t.FULL] if (!r.test(version)) { return null } @@ -3298,7 +3339,7 @@ function SemVer (version, options) { this.options = options this.loose = !!options.loose - var m = version.trim().match(options.loose ? re[t.LOOSE] : re[t.FULL]) + var m = version.trim().match(options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL]) if (!m) { throw new TypeError('Invalid Version: ' + version) @@ -3743,6 +3784,7 @@ function Comparator (comp, options) { return new Comparator(comp, options) } + comp = comp.trim().split(/\s+/).join(' ') debug('comparator', comp, options) this.options = options this.loose = !!options.loose @@ -3759,7 +3801,7 @@ function Comparator (comp, options) { var ANY = {} Comparator.prototype.parse = function (comp) { - var r = this.options.loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR] + var r = this.options.loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR] var m = comp.match(r) if (!m) { @@ -3883,9 +3925,16 @@ function Range (range, options) { this.loose = !!options.loose this.includePrerelease = !!options.includePrerelease - // First, split based on boolean or || + // First reduce all whitespace as much as possible so we do not have to rely + // on potentially slow regexes like \s*. This is then stored and used for + // future error messages as well. this.raw = range - this.set = range.split(/\s*\|\|\s*/).map(function (range) { + .trim() + .split(/\s+/) + .join(' ') + + // First, split based on boolean or || + this.set = this.raw.split('||').map(function (range) { return this.parseRange(range.trim()) }, this).filter(function (c) { // throw out any that are not relevant for whatever reason @@ -3893,7 +3942,7 @@ function Range (range, options) { }) if (!this.set.length) { - throw new TypeError('Invalid SemVer Range: ' + range) + throw new TypeError('Invalid SemVer Range: ' + this.raw) } this.format() @@ -3912,20 +3961,19 @@ Range.prototype.toString = function () { Range.prototype.parseRange = function (range) { var loose = this.options.loose - range = range.trim() // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` - var hr = loose ? re[t.HYPHENRANGELOOSE] : re[t.HYPHENRANGE] + var hr = loose ? safeRe[t.HYPHENRANGELOOSE] : safeRe[t.HYPHENRANGE] range = range.replace(hr, hyphenReplace) debug('hyphen replace', range) // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` - range = range.replace(re[t.COMPARATORTRIM], comparatorTrimReplace) - debug('comparator trim', range, re[t.COMPARATORTRIM]) + range = range.replace(safeRe[t.COMPARATORTRIM], comparatorTrimReplace) + debug('comparator trim', range, safeRe[t.COMPARATORTRIM]) // `~ 1.2.3` => `~1.2.3` - range = range.replace(re[t.TILDETRIM], tildeTrimReplace) + range = range.replace(safeRe[t.TILDETRIM], tildeTrimReplace) // `^ 1.2.3` => `^1.2.3` - range = range.replace(re[t.CARETTRIM], caretTrimReplace) + range = range.replace(safeRe[t.CARETTRIM], caretTrimReplace) // normalize spaces range = range.split(/\s+/).join(' ') @@ -3933,7 +3981,7 @@ Range.prototype.parseRange = function (range) { // At this point, the range is completely trimmed and // ready to be split into comparators. - var compRe = loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR] + var compRe = loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR] var set = range.split(' ').map(function (comp) { return parseComparator(comp, this.options) }, this).join(' ').split(/\s+/) @@ -4033,7 +4081,7 @@ function replaceTildes (comp, options) { } function replaceTilde (comp, options) { - var r = options.loose ? re[t.TILDELOOSE] : re[t.TILDE] + var r = options.loose ? 
safeRe[t.TILDELOOSE] : safeRe[t.TILDE] return comp.replace(r, function (_, M, m, p, pr) { debug('tilde', comp, _, M, m, p, pr) var ret @@ -4074,7 +4122,7 @@ function replaceCarets (comp, options) { function replaceCaret (comp, options) { debug('caret', comp, options) - var r = options.loose ? re[t.CARETLOOSE] : re[t.CARET] + var r = options.loose ? safeRe[t.CARETLOOSE] : safeRe[t.CARET] return comp.replace(r, function (_, M, m, p, pr) { debug('caret', comp, _, M, m, p, pr) var ret @@ -4133,7 +4181,7 @@ function replaceXRanges (comp, options) { function replaceXRange (comp, options) { comp = comp.trim() - var r = options.loose ? re[t.XRANGELOOSE] : re[t.XRANGE] + var r = options.loose ? safeRe[t.XRANGELOOSE] : safeRe[t.XRANGE] return comp.replace(r, function (ret, gtlt, M, m, p, pr) { debug('xRange', comp, ret, gtlt, M, m, p, pr) var xM = isX(M) @@ -4208,7 +4256,7 @@ function replaceXRange (comp, options) { function replaceStars (comp, options) { debug('replaceStars', comp, options) // Looseness is ignored here. star is always as loose as it gets! - return comp.trim().replace(re[t.STAR], '') + return comp.trim().replace(safeRe[t.STAR], '') } // This function is passed to string.replace(re[t.HYPHENRANGE]) @@ -4534,7 +4582,7 @@ function coerce (version, options) { var match = null if (!options.rtl) { - match = version.match(re[t.COERCE]) + match = version.match(safeRe[t.COERCE]) } else { // Find the right-most coercible string that does not share // a terminus with a more left-ward coercible string. @@ -4545,17 +4593,17 @@ function coerce (version, options) { // Stop when we get a match that ends at the string end, since no // coercible string can be more right-ward without the same terminus. var next - while ((next = re[t.COERCERTL].exec(version)) && + while ((next = safeRe[t.COERCERTL].exec(version)) && (!match || match.index + match[0].length !== version.length) ) { if (!match || next.index + next[0].length !== match.index + match[0].length) { match = next } - re[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length + safeRe[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length } // leave it in a clean state - re[t.COERCERTL].lastIndex = -1 + safeRe[t.COERCERTL].lastIndex = -1 } if (match === null) { @@ -10543,8 +10591,11 @@ var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || // Max safe segment length for coercion. var MAX_SAFE_COMPONENT_LENGTH = 16 +var MAX_SAFE_BUILD_LENGTH = MAX_LENGTH - 6 + // The actual regexps go on exports.re var re = exports.re = [] +var safeRe = exports.safeRe = [] var src = exports.src = [] var t = exports.tokens = {} var R = 0 @@ -10553,6 +10604,31 @@ function tok (n) { t[n] = R++ } +var LETTERDASHNUMBER = '[a-zA-Z0-9-]' + +// Replace some greedy regex tokens to prevent regex dos issues. These regex are +// used internally via the safeRe object since all inputs in this library get +// normalized first to trim and collapse all extra whitespace. The original +// regexes are exported for userland consumption and lower level usage. A +// future breaking change could export the safer regex only with a note that +// all input should have extra whitespace removed. 
+var safeRegexReplacements = [ + ['\\s', 1], + ['\\d', MAX_LENGTH], + [LETTERDASHNUMBER, MAX_SAFE_BUILD_LENGTH], +] + +function makeSafeRe (value) { + for (var i = 0; i < safeRegexReplacements.length; i++) { + var token = safeRegexReplacements[i][0] + var max = safeRegexReplacements[i][1] + value = value + .split(token + '*').join(token + '{0,' + max + '}') + .split(token + '+').join(token + '{1,' + max + '}') + } + return value +} + // The following Regular Expressions can be used for tokenizing, // validating, and parsing SemVer version strings. @@ -10562,14 +10638,14 @@ function tok (n) { tok('NUMERICIDENTIFIER') src[t.NUMERICIDENTIFIER] = '0|[1-9]\\d*' tok('NUMERICIDENTIFIERLOOSE') -src[t.NUMERICIDENTIFIERLOOSE] = '[0-9]+' +src[t.NUMERICIDENTIFIERLOOSE] = '\\d+' // ## Non-numeric Identifier // Zero or more digits, followed by a letter or hyphen, and then zero or // more letters, digits, or hyphens. tok('NONNUMERICIDENTIFIER') -src[t.NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*' +src[t.NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-]' + LETTERDASHNUMBER + '*' // ## Main Version // Three dot-separated numeric identifiers. @@ -10611,7 +10687,7 @@ src[t.PRERELEASELOOSE] = '(?:-?(' + src[t.PRERELEASEIDENTIFIERLOOSE] + // Any combination of digits, letters, or hyphens. tok('BUILDIDENTIFIER') -src[t.BUILDIDENTIFIER] = '[0-9A-Za-z-]+' +src[t.BUILDIDENTIFIER] = LETTERDASHNUMBER + '+' // ## Build Metadata // Plus sign, followed by one or more period-separated build metadata @@ -10691,6 +10767,7 @@ src[t.COERCE] = '(^|[^\\d])' + '(?:$|[^\\d])' tok('COERCERTL') re[t.COERCERTL] = new RegExp(src[t.COERCE], 'g') +safeRe[t.COERCERTL] = new RegExp(makeSafeRe(src[t.COERCE]), 'g') // Tilde ranges. // Meaning is "reasonably at or greater than" @@ -10700,6 +10777,7 @@ src[t.LONETILDE] = '(?:~>?)' tok('TILDETRIM') src[t.TILDETRIM] = '(\\s*)' + src[t.LONETILDE] + '\\s+' re[t.TILDETRIM] = new RegExp(src[t.TILDETRIM], 'g') +safeRe[t.TILDETRIM] = new RegExp(makeSafeRe(src[t.TILDETRIM]), 'g') var tildeTrimReplace = '$1~' tok('TILDE') @@ -10715,6 +10793,7 @@ src[t.LONECARET] = '(?:\\^)' tok('CARETTRIM') src[t.CARETTRIM] = '(\\s*)' + src[t.LONECARET] + '\\s+' re[t.CARETTRIM] = new RegExp(src[t.CARETTRIM], 'g') +safeRe[t.CARETTRIM] = new RegExp(makeSafeRe(src[t.CARETTRIM]), 'g') var caretTrimReplace = '$1^' tok('CARET') @@ -10736,6 +10815,7 @@ src[t.COMPARATORTRIM] = '(\\s*)' + src[t.GTLT] + // this one has to use the /g flag re[t.COMPARATORTRIM] = new RegExp(src[t.COMPARATORTRIM], 'g') +safeRe[t.COMPARATORTRIM] = new RegExp(makeSafeRe(src[t.COMPARATORTRIM]), 'g') var comparatorTrimReplace = '$1$2$3' // Something like `1.2.3 - 1.2.4` @@ -10764,6 +10844,14 @@ for (var i = 0; i < R; i++) { debug(i, src[i]) if (!re[i]) { re[i] = new RegExp(src[i]) + + // Replace all greedy whitespace to prevent regex dos issues. These regex are + // used internally via the safeRe object since all inputs in this library get + // normalized first to trim and collapse all extra whitespace. The original + // regexes are exported for userland consumption and lower level usage. A + // future breaking change could export the safer regex only with a note that + // all input should have extra whitespace removed. + safeRe[i] = new RegExp(makeSafeRe(src[i])) } } @@ -10788,7 +10876,7 @@ function parse (version, options) { return null } - var r = options.loose ? re[t.LOOSE] : re[t.FULL] + var r = options.loose ? 
safeRe[t.LOOSE] : safeRe[t.FULL] if (!r.test(version)) { return null } @@ -10843,7 +10931,7 @@ function SemVer (version, options) { this.options = options this.loose = !!options.loose - var m = version.trim().match(options.loose ? re[t.LOOSE] : re[t.FULL]) + var m = version.trim().match(options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL]) if (!m) { throw new TypeError('Invalid Version: ' + version) @@ -11288,6 +11376,7 @@ function Comparator (comp, options) { return new Comparator(comp, options) } + comp = comp.trim().split(/\s+/).join(' ') debug('comparator', comp, options) this.options = options this.loose = !!options.loose @@ -11304,7 +11393,7 @@ function Comparator (comp, options) { var ANY = {} Comparator.prototype.parse = function (comp) { - var r = this.options.loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR] + var r = this.options.loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR] var m = comp.match(r) if (!m) { @@ -11428,9 +11517,16 @@ function Range (range, options) { this.loose = !!options.loose this.includePrerelease = !!options.includePrerelease - // First, split based on boolean or || + // First reduce all whitespace as much as possible so we do not have to rely + // on potentially slow regexes like \s*. This is then stored and used for + // future error messages as well. this.raw = range - this.set = range.split(/\s*\|\|\s*/).map(function (range) { + .trim() + .split(/\s+/) + .join(' ') + + // First, split based on boolean or || + this.set = this.raw.split('||').map(function (range) { return this.parseRange(range.trim()) }, this).filter(function (c) { // throw out any that are not relevant for whatever reason @@ -11438,7 +11534,7 @@ function Range (range, options) { }) if (!this.set.length) { - throw new TypeError('Invalid SemVer Range: ' + range) + throw new TypeError('Invalid SemVer Range: ' + this.raw) } this.format() @@ -11457,20 +11553,19 @@ Range.prototype.toString = function () { Range.prototype.parseRange = function (range) { var loose = this.options.loose - range = range.trim() // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` - var hr = loose ? re[t.HYPHENRANGELOOSE] : re[t.HYPHENRANGE] + var hr = loose ? safeRe[t.HYPHENRANGELOOSE] : safeRe[t.HYPHENRANGE] range = range.replace(hr, hyphenReplace) debug('hyphen replace', range) // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` - range = range.replace(re[t.COMPARATORTRIM], comparatorTrimReplace) - debug('comparator trim', range, re[t.COMPARATORTRIM]) + range = range.replace(safeRe[t.COMPARATORTRIM], comparatorTrimReplace) + debug('comparator trim', range, safeRe[t.COMPARATORTRIM]) // `~ 1.2.3` => `~1.2.3` - range = range.replace(re[t.TILDETRIM], tildeTrimReplace) + range = range.replace(safeRe[t.TILDETRIM], tildeTrimReplace) // `^ 1.2.3` => `^1.2.3` - range = range.replace(re[t.CARETTRIM], caretTrimReplace) + range = range.replace(safeRe[t.CARETTRIM], caretTrimReplace) // normalize spaces range = range.split(/\s+/).join(' ') @@ -11478,7 +11573,7 @@ Range.prototype.parseRange = function (range) { // At this point, the range is completely trimmed and // ready to be split into comparators. - var compRe = loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR] + var compRe = loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR] var set = range.split(' ').map(function (comp) { return parseComparator(comp, this.options) }, this).join(' ').split(/\s+/) @@ -11578,7 +11673,7 @@ function replaceTildes (comp, options) { } function replaceTilde (comp, options) { - var r = options.loose ? re[t.TILDELOOSE] : re[t.TILDE] + var r = options.loose ? 
safeRe[t.TILDELOOSE] : safeRe[t.TILDE]
   return comp.replace(r, function (_, M, m, p, pr) {
     debug('tilde', comp, _, M, m, p, pr)
     var ret
@@ -11619,7 +11714,7 @@ function replaceCarets (comp, options) {
 
 function replaceCaret (comp, options) {
   debug('caret', comp, options)
-  var r = options.loose ? re[t.CARETLOOSE] : re[t.CARET]
+  var r = options.loose ? safeRe[t.CARETLOOSE] : safeRe[t.CARET]
   return comp.replace(r, function (_, M, m, p, pr) {
     debug('caret', comp, _, M, m, p, pr)
     var ret
@@ -11678,7 +11773,7 @@ function replaceXRanges (comp, options) {
 
 function replaceXRange (comp, options) {
   comp = comp.trim()
-  var r = options.loose ? re[t.XRANGELOOSE] : re[t.XRANGE]
+  var r = options.loose ? safeRe[t.XRANGELOOSE] : safeRe[t.XRANGE]
   return comp.replace(r, function (ret, gtlt, M, m, p, pr) {
     debug('xRange', comp, ret, gtlt, M, m, p, pr)
     var xM = isX(M)
@@ -11753,7 +11848,7 @@ function replaceXRange (comp, options) {
 function replaceStars (comp, options) {
   debug('replaceStars', comp, options)
   // Looseness is ignored here. star is always as loose as it gets!
-  return comp.trim().replace(re[t.STAR], '')
+  return comp.trim().replace(safeRe[t.STAR], '')
 }
 
 // This function is passed to string.replace(re[t.HYPHENRANGE])
@@ -12079,7 +12174,7 @@ function coerce (version, options) {
 
   var match = null
   if (!options.rtl) {
-    match = version.match(re[t.COERCE])
+    match = version.match(safeRe[t.COERCE])
   } else {
     // Find the right-most coercible string that does not share
     // a terminus with a more left-ward coercible string.
@@ -12090,17 +12185,17 @@ function coerce (version, options) {
     // Stop when we get a match that ends at the string end, since no
     // coercible string can be more right-ward without the same terminus.
     var next
-    while ((next = re[t.COERCERTL].exec(version)) &&
+    while ((next = safeRe[t.COERCERTL].exec(version)) &&
       (!match || match.index + match[0].length !== version.length)
     ) {
       if (!match ||
         next.index + next[0].length !== match.index + match[0].length) {
         match = next
       }
-      re[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length
+      safeRe[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length
     }
     // leave it in a clean state
-    re[t.COERCERTL].lastIndex = -1
+    safeRe[t.COERCERTL].lastIndex = -1
   }
 
   if (match === null) {
@@ -54432,6 +54527,7 @@ class Comparator {
       }
     }
 
+    comp = comp.trim().split(/\s+/).join(' ')
     debug('comparator', comp, options)
     this.options = options
     this.loose = !!options.loose
@@ -54494,13 +54590,6 @@ class Comparator {
       throw new TypeError('a Comparator is required')
     }
 
-    if (!options || typeof options !== 'object') {
-      options = {
-        loose: !!options,
-        includePrerelease: false,
-      }
-    }
-
     if (this.operator === '') {
       if (this.value === '') {
         return true
@@ -54513,39 +54602,50 @@ class Comparator {
       return new Range(this.value, options).test(comp.semver)
     }
 
-    const sameDirectionIncreasing =
-      (this.operator === '>=' || this.operator === '>') &&
-      (comp.operator === '>=' || comp.operator === '>')
-    const sameDirectionDecreasing =
-      (this.operator === '<=' || this.operator === '<') &&
-      (comp.operator === '<=' || comp.operator === '<')
-    const sameSemVer = this.semver.version === comp.semver.version
-    const differentDirectionsInclusive =
-      (this.operator === '>=' || this.operator === '<=') &&
-      (comp.operator === '>=' || comp.operator === '<=')
-    const oppositeDirectionsLessThan =
-      cmp(this.semver, '<', comp.semver, options) &&
-      (this.operator === '>=' || this.operator === '>') &&
-      (comp.operator === '<=' || comp.operator === '<')
-    const oppositeDirectionsGreaterThan =
-      cmp(this.semver, '>', comp.semver, options) &&
-      (this.operator === '<=' || this.operator === '<') &&
-      (comp.operator === '>=' || comp.operator === '>')
+    options = parseOptions(options)
 
-    return (
-      sameDirectionIncreasing ||
-      sameDirectionDecreasing ||
-      (sameSemVer && differentDirectionsInclusive) ||
-      oppositeDirectionsLessThan ||
-      oppositeDirectionsGreaterThan
-    )
+    // Special cases where nothing can possibly be lower
+    if (options.includePrerelease &&
+      (this.value === '<0.0.0-0' || comp.value === '<0.0.0-0')) {
+      return false
+    }
+    if (!options.includePrerelease &&
+      (this.value.startsWith('<0.0.0') || comp.value.startsWith('<0.0.0'))) {
+      return false
+    }
+
+    // Same direction increasing (> or >=)
+    if (this.operator.startsWith('>') && comp.operator.startsWith('>')) {
+      return true
+    }
+    // Same direction decreasing (< or <=)
+    if (this.operator.startsWith('<') && comp.operator.startsWith('<')) {
+      return true
+    }
+    // same SemVer and both sides are inclusive (<= or >=)
+    if (
+      (this.semver.version === comp.semver.version) &&
+      this.operator.includes('=') && comp.operator.includes('=')) {
+      return true
+    }
+    // opposite directions less than
+    if (cmp(this.semver, '<', comp.semver, options) &&
+      this.operator.startsWith('>') && comp.operator.startsWith('<')) {
+      return true
+    }
+    // opposite directions greater than
+    if (cmp(this.semver, '>', comp.semver, options) &&
+      this.operator.startsWith('<') && comp.operator.startsWith('>')) {
+      return true
+    }
+    return false
   }
 }
 
 module.exports = Comparator
 
 const parseOptions = __nccwpck_require__(785)
-const { re, t } = __nccwpck_require__(9523)
+const { safeRe: re, t } = __nccwpck_require__(9523)
 const cmp = __nccwpck_require__(5098)
 const debug = __nccwpck_require__(427)
 const SemVer = __nccwpck_require__(8088)
@@ -54585,19 +54685,26 @@ class Range {
     this.loose = !!options.loose
     this.includePrerelease = !!options.includePrerelease
 
-    // First, split based on boolean or ||
+    // First reduce all whitespace as much as possible so we do not have to rely
+    // on potentially slow regexes like \s*. This is then stored and used for
+    // future error messages as well.
     this.raw = range
-    this.set = range
+      .trim()
+      .split(/\s+/)
+      .join(' ')
+
+    // First, split on ||
+    this.set = this.raw
       .split('||')
       // map the range to a 2d array of comparators
-      .map(r => this.parseRange(r.trim()))
+      .map(r => this.parseRange(r))
       // throw out any comparator lists that are empty
       // this generally means that it was not a valid range, which is allowed
       // in loose mode, but will still throw if the WHOLE range is invalid.
      .filter(c => c.length)
 
     if (!this.set.length) {
-      throw new TypeError(`Invalid SemVer Range: ${range}`)
+      throw new TypeError(`Invalid SemVer Range: ${this.raw}`)
     }
 
     // if we have any that are not the null set, throw out null sets.
@@ -54623,9 +54730,7 @@ class Range {
 
   format () {
     this.range = this.set
-      .map((comps) => {
-        return comps.join(' ').trim()
-      })
+      .map((comps) => comps.join(' ').trim())
       .join('||')
       .trim()
     return this.range
@@ -54636,12 +54741,12 @@ class Range {
   }
 
   parseRange (range) {
-    range = range.trim()
-
     // memoize range parsing for performance.
     // this is a very hot path, and fully deterministic.
-    const memoOpts = Object.keys(this.options).join(',')
-    const memoKey = `parseRange:${memoOpts}:${range}`
+    const memoOpts =
+      (this.options.includePrerelease && FLAG_INCLUDE_PRERELEASE) |
+      (this.options.loose && FLAG_LOOSE)
+    const memoKey = memoOpts + ':' + range
     const cached = cache.get(memoKey)
     if (cached) {
       return cached
@@ -54662,9 +54767,6 @@ class Range {
     // `^ 1.2.3` => `^1.2.3`
     range = range.replace(re[t.CARETTRIM], caretTrimReplace)
 
-    // normalize spaces
-    range = range.split(/\s+/).join(' ')
-
     // At this point, the range is completely trimmed and
     // ready to be split into comparators.
 
@@ -54749,6 +54851,7 @@ class Range {
     return false
   }
 }
+
 module.exports = Range
 
 const LRU = __nccwpck_require__(7129)
@@ -54759,12 +54862,13 @@ const Comparator = __nccwpck_require__(1532)
 const debug = __nccwpck_require__(427)
 const SemVer = __nccwpck_require__(8088)
 const {
-  re,
+  safeRe: re,
   t,
   comparatorTrimReplace,
   tildeTrimReplace,
   caretTrimReplace,
 } = __nccwpck_require__(9523)
+const { FLAG_INCLUDE_PRERELEASE, FLAG_LOOSE } = __nccwpck_require__(2293)
 
 const isNullSet = c => c.value === '<0.0.0-0'
 const isAny = c => c.value === ''
@@ -54812,10 +54916,13 @@ const isX = id => !id || id.toLowerCase() === 'x' || id === '*'
 // ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0-0
 // ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0-0
 // ~0.0.1 --> >=0.0.1 <0.1.0-0
-const replaceTildes = (comp, options) =>
-  comp.trim().split(/\s+/).map((c) => {
-    return replaceTilde(c, options)
-  }).join(' ')
+const replaceTildes = (comp, options) => {
+  return comp
+    .trim()
+    .split(/\s+/)
+    .map((c) => replaceTilde(c, options))
+    .join(' ')
+}
 
 const replaceTilde = (comp, options) => {
   const r = options.loose ? re[t.TILDELOOSE] : re[t.TILDE]
@@ -54853,10 +54960,13 @@ const replaceTilde = (comp, options) => {
 // ^1.2.0 --> >=1.2.0 <2.0.0-0
 // ^0.0.1 --> >=0.0.1 <0.0.2-0
 // ^0.1.0 --> >=0.1.0 <0.2.0-0
-const replaceCarets = (comp, options) =>
-  comp.trim().split(/\s+/).map((c) => {
-    return replaceCaret(c, options)
-  }).join(' ')
+const replaceCarets = (comp, options) => {
+  return comp
+    .trim()
+    .split(/\s+/)
+    .map((c) => replaceCaret(c, options))
+    .join(' ')
+}
 
 const replaceCaret = (comp, options) => {
   debug('caret', comp, options)
@@ -54913,9 +55023,10 @@ const replaceCaret = (comp, options) => {
 
 const replaceXRanges = (comp, options) => {
   debug('replaceXRanges', comp, options)
-  return comp.split(/\s+/).map((c) => {
-    return replaceXRange(c, options)
-  }).join(' ')
+  return comp
+    .split(/\s+/)
+    .map((c) => replaceXRange(c, options))
+    .join(' ')
 }
 
 const replaceXRange = (comp, options) => {
@@ -54998,12 +55109,15 @@ const replaceXRange = (comp, options) => {
 const replaceStars = (comp, options) => {
   debug('replaceStars', comp, options)
   // Looseness is ignored here. star is always as loose as it gets!
-  return comp.trim().replace(re[t.STAR], '')
+  return comp
+    .trim()
+    .replace(re[t.STAR], '')
 }
 
 const replaceGTE0 = (comp, options) => {
   debug('replaceGTE0', comp, options)
-  return comp.trim()
+  return comp
+    .trim()
     .replace(re[options.includePrerelease ? t.GTE0PRE : t.GTE0], '')
 }
 
@@ -55041,7 +55155,7 @@ const hyphenReplace = incPr => ($0,
     to = `<=${to}`
   }
 
-  return (`${from} ${to}`).trim()
+  return `${from} ${to}`.trim()
 }
 
 const testSet = (set, version, options) => {
@@ -55088,7 +55202,7 @@ const testSet = (set, version, options) => {
 
 const debug = __nccwpck_require__(427)
 const { MAX_LENGTH, MAX_SAFE_INTEGER } = __nccwpck_require__(2293)
-const { re, t } = __nccwpck_require__(9523)
+const { safeRe: re, t } = __nccwpck_require__(9523)
 
 const parseOptions = __nccwpck_require__(785)
 const { compareIdentifiers } = __nccwpck_require__(2463)
@@ -55104,7 +55218,7 @@ class SemVer {
         version = version.version
       }
     } else if (typeof version !== 'string') {
-      throw new TypeError(`Invalid Version: ${version}`)
+      throw new TypeError(`Invalid version. Must be a string. Got type "${typeof version}".`)
     }
 
     if (version.length > MAX_LENGTH) {
@@ -55263,36 +55377,36 @@ class SemVer {
   // preminor will bump the version up to the next minor release, and immediately
   // down to pre-release. premajor and prepatch work the same way.
-  inc (release, identifier) {
+  inc (release, identifier, identifierBase) {
     switch (release) {
       case 'premajor':
         this.prerelease.length = 0
         this.patch = 0
         this.minor = 0
         this.major++
-        this.inc('pre', identifier)
+        this.inc('pre', identifier, identifierBase)
         break
       case 'preminor':
         this.prerelease.length = 0
         this.patch = 0
         this.minor++
-        this.inc('pre', identifier)
+        this.inc('pre', identifier, identifierBase)
         break
       case 'prepatch':
         // If this is already a prerelease, it will bump to the next version
         // drop any prereleases that might already exist, since they are not
         // relevant at this point.
         this.prerelease.length = 0
-        this.inc('patch', identifier)
-        this.inc('pre', identifier)
+        this.inc('patch', identifier, identifierBase)
+        this.inc('pre', identifier, identifierBase)
         break
       // If the input is a non-prerelease version, this acts the same as
      // prepatch.
       case 'prerelease':
         if (this.prerelease.length === 0) {
-          this.inc('patch', identifier)
+          this.inc('patch', identifier, identifierBase)
         }
-        this.inc('pre', identifier)
+        this.inc('pre', identifier, identifierBase)
         break
 
       case 'major':
@@ -55334,9 +55448,15 @@ class SemVer {
         break
       // This probably shouldn't be used publicly.
      // 1.0.0 'pre' would become 1.0.0-0 which is the wrong direction.
-      case 'pre':
+      case 'pre': {
+        const base = Number(identifierBase) ? 1 : 0
+
+        if (!identifier && identifierBase === false) {
+          throw new Error('invalid increment argument: identifier is empty')
+        }
+
         if (this.prerelease.length === 0) {
-          this.prerelease = [0]
+          this.prerelease = [base]
         } else {
           let i = this.prerelease.length
           while (--i >= 0) {
@@ -55347,27 +55467,36 @@ class SemVer {
           }
           if (i === -1) {
            // didn't increment anything
-            this.prerelease.push(0)
+            if (identifier === this.prerelease.join('.') && identifierBase === false) {
+              throw new Error('invalid increment argument: identifier already exists')
+            }
+            this.prerelease.push(base)
          }
         }
         if (identifier) {
           // 1.2.0-beta.1 bumps to 1.2.0-beta.2,
           // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0
+          let prerelease = [identifier, base]
+          if (identifierBase === false) {
+            prerelease = [identifier]
+          }
           if (compareIdentifiers(this.prerelease[0], identifier) === 0) {
             if (isNaN(this.prerelease[1])) {
-              this.prerelease = [identifier, 0]
+              this.prerelease = prerelease
            }
           } else {
-            this.prerelease = [identifier, 0]
+            this.prerelease = prerelease
           }
         }
         break
-
+      }
       default:
        throw new Error(`invalid increment argument: ${release}`)
     }
-    this.format()
-    this.raw = this.version
+    this.raw = this.format()
+    if (this.build.length) {
+      this.raw += `+${this.build.join('.')}`
+    }
     return this
   }
 }
@@ -55454,7 +55583,7 @@ module.exports = cmp
 
 const SemVer = __nccwpck_require__(8088)
 const parse = __nccwpck_require__(5925)
-const { re, t } = __nccwpck_require__(9523)
+const { safeRe: re, t } = __nccwpck_require__(9523)
 
 const coerce = (version, options) => {
   if (version instanceof SemVer) {
@@ -55548,27 +55677,69 @@ module.exports = compare
 
 /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
 
 const parse = __nccwpck_require__(5925)
-const eq = __nccwpck_require__(1898)
 
 const diff = (version1, version2) => {
-  if (eq(version1, version2)) {
+  const v1 = parse(version1, null, true)
+  const v2 = parse(version2, null, true)
+  const comparison = v1.compare(v2)
+
+  if (comparison === 0) {
     return null
-  } else {
-    const v1 = parse(version1)
-    const v2 = parse(version2)
-    const hasPre = v1.prerelease.length || v2.prerelease.length
-    const prefix = hasPre ? 'pre' : ''
-    const defaultResult = hasPre ? 'prerelease' : ''
-    for (const key in v1) {
-      if (key === 'major' || key === 'minor' || key === 'patch') {
-        if (v1[key] !== v2[key]) {
-          return prefix + key
-        }
-      }
+  }
+
+  const v1Higher = comparison > 0
+  const highVersion = v1Higher ? v1 : v2
+  const lowVersion = v1Higher ? v2 : v1
+  const highHasPre = !!highVersion.prerelease.length
+  const lowHasPre = !!lowVersion.prerelease.length
+
+  if (lowHasPre && !highHasPre) {
+    // Going from prerelease -> no prerelease requires some special casing
+
+    // If the low version has only a major, then it will always be a major
+    // Some examples:
+    // 1.0.0-1 -> 1.0.0
+    // 1.0.0-1 -> 1.1.1
+    // 1.0.0-1 -> 2.0.0
+    if (!lowVersion.patch && !lowVersion.minor) {
+      return 'major'
    }
-    return defaultResult // may be undefined
+
+    // Otherwise it can be determined by checking the high version
+
+    if (highVersion.patch) {
+      // anything higher than a patch bump would result in the wrong version
+      return 'patch'
+    }
+
+    if (highVersion.minor) {
+      // anything higher than a minor bump would result in the wrong version
+      return 'minor'
+    }
+
+    // bumping major/minor/patch all have same result
+    return 'major'
  }
+
+  // add the `pre` prefix if we are going to a prerelease version
+  const prefix = highHasPre ? 'pre' : ''
+
+  if (v1.major !== v2.major) {
+    return prefix + 'major'
+  }
+
+  if (v1.minor !== v2.minor) {
+    return prefix + 'minor'
+  }
+
+  if (v1.patch !== v2.patch) {
+    return prefix + 'patch'
+  }
+
+  // high and low are preleases
+  return 'prerelease'
 }
+
 module.exports = diff
@@ -55609,8 +55780,9 @@ module.exports = gte
 
 const SemVer = __nccwpck_require__(8088)
 
-const inc = (version, release, options, identifier) => {
+const inc = (version, release, options, identifier, identifierBase) => {
   if (typeof (options) === 'string') {
+    identifierBase = identifier
     identifier = options
     options = undefined
   }
@@ -55619,7 +55791,7 @@ const inc = (version, release, options, identifier) => {
     return new SemVer(
       version instanceof SemVer ? version.version : version,
       options
-    ).inc(release, identifier).version
+    ).inc(release, identifier, identifierBase).version
   } catch (er) {
     return null
   }
@@ -55682,35 +55854,18 @@ module.exports = neq
 
 /***/ 5925:
 /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
 
-const { MAX_LENGTH } = __nccwpck_require__(2293)
-const { re, t } = __nccwpck_require__(9523)
 const SemVer = __nccwpck_require__(8088)
-
-const parseOptions = __nccwpck_require__(785)
-const parse = (version, options) => {
-  options = parseOptions(options)
-
+const parse = (version, options, throwErrors = false) => {
   if (version instanceof SemVer) {
     return version
  }
-
-  if (typeof version !== 'string') {
-    return null
-  }
-
-  if (version.length > MAX_LENGTH) {
-    return null
-  }
-
-  const r = options.loose ? re[t.LOOSE] : re[t.FULL]
-  if (!r.test(version)) {
-    return null
-  }
-
   try {
     return new SemVer(version, options)
   } catch (er) {
-    return null
+    if (!throwErrors) {
+      return null
+    }
+    throw er
   }
 }
 
@@ -55890,6 +56045,7 @@ module.exports = {
   src: internalRe.src,
   tokens: internalRe.t,
   SEMVER_SPEC_VERSION: constants.SEMVER_SPEC_VERSION,
+  RELEASE_TYPES: constants.RELEASE_TYPES,
   compareIdentifiers: identifiers.compareIdentifiers,
   rcompareIdentifiers: identifiers.rcompareIdentifiers,
 }
@@ -55911,11 +56067,24 @@ const MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER ||
 // Max safe segment length for coercion.
 const MAX_SAFE_COMPONENT_LENGTH = 16
 
+const RELEASE_TYPES = [
+  'major',
+  'premajor',
+  'minor',
+  'preminor',
+  'patch',
+  'prepatch',
+  'prerelease',
+]
+
 module.exports = {
-  SEMVER_SPEC_VERSION,
   MAX_LENGTH,
-  MAX_SAFE_INTEGER,
   MAX_SAFE_COMPONENT_LENGTH,
+  MAX_SAFE_INTEGER,
+  RELEASE_TYPES,
+  SEMVER_SPEC_VERSION,
+  FLAG_INCLUDE_PRERELEASE: 0b001,
+  FLAG_LOOSE: 0b010,
 }
 
@@ -55970,16 +56139,20 @@ module.exports = {
 
 /***/ 785:
 /***/ ((module) => {
 
-// parse out just the options we care about so we always get a consistent
-// obj with keys in a consistent order.
-const opts = ['includePrerelease', 'loose', 'rtl']
-const parseOptions = options =>
-  !options ? {}
-  : typeof options !== 'object' ? { loose: true }
-  : opts.filter(k => options[k]).reduce((o, k) => {
-    o[k] = true
-    return o
-  }, {})
+// parse out just the options we care about
+const looseOption = Object.freeze({ loose: true })
+const emptyOpts = Object.freeze({ })
+const parseOptions = options => {
+  if (!options) {
+    return emptyOpts
+  }
+
+  if (typeof options !== 'object') {
+    return looseOption
+  }
+
+  return options
+}
 module.exports = parseOptions
 
@@ -55994,16 +56167,27 @@ exports = module.exports = {}
 
 // The actual regexps go on exports.re
 const re = exports.re = []
+const safeRe = exports.safeRe = []
 const src = exports.src = []
 const t = exports.t = {}
 let R = 0
 
 const createToken = (name, value, isGlobal) => {
+  // Replace all greedy whitespace to prevent regex dos issues. These regex are
+  // used internally via the safeRe object since all inputs in this library get
+  // normalized first to trim and collapse all extra whitespace. The original
+  // regexes are exported for userland consumption and lower level usage. A
+  // future breaking change could export the safer regex only with a note that
+  // all input should have extra whitespace removed.
+  const safe = value
+    .split('\\s*').join('\\s{0,1}')
+    .split('\\s+').join('\\s')
  const index = R++
  debug(name, index, value)
  t[name] = index
  src[index] = value
  re[index] = new RegExp(value, isGlobal ? 'g' : undefined)
+  safeRe[index] = new RegExp(safe, isGlobal ? 'g' : undefined)
 }
 
 // The following Regular Expressions can be used for tokenizing,
@@ -56192,7 +56376,7 @@ const Range = __nccwpck_require__(9828)
 const intersects = (r1, r2, options) => {
   r1 = new Range(r1, options)
   r2 = new Range(r2, options)
-  return r1.intersects(r2)
+  return r1.intersects(r2, options)
 }
 module.exports = intersects
 
@@ -56555,6 +56739,9 @@ const subset = (sub, dom, options = {}) => {
   return true
 }
 
+const minimumVersionWithPreRelease = [new Comparator('>=0.0.0-0')]
+const minimumVersion = [new Comparator('>=0.0.0')]
+
 const simpleSubset = (sub, dom, options) => {
   if (sub === dom) {
     return true
@@ -56564,9 +56751,9 @@ const simpleSubset = (sub, dom, options) => {
     if (dom.length === 1 && dom[0].semver === ANY) {
       return true
     } else if (options.includePrerelease) {
-      sub = [new Comparator('>=0.0.0-0')]
+      sub = minimumVersionWithPreRelease
    } else {
-      sub = [new Comparator('>=0.0.0')]
+      sub = minimumVersion
    }
  }
 
@@ -56574,7 +56761,7 @@ const simpleSubset = (sub, dom, options) => {
     if (options.includePrerelease) {
       return true
     } else {
-      dom = [new Comparator('>=0.0.0')]
+      dom = minimumVersion
     }
   }
 
diff --git a/src/setup.ts b/src/setup.ts
index 2cd1927..20566e6 100644
--- a/src/setup.ts
+++ b/src/setup.ts
@@ -24,6 +24,8 @@ export async function setup(version: string, nightly: boolean, cacheDependencyPa
   core.exportVariable('HAXE_STD_PATH', path.join(haxePath, 'std'));
 
   if (env.platform === 'osx') {
+    core.exportVariable('DYLD_FALLBACK_LIBRARY_PATH', `${nekoPath}:$DYLD_FALLBACK_LIBRARY_PATH`);
+
     // Ref: https://github.com/asdf-community/asdf-haxe/pull/7
     await exec('ln', [
      '-sfv',
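A minimal standalone sketch of the bounded-quantifier rewrite that the bundled createToken changes above apply; the makeSafe helper and the sample pattern are illustrative assumptions, not part of the patch:

  // Same substitution the updated createToken performs: cap unbounded
  // whitespace quantifiers so crafted input cannot trigger catastrophic
  // backtracking (regex DoS) in the internally used safeRe copies.
  const makeSafe = (value) => value
    .split('\\s*').join('\\s{0,1}')
    .split('\\s+').join('\\s')

  const greedy = '(\\s*\\d+\\s+)' // hypothetical token with greedy whitespace
  const safe = new RegExp(makeSafe(greedy))
  // safe.source === '(\\s{0,1}\\d+\\s)'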
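The widened inc() signature threaded through the hunks above accepts an extra identifierBase argument; a hedged usage sketch, assuming the bundled semver behaves like upstream 7.x:

  const semver = require('semver')

  semver.inc('1.2.3', 'prerelease', 'beta')        // '1.2.4-beta.0'
  semver.inc('1.2.3', 'prerelease', 'beta', '1')   // '1.2.4-beta.1' (identifierBase '1')
  semver.inc('1.2.3', 'prerelease', 'beta', false) // '1.2.4-beta'   (no numeric base)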